text
stringlengths
27
775k
#! /bin/sh
##
## Copyright (C) by Argonne National Laboratory
## See COPYRIGHT in top-level directory
##

# Output file: autogenerated C source embedding the collective-selection
# tuning JSON files as char arrays.
cfile="src/mpi/coll/src/csel_json_autogen.c"

# create_json_buf JSON_FILE BUFNAME
#   Append `const char BUFNAME[] = {...};` to $cfile, containing every
#   non-whitespace character of JSON_FILE as a quoted char, NUL-terminated.
create_json_buf() {
    json_file=$1
    bufname=$2

    echo "const char $bufname[] = {" >> "$cfile"

    # Split all the characters in the json file into a character array.
    # (awk reads the file directly; the former `cat | awk` was redundant.)
    # NOTE(review): this strips ALL whitespace, including any inside JSON
    # string values, and a literal single-quote in the JSON would produce
    # invalid C -- assumed not to occur in these tuning files; verify.
    awk '{ printf("%s ", $0); }' "$json_file" | \
        sed -e "s/[[:space:]]//g" -e "s/\(.\)/'\1',/g" >> "$cfile"

    echo "0 };" >> "$cfile"
    echo >> "$cfile"
}

rm -f "$cfile"

cat > "$cfile" <<EOF
/*
 * Copyright (C) by Argonne National Laboratory
 * See COPYRIGHT in top-level directory
 */

/* DO NOT EDIT!  This file is autogenerated from the corresponding
 * json files */

EOF

# create specific json buffers
create_json_buf maint/tuning/coll/mpir/generic.json MPII_coll_generic_json
create_json_buf maint/tuning/coll/ch4/generic.json MPIDI_coll_generic_json
create_json_buf maint/tuning/coll/ch4/posix_generic.json MPIDI_POSIX_coll_generic_json
// Copyright eeGeo Ltd (2012-2015), All Rights Reserved #include "PlaceJumpController.h" #include "IPlaceJumpsModel.h" #include "LatLongAltitude.h" #include "Types.h" #include "ICameraTransitionController.h" namespace ExampleApp { namespace PlaceJumps { namespace SdkModel { PlaceJumpController::PlaceJumpController(CameraTransitions::SdkModel::ICameraTransitionController& cameraTransitionController) : m_cameraTransitionController(cameraTransitionController) { } void PlaceJumpController::JumpTo(const View::IPlaceJumpModel& jumpModel) { m_cameraTransitionController.StartTransitionTo(jumpModel.GetLocation().ToECEF(), jumpModel.GetDistance(), Eegeo::Math::Deg2Rad(jumpModel.GetHeadingDegrees())); } } } }
/* * Copyright 2015-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.zookeeper.discovery; import java.util.HashMap; import java.util.Map; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.cloud.commons.util.InetUtils; import org.springframework.cloud.zookeeper.support.StatusConstants; import org.springframework.util.StringUtils; /** * Properties related to Zookeeper's Service Discovery. * * @author Spencer Gibb * @since 1.0.0 */ @ConfigurationProperties(ZookeeperDiscoveryProperties.PREFIX) public class ZookeeperDiscoveryProperties { /** * Zookeeper Discovery Properties prefix. */ public static final String PREFIX = "spring.cloud.zookeeper.discovery"; /** * Default URI spec. */ public static final String DEFAULT_URI_SPEC = "{scheme}://{address}:{port}"; private InetUtils.HostInfo hostInfo; private boolean enabled = true; /** * Root Zookeeper folder in which all instances are registered. */ private String root = "/services"; /** * The URI specification to resolve during service registration in Zookeeper. */ private String uriSpec = DEFAULT_URI_SPEC; /** Id used to register with zookeeper. Defaults to a random UUID. */ private String instanceId; /** * Predefined host with which a service can register itself in Zookeeper. Corresponds * to the {code address} from the URI spec. 
*/ private String instanceHost; /** * IP address to use when accessing service (must also set preferIpAddress to use). */ private String instanceIpAddress; /** * Use ip address rather than hostname during registration. */ private boolean preferIpAddress = false; /** Port to register the service under (defaults to listening port). */ private Integer instancePort; /** Ssl port of the registered service. */ private Integer instanceSslPort; /** * Register as a service in zookeeper. */ private boolean register = true; /** * Gets the metadata name/value pairs associated with this instance. This information * is sent to zookeeper and can be used by other instances. */ private Map<String, String> metadata = new HashMap<>(); /** * The initial status of this instance (defaults to * {@link StatusConstants#STATUS_UP}). */ private String initialStatus = StatusConstants.STATUS_UP; /** * Order of the discovery client used by `CompositeDiscoveryClient` for sorting * available clients. */ private int order = 0; // Visible for Testing protected ZookeeperDiscoveryProperties() { } public ZookeeperDiscoveryProperties(InetUtils inetUtils) { this.hostInfo = inetUtils.findFirstNonLoopbackHostInfo(); this.instanceHost = this.hostInfo.getHostname(); this.instanceIpAddress = this.hostInfo.getIpAddress(); } public boolean isEnabled() { return this.enabled; } public String getRoot() { return this.root; } public String getUriSpec() { return this.uriSpec; } public String getInstanceHost() { if (this.preferIpAddress && StringUtils.hasText(this.instanceIpAddress)) { return this.instanceIpAddress; } return this.instanceHost; } public void setEnabled(boolean enabled) { this.enabled = enabled; } public void setRoot(String root) { this.root = DependencyPathUtils.sanitize(root); } public void setUriSpec(String uriSpec) { this.uriSpec = uriSpec; } public String getInstanceId() { return this.instanceId; } public void setInstanceId(String instanceId) { this.instanceId = instanceId; } public void 
setInstanceHost(String instanceHost) { this.instanceHost = instanceHost; this.hostInfo.override = true; } public void setInstanceIpAddress(String instanceIpAddress) { this.instanceIpAddress = instanceIpAddress; this.hostInfo.override = true; } public void setPreferIpAddress(boolean preferIpAddress) { this.preferIpAddress = preferIpAddress; } public Map<String, String> getMetadata() { return this.metadata; } public void setMetadata(Map<String, String> metadata) { this.metadata = metadata; } public boolean isRegister() { return this.register; } public void setRegister(boolean register) { this.register = register; } public Integer getInstancePort() { return this.instancePort; } public void setInstancePort(Integer instancePort) { this.instancePort = instancePort; } public Integer getInstanceSslPort() { return this.instanceSslPort; } public void setInstanceSslPort(Integer instanceSslPort) { this.instanceSslPort = instanceSslPort; } public String getInitialStatus() { return this.initialStatus; } public void setInitialStatus(String initialStatus) { this.initialStatus = initialStatus; } public int getOrder() { return this.order; } public void setOrder(int order) { this.order = order; } @Override public String toString() { return "ZookeeperDiscoveryProperties{" + "enabled=" + this.enabled + ", root='" + this.root + '\'' + ", uriSpec='" + this.uriSpec + '\'' + ", instanceId='" + this.instanceId + '\'' + ", instanceHost='" + this.instanceHost + '\'' + ", instancePort='" + this.instancePort + '\'' + ", instanceSslPort='" + this.instanceSslPort + '\'' + ", metadata=" + this.metadata + ", register=" + this.register + ", initialStatus=" + this.initialStatus + ", order=" + this.order + '}'; } }
import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
import os


class TestTargetSourceMap(TestBase):
    """Tests the `target.source-map` setting: set/replace/insert-after/append/
    remove/clear, invalid-path error reporting, and breakpoint resolution."""

    mydir = TestBase.compute_mydir(__file__)

    @no_debug_info_test
    def test_source_map(self):
        """Test target.source-map functionality."""

        def assertBreakpointWithSourceMap(src_path):
            # Set a breakpoint after we remap source and verify that it succeeds
            bp = target.BreakpointCreateByLocation(src_path, 2)
            # FIX: assertEquals is a deprecated alias of assertEqual.
            self.assertEqual(bp.GetNumLocations(), 1,
                             "make sure breakpoint was resolved with map")

            # Now make sure that we can actually FIND the source file using
            # this remapping:
            retval = lldb.SBCommandReturnObject()
            self.dbg.GetCommandInterpreter().HandleCommand(
                "source list -f main.c -l 2", retval)
            self.assertTrue(retval.Succeeded(), "source list didn't succeed.")
            # FIX: message typo "ouput" -> "output".
            self.assertNotEqual(retval.GetOutput(), None,
                                "We got no output from source list")
            self.assertTrue("return" in retval.GetOutput(),
                            "We didn't find the source file...")

        # Set the target source map to map "./" to the current test directory
        src_dir = self.getSourceDir()
        src_path = os.path.join(src_dir, "main.c")
        yaml_path = os.path.join(src_dir, "a.yaml")
        yaml_base, ext = os.path.splitext(yaml_path)
        obj_path = self.getBuildArtifact("main.o")
        self.yaml2obj(yaml_path, obj_path)

        # Create a target with the object file we just created from YAML
        target = self.dbg.CreateTarget(obj_path)

        # Set a breakpoint before we remap source and verify that it fails
        bp = target.BreakpointCreateByLocation(src_path, 2)
        self.assertEqual(bp.GetNumLocations(), 0,
                         "make sure no breakpoints were resolved without map")

        valid_path = os.path.dirname(src_dir)
        valid_path2 = os.path.dirname(valid_path)
        invalid_path = src_dir + "invalid_path"
        invalid_path2 = src_dir + "invalid_path2"

        # We make sure the error message contains all the invalid paths
        self.expect(
            'settings set target.source-map . "%s" . "%s" . "%s" . "%s' \
            % (invalid_path, src_dir, invalid_path2, valid_path),
            substrs=[
                'error: the replacement path doesn\'t exist: "%s"' % (invalid_path),
                'the replacement path doesn\'t exist: "%s"' % (invalid_path2),
            ],
            error=True,
        )
        self.expect(
            'settings show target.source-map',
            substrs=[
                '[0] "." -> "%s"' % (src_dir),
                '[1] "." -> "%s"' % (valid_path),
            ],
        )
        assertBreakpointWithSourceMap(src_path)

        # Attempts to replace an index to an invalid mapping should have no
        # effect. Modifications to valid mappings should work.
        self.expect(
            'settings replace target.source-map 0 . "%s" . "%s"'
            % (invalid_path, valid_path2),
            substrs=[
                'error: the replacement path doesn\'t exist: "%s"' % (invalid_path),
            ],
            error=True,
        )
        self.expect(
            'settings show target.source-map',
            substrs=[
                '[0] "." -> "%s"' % (src_dir),
                '[1] "." -> "%s"' % (valid_path2),
            ]
        )
        assertBreakpointWithSourceMap(src_path)

        # Let's clear and add the mapping back with insert-after
        self.runCmd('settings remove target.source-map 0')
        self.expect(
            'settings show target.source-map',
            substrs=['[0] "." -> "%s"' % (valid_path2)],
        )
        self.expect(
            'settings insert-after target.source-map 0 . "%s" . "%s" . "%s"' \
            % (invalid_path, invalid_path2, src_dir),
            substrs=[
                'error: the replacement path doesn\'t exist: "%s"' % (invalid_path),
                'the replacement path doesn\'t exist: "%s"' % (invalid_path2),
            ],
            error=True,
        )
        self.expect(
            'settings show target.source-map',
            substrs=[
                '[0] "." -> "%s"' % (valid_path2),
                '[1] "." -> "%s"' % (src_dir),
            ]
        )

        # Let's clear using remove and add the mapping in with append
        self.runCmd('settings remove target.source-map 1')
        self.expect(
            'settings show target.source-map',
            substrs=[
                '[0] "." -> "%s"' % (valid_path2),
            ]
        )
        self.runCmd('settings clear target.source-map')
        self.expect(
            'settings append target.source-map . "%s" . "%s" . "%s"'
            % (invalid_path, src_dir, invalid_path2),
            substrs=[
                'error: the replacement path doesn\'t exist: "%s"' % (invalid_path),
                'the replacement path doesn\'t exist: "%s"' % (invalid_path2),
            ],
            error=True,
        )
        self.expect(
            'settings show target.source-map',
            substrs=[
                '[0] "." -> "%s"' % (src_dir),
            ]
        )
        assertBreakpointWithSourceMap(src_path)
package com.padcmyanmar.mmnews.kotlin.data.vos

import android.arch.persistence.room.ColumnInfo
import android.arch.persistence.room.Entity
import android.arch.persistence.room.Ignore
import android.arch.persistence.room.PrimaryKey
import android.text.TextUtils
import com.google.gson.annotations.SerializedName

/**
 * Room entity / GSON model for a comment action on a news item.
 * The embedded [ActedUserVO] is not persisted; only its id is stored.
 */
@Entity(tableName = "comments")
class CommentActionVO {

    // Auto-generated local primary key.
    @PrimaryKey(autoGenerate = true)
    var id: Long = 0

    @SerializedName("comment-id")
    @ColumnInfo(name = "comment_id")
    var commentId: String? = null

    @SerializedName("comment")
    @ColumnInfo(name = "comment")
    var comment: String? = null

    @SerializedName("comment-date")
    @ColumnInfo(name = "comment_date")
    var commentDate: String? = null

    // Full user object from the API; excluded from the Room schema.
    @SerializedName("acted-user")
    @Ignore
    var actedUser: ActedUserVO? = null

    // Persisted foreign key; lazily backfilled from actedUser on first read.
    @ColumnInfo(name = "acted_user_id")
    @Transient
    var actedUserId: String? = null
        get() {
            if (TextUtils.isEmpty(field)) {
                // FIX: use a safe call instead of `actedUser!!` -- the old
                // code threw a NullPointerException when this getter ran
                // before actedUser was populated (e.g. on entities loaded
                // from the database, where actedUser is @Ignore'd).
                field = actedUser?.userId
            }
            return field
        }

    @ColumnInfo(name = "news_id")
    @Transient
    var newsId: String? = null
}
<?php
/**
 * @project bkn-api.
 * @since 1/15/2017 6:43 PM
 * @author <a href = "fauzi.knightmaster.achmad@gmail.com">Achmad Fauzi</a>
 */

namespace app\ConstantValue;

/**
 * Table and column name constants for the assignment-letter master table.
 */
interface IModelAssignmentLetterConstant
{
    // NOTE(review): 'assigment' is missing an 'n' -- presumably this matches
    // the actual table name in the database; verify before correcting.
    const ASSIGNMENT_LETTER_TABLE_NAME = 'mst_assigment_letter';

    // NOTE(review): constant named ..._LETTER_TITLE maps to column
    // 'letter_type' -- confirm which of the two is intended.
    const ASSIGNMENT_LETTER_COL_LETTER_TITLE = 'letter_type';

    const ASSIGNMENT_LETTER_COL_LETTER_BODY = 'letter_body';

    const ASSIGNMENT_LETTER_COL_LETTER_FOOTER = 'letter_footer';

    const ASSIGNMENT_LETTER_COL_SIGNATURE = 'signature';
}
# 1D data
#
# Plot recipe for a 1-D metric: a line of metric values against the physical
# coordinates of one scan axis.  The caller must pass `axes` ("x", "y", or
# "z") and the scan's `coordinates` array.
@recipe function f(metric::Metric{T, 1}; axes="nil", coordinates=nothing) where {T <: UltrasoundMetric}
    axes == "nil" && error("Keyword missing: axes=\"x\" or axes=\"y\"..")
    check_coordinates(coordinates)

    title --> get_metric_display_name(metric)
    xguide --> "$(axes)-axis"
    aspect_ratio --> :equal
    legend --> false

    target_dimension = 1
    axis_coordinates = get_axis_coordinates(axes, target_dimension, coordinates)
    return axis_coordinates, metric.val
end

# Fail fast with a helpful message when the caller forgot `coordinates`.
function check_coordinates(coordinates)
    if isnothing(coordinates)
        error("You must set coordinates from scan in keyword arg: ;coordinates=scan.coordinates")
    end
end

# Physical positions along the named axis for the given target dimension,
# converted to mm and rounded to 2 digits.
function get_axis_coordinates(axis_name, target_dimension, coordinates)
    axis_name = Val(Symbol(axis_name))
    target_dimension = Val(target_dimension)
    axis_coordinates = get_coordinates(axis_name, get_dimension(target_dimension, coordinates))
    return round.(typeof(1.0mm), uconvert.(mm, axis_coordinates); digits=2)
end

# Reduce the coordinates array to the slice varying along one spatial dim.
# NOTE(review): assumes `coordinates` is indexed (component, x, y, z) -- confirm
# against the scan type's layout.
get_dimension(::Val{1}, coordinates) = coordinates[:, :, 1, 1]
get_dimension(::Val{2}, coordinates) = coordinates[:, 1, :, 1]
get_dimension(::Val{3}, coordinates) = coordinates[:, 1, 1, :]

# Extract the x/y/z component row from a (component, position) matrix.
get_coordinates(::Val{:x}, coordinates) = coordinates[1, :]
get_coordinates(::Val{:y}, coordinates) = coordinates[2, :]
get_coordinates(::Val{:z}, coordinates) = coordinates[3, :]

# 2D data
#
# Plot recipe for a 2-D metric: a heatmap over two scan axes.  Axes whose
# coordinates decrease are reversed (together with the data) so heatmap
# limits stay ordered.
@recipe function f(metric::Metric{T, 2}; axes="nil", coordinates=nothing) where {T <: UltrasoundMetric}
    axes == "nil" && error("Keyword missing: axes=\"xy\" or axes=\"yz\"..")
    check_coordinates(coordinates)

    seriestype := :heatmap
    title --> get_metric_display_name(metric)
    xguide --> "$(axes[1])-axis"
    yguide --> "$(axes[2])-axis"
    aspect_ratio --> :equal

    metric_values_2d = metric.val
    axis_1_coordinates = get_axis_coordinates(axes[1], 1, coordinates)
    axis_2_coordinates = get_axis_coordinates(axes[2], 2, coordinates)
    axis_1_step_size = diff(axis_1_coordinates)[1]
    axis_2_step_size = diff(axis_2_coordinates)[1]

    # Flip descending axes so coordinates (and data) are ascending.
    if axis_1_step_size < 0mm
        axis_1_coordinates = reverse(axis_1_coordinates)
        axis_1_step_size = diff(axis_1_coordinates)[1]
        metric_values_2d = reverse(metric_values_2d, dims=1)
    end
    if axis_2_step_size < 0mm
        axis_2_coordinates = reverse(axis_2_coordinates)
        axis_2_step_size = diff(axis_2_coordinates)[1]
        metric_values_2d = reverse(metric_values_2d, dims=2)
    end

    # Pad limits by half a step so edge cells are fully visible.
    xlims --> (axis_1_coordinates[1]-axis_1_step_size/2, axis_1_coordinates[end]+axis_1_step_size/2)
    ylims --> (axis_2_coordinates[1]-axis_2_step_size/2, axis_2_coordinates[end]+axis_2_step_size/2)

    # heatmap expects (x, y, z') with z transposed relative to (axis1, axis2).
    return axis_1_coordinates, axis_2_coordinates, transpose(metric_values_2d)
end

# 3D data
#
# Plot recipe for a 3-D metric: a heatmap of one slice, selected by exactly
# one of the xslice/yslice/zslice keyword attributes (physical positions).
@recipe function f(metric::Metric{T, 3}; coordinates=nothing, axes="xyz") where {T <: UltrasoundMetric}
    check_coordinates(coordinates)

    # remove the keywords from the attribute dictionary to avoid potential errors
    xslice = pop!(plotattributes, :xslice)
    yslice = pop!(plotattributes, :yslice)
    zslice = pop!(plotattributes, :zslice)

    slice_position, slice_axis_name, slice_axis_dim = get_slice_info(xslice, yslice, zslice)
    slice_axis_coordinates = get_axis_coordinates(slice_axis_name, slice_axis_dim, coordinates)
    # Snap the requested position to the nearest sampled slice.
    slice_idx, nearest_slice_position = get_slice_idx(slice_axis_coordinates, slice_position)
    metric_values_2d = get_slice_data(Val(slice_axis_name), slice_idx, metric.val)
    _, slice_image_dims = get_slice_image_axes(Val(slice_axis_name))

    seriestype := :heatmap
    title := get_metric_display_name(metric) * "\n($(slice_axis_name)=$nearest_slice_position)"
    xguide --> "$(axes[slice_image_dims[1]])-axis"
    yguide --> "$(axes[slice_image_dims[2]])-axis"
    aspect_ratio --> :equal

    axis_1_coordinates = get_axis_coordinates(axes[slice_image_dims[1]], slice_image_dims[1], coordinates)
    axis_2_coordinates = get_axis_coordinates(axes[slice_image_dims[2]], slice_image_dims[2], coordinates)
    axis_1_step_size = diff(axis_1_coordinates)[1]
    axis_2_step_size = diff(axis_2_coordinates)[1]

    # Flip descending axes so coordinates (and data) are ascending.
    if axis_1_step_size < 0mm
        axis_1_coordinates = reverse(axis_1_coordinates)
        axis_1_step_size = diff(axis_1_coordinates)[1]
        metric_values_2d = reverse(metric_values_2d, dims=1)
    end
    if axis_2_step_size < 0mm
        axis_2_coordinates = reverse(axis_2_coordinates)
        axis_2_step_size = diff(axis_2_coordinates)[1]
        metric_values_2d = reverse(metric_values_2d, dims=2)
    end

    # Pad limits by half a step so edge cells are fully visible.
    xlims --> (axis_1_coordinates[1]-axis_1_step_size/2, axis_1_coordinates[end]+axis_1_step_size/2)
    ylims --> (axis_2_coordinates[1]-axis_2_step_size/2, axis_2_coordinates[end]+axis_2_step_size/2)

    return axis_1_coordinates, axis_2_coordinates, transpose(metric_values_2d)
end

# Validate that exactly one slice keyword is set (the other two are NaN) and
# return its position, axis symbol, and dimension index.
function get_slice_info(xslice, yslice, zslice)
    if reduce(+, isnan.([xslice, yslice, zslice])) != 2
        error("Only one of the keyword arguments must be set: xslice, yslice, or zslice")
    end
    if !isnan(xslice)
        slice_position = xslice
        slice_axis_name = :x
        slice_axis_dim = 1
    elseif !isnan(yslice)
        slice_position = yslice
        slice_axis_name = :y
        slice_axis_dim = 2
    elseif !isnan(zslice)
        slice_position = zslice
        slice_axis_name = :z
        slice_axis_dim = 3
    end
    return slice_position, slice_axis_name, slice_axis_dim
end

# Index (and actual position) of the sampled slice closest to the request.
function get_slice_idx(slice_axis_positions, slice_position)
    slice_idx = findnearest(slice_axis_positions, uconvert(u"mm", slice_position))
    nearest_slice_position = slice_axis_positions[slice_idx]
    return slice_idx, nearest_slice_position
end

# Index of the element of A nearest to t.
findnearest(A::AbstractArray,t) = findmin(abs.(A.-t))[2]

# Extract the 2-D plane orthogonal to the named axis.
get_slice_data(::Val{:x}, slice_idx, data) = data[slice_idx, :, :]
get_slice_data(::Val{:y}, slice_idx, data) = data[:, slice_idx, :]
get_slice_data(::Val{:z}, slice_idx, data) = data[:, :, slice_idx]

# Remaining image axes (labels and dimension indices) for a slice axis.
get_slice_image_axes(::Val{:x}) = "yz", [2,3]
get_slice_image_axes(::Val{:y}) = "xz", [1,3]
get_slice_image_axes(::Val{:z}) = "xy", [1,2]
using System;

namespace WarOfEmpires.Database.ReferenceEntities {
    /// <summary>
    /// Base class for lookup/reference table entities whose primary key is an
    /// enum value.
    /// </summary>
    /// <typeparam name="TEnum">Enum type used as the entity identifier.</typeparam>
    public abstract class BaseReferenceEntity<TEnum> where TEnum : Enum {
        /// <summary>Enum value acting as the primary key.</summary>
        public virtual TEnum Id { get; set; }

        /// <summary>Display name of the reference value.</summary>
        public virtual string Name { get; set; }
    }
}
<?php

namespace App\Http\Controllers;

use App\bookRoom;
use App\Http\Requests\carRent;
use App\Http\Requests\RoomRequest;
use App\Post;
use App\rent_car;
use Illuminate\Http\Request;

/**
 * Public-facing pages plus the car-rental and room-booking form handlers.
 */
class PublicController extends Controller
{
    //

    /** Landing page listing every post. */
    public function index()
    {
        $posts = Post::all();

        return view('welcome', ['posts' => $posts]);
    }

    /** Detail page for one post (route-model bound). */
    public function singlePost(Post $post)
    {
        return view('singlePosst', ['post' => $post]);
    }

    public function about()
    {
        return view('about');
    }

    public function contact()
    {
        return view('contact');
    }

    public function slidePhoto()
    {
        return view('slider');
    }

    /** Car-rental request form. */
    public function carRent()
    {
        return view('carRent');
    }

    /** Persist a validated car-rental request. */
    public function bookedCar(carRent $request)
    {
        $rental = new rent_car();
        $rental->pic_up_location = $request['pic_up_location'];
        $rental->pic_up_date = $request['picUpDate'];
        $rental->return_date = $request['returnDate'];
        $rental->car_type = $request['carType'];
        $rental->mobile_number = $request['mobile'];
        $rental->save();

        return back()->with('success', 'Post successfully created');
    }

    /** Room-booking form. */
    public function roomBook()
    {
        return view('roomBook');
    }

    /** Persist a validated room booking. */
    public function roomBooked(RoomRequest $request)
    {
        $booking = new bookRoom();
        $booking->pic_up_date = $request['picUpDate'];
        $booking->return_date = $request['returnDate'];
        $booking->room_number = $request['roomType'];
        $booking->mobile_number = $request['mobile'];
        $booking->save();

        return back()->with('success', 'Post successfully created');
    }
}
# Returns every string in `a` whose length equals the maximum length.
# Returns [] for an empty input (max of an empty collection is nil, which
# never equals an element's size).
#
# FIX: the previous version recomputed `a.map { |e| e.size }.max` inside the
# filter block for every element, making the call O(n^2); the maximum is now
# computed once.
def allLongestStrings(a)
  max_len = a.map(&:size).max
  a.select { |e| e.size == max_len }
end
///////////////////////////////////////////////////////////////////////////// // AGCEventDef.cpp : Implementation of CAGCEventDef // #include "pch.h" #include <AGC.h> #include "AGCEventDef.h" #include "resource.h" #include <..\TCLib\ObjectLock.h> ///////////////////////////////////////////////////////////////////////////// // TCSimpleLock Adaptor class TCSimpleLock { // Construction public: TCSimpleLock(LONG& nSync) : m_nSync(nSync) {} // Operations public: void Lock() { while (InterlockedExchange(&m_nSync, 1)) Sleep(10); } void Unlock() { InterlockedExchange(&m_nSync, 0); } // Data Members protected: LONG& m_nSync; }; ///////////////////////////////////////////////////////////////////////////// // CAGCEventDef ///////////////////////////////////////////////////////////////////////////// // Include the XML/XSL-generated files #include <AGCEventsRCH.h> #include <AGCEventsCPP.h> ///////////////////////////////////////////////////////////////////////////// // Static Initialization LONG CAGCEventDef::s_nInitSync = 0; bool CAGCEventDef::s_bInitialized = false; CAGCEventDef::XNameMap* CAGCEventDef::s_pNameMap = NULL; CAGCEventDef CAGCEventDef::s_Instance; /////////////////////////////////////////////////////////////////////////// // Construction / Destruction CAGCEventDef::CAGCEventDef() { // Debug-only data verification #ifdef _DEBUG { // Validate that all table entries are in ascending order const XEventDef* a = begin(); const XEventDef* b = a + 1; while (end() != b) assert(XLess()(*a++, *b++)); // Validate that all group scopes match int nIndent = 0; for (const XEventDef* it = begin(); end() != it; ++it) { assert(-1 <= it->m_nIndent && it->m_nIndent <= 1); nIndent += it->m_nIndent; assert(0 <= nIndent); } assert(0 == nIndent); } #endif // _DEBUG } void CAGCEventDef::Initialize() { TCSimpleLock spinLock(s_nInitSync); TCObjectLock<TCSimpleLock> lock(&spinLock); if (s_bInitialized) return; // Map all of the event names to ID's s_pNameMap = new XNameMap; for (const 
XEventDef* it = begin(); end() != it; ++it) { if (HIWORD(reinterpret_cast<DWORD>(it->m_pszName))) (*s_pNameMap)[it->m_pszName] = it->m_id; else { BSTR bstrName; ZSucceeded(GetString(it->m_pszName, &bstrName)); (*s_pNameMap)[bstrName] = it->m_id; } } s_bInitialized = true; } void CAGCEventDef::Terminate() { TCSimpleLock spinLock(s_nInitSync); TCObjectLock<TCSimpleLock> lock(&spinLock); if (!s_bInitialized) return; // Free each BSTR in the name map for (XNameMapIt it = s_pNameMap->begin(); it != s_pNameMap->end(); ++it) { const XEventDef* itFind = find(it->second); assert(itFind != end()); if (!HIWORD(reinterpret_cast<DWORD>(itFind->m_pszName))) SysFreeString(const_cast<BSTR>(it->first)); } delete s_pNameMap; s_pNameMap = NULL; s_bInitialized = false; } ///////////////////////////////////////////////////////////////////////////// // Attributes HRESULT CAGCEventDef::GetEventName(AGCEventID idEvent, BSTR* pbstrOut) { // Initialize the [out] parameter *pbstrOut = NULL; // Find the definition of the event ID const XEventDef* it = find(idEvent); if (end() == it) return E_INVALIDARG; // Get the event name string HRESULT hr = GetString(it->m_pszName, pbstrOut); // Indicate success return S_OK; } HRESULT CAGCEventDef::GetEventDescription(AGCEventID idEvent, BSTR* pbstrOut) { // Initialize the [out] parameter *pbstrOut = NULL; // Find the definition of the event ID const XEventDef* it = find(idEvent); if (end() == it) return E_INVALIDARG; // Get the event description string HRESULT hr = GetString(it->m_pszEventDescription, pbstrOut); // If not specified, use the event name string return (S_FALSE != hr) ? 
hr : GetEventName(it->m_id, pbstrOut); } HRESULT CAGCEventDef::GetEventDescription(IAGCEvent* pEvent, BSTR* pbstrOut, const XEventDef* pDefHint) { // Initialize the [out] parameter *pbstrOut = NULL; assert(pEvent); // Find the definition of the event ID, if not specified if (!pDefHint) { // Get the specified event's ID AGCEventID idEvent; RETURN_FAILED(pEvent->get_ID(&idEvent)); // Lookup the event ID in the table const XEventDef* it = find(idEvent); if (end() == it) return E_INVALIDARG; pDefHint = it; } else { #ifdef _DEBUG // Get the specified event's ID AGCEventID idEvent; RETURN_FAILED(pEvent->get_ID(&idEvent)); assert(pDefHint->m_id == idEvent); #endif // _DEBUG } // Get the event description formatting string CComBSTR bstrFmt; HRESULT hr = GetString(pDefHint->m_pszFormatDescription, &bstrFmt); RETURN_FAILED(hr); if (S_FALSE == hr) { // Use the static description return GetEventDescription(pDefHint->m_id, pbstrOut); } // Format the event assert(pEvent); return ExpandFmtString(bstrFmt, pEvent, pbstrOut); } HRESULT CAGCEventDef::GetEventParameters (IAGCEvent* pEvent, CAGCEventDef::XParamStrings& rParamStrings, const XEventDef* pDefHint) { assert(pEvent); // Find the definition of the event ID, if not specified if (!pDefHint) { // Get the specified event's ID AGCEventID idEvent; RETURN_FAILED(pEvent->get_ID(&idEvent)); // Lookup the event ID in the table const XEventDef* it = find(idEvent); if (end() == it) return E_INVALIDARG; pDefHint = it; } else { #ifdef _DEBUG // Get the specified event's ID AGCEventID idEvent; RETURN_FAILED(pEvent->get_ID(&idEvent)); assert(pDefHint->m_id == idEvent); #endif // _DEBUG } // Get the event description formatting string CComBSTR bstrFmt; HRESULT hr = GetString(pDefHint->m_pszFormatDescription, &bstrFmt); if (S_OK != hr) return hr; // Get the event parameters return ExpandParams(bstrFmt, pEvent, rParamStrings); } ///////////////////////////////////////////////////////////////////////////// // Implementation HRESULT 
CAGCEventDef::GetString(LPCOLESTR psz, BSTR* pbstrOut) { // Get the specified string as a DWORD DWORD dw = reinterpret_cast<DWORD>(psz); // If HIWORD is non-null, this is a string pointer if (HIWORD(dw)) { USES_CONVERSION; *pbstrOut = SysAllocString(psz); return *pbstrOut ? (SysStringLen(*pbstrOut) ? S_OK : S_FALSE) : E_OUTOFMEMORY; } return TCLoadBSTR(_Module.GetResourceInstance(), dw, pbstrOut); } HRESULT CAGCEventDef::ExpandFmtString(BSTR bstrFmt, IAGCEvent* pEvent, BSTR* pbstrOut) { assert(BSTRLen(bstrFmt)); assert(pbstrOut); // Create a growable stream into which we'll write IStreamPtr spStm; RETURN_FAILED(CreateStreamOnHGlobal(NULL, true, &spStm)); // Initialize the parsing data XParseData data = {pEvent, spStm, NULL, NULL, NULL, bstrFmt, NULL, false}; // Iterate through the characters of the format string XStateProc pfnState = ParseState_Base; for (UINT cch = SysStringLen(bstrFmt); cch && pfnState; --cch) { RETURN_FAILED(pfnState(data)); pfnState = data.m_pfnNextState; ++data.m_pszInput; } data.m_bEndOfString = true; while (pfnState) { RETURN_FAILED(pfnState(data)); pfnState = data.m_pfnNextState; } // Get the current seek pointer of the stream (which is it's size) LARGE_INTEGER li = {0}; ULARGE_INTEGER uli; RETURN_FAILED(spStm->Seek(li, STREAM_SEEK_CUR, &uli)); UINT cchStream = (UINT)uli.QuadPart / sizeof(OLECHAR); // Get the HGLOBAL underlying the stream HGLOBAL hGlobal = NULL; RETURN_FAILED(GetHGlobalFromStream(spStm, &hGlobal)); assert(hGlobal); // Lock the HGLOBAL LPCOLESTR pszOut = reinterpret_cast<LPCOLESTR>(GlobalLock(hGlobal)); assert(pszOut); // Create a BSTR from the byte stream *pbstrOut = SysAllocStringLen(pszOut, cchStream); // Unlock the HGLOBAL GlobalUnlock(hGlobal); // Indicate success or failure return *pbstrOut ? 
S_OK : E_OUTOFMEMORY; } HRESULT CAGCEventDef::ExpandParams(BSTR bstrFmt, IAGCEvent* pEvent, CAGCEventDef::XParamStrings& rParams) { assert(BSTRLen(bstrFmt)); // Initialize the parsing data XParseData data = {pEvent, NULL, &rParams, NULL, NULL, bstrFmt, NULL, false}; // Iterate through the characters of the format string XStateProc pfnState = ParseState_Base; for (UINT cch = SysStringLen(bstrFmt); cch && pfnState; --cch) { RETURN_FAILED(pfnState(data)); pfnState = data.m_pfnNextState; ++data.m_pszInput; } data.m_bEndOfString = true; while (pfnState) { RETURN_FAILED(pfnState(data)); pfnState = data.m_pfnNextState; } // Indicate success return S_OK; } HRESULT __fastcall CAGCEventDef::ParseState_WriteInput(CAGCEventDef::XParseData& data) { HRESULT hr = S_OK; if (data.m_pStm) hr = data.m_pStm->Write(data.m_pszInput, sizeof(*data.m_pszInput), NULL); data.m_pfnNextState = SUCCEEDED(hr) ? ParseState_Base : NULL; return hr; } HRESULT __fastcall CAGCEventDef::ParseState_WriteVar(CAGCEventDef::XParseData& data) { UINT cch = data.m_pszVarEnd - data.m_pszVarBegin; UINT cb = cch * sizeof(*data.m_pszVarEnd); HRESULT hr = S_OK; if (data.m_pStm) hr = data.m_pStm->Write(data.m_pszVarBegin, cb, NULL); data.m_pfnNextState = SUCCEEDED(hr) ? 
ParseState_Base : NULL; return hr; } HRESULT __fastcall CAGCEventDef::ParseState_End(CAGCEventDef::XParseData& data) { assert(data.m_bEndOfString); data.m_pfnNextState = NULL; return S_OK; } HRESULT __fastcall CAGCEventDef::ParseState_Base(CAGCEventDef::XParseData& data) { if (data.m_bEndOfString) return ParseState_End(data); if (OLESTR('%') != *data.m_pszInput) return ParseState_WriteInput(data); data.m_pszVarBegin = data.m_pszInput + 1; data.m_pfnNextState = ParseState_InVar; return S_OK; } HRESULT __fastcall CAGCEventDef::ParseState_InVar(CAGCEventDef::XParseData& data) { if (data.m_bEndOfString) { --data.m_pszVarBegin; data.m_pszVarEnd = data.m_pszInput; RETURN_FAILED(ParseState_WriteVar(data)); return ParseState_End(data); } if (OLESTR('%') == *data.m_pszInput) { if (data.m_pszInput == data.m_pszVarBegin) return ParseState_WriteInput(data); data.m_pszVarEnd = data.m_pszInput; return ParseState_ProcessVar(data); } return S_OK; } HRESULT __fastcall CAGCEventDef::ParseState_ProcessVar(CAGCEventDef::XParseData& data) { // Get the variable as a VT_BSTR variant UINT cch = data.m_pszVarEnd - data.m_pszVarBegin; CComVariant varKey; V_VT(&varKey) = VT_BSTR; V_BSTR(&varKey) = ::SysAllocStringLen(data.m_pszVarBegin, cch); if (!V_BSTR(&varKey)) { data.m_pfnNextState = NULL; return E_OUTOFMEMORY; } // Lookup the variable in the event's properties CComVariant varValue; HRESULT hr = data.m_pEvent->get_Property(&varKey, &varValue); if (FAILED(hr)) { data.m_pfnNextState = NULL; return hr; } // Just write the variable name as-is if it doesn't exist in the event if (VT_EMPTY == V_VT(&varValue)) { // Include the leading and trailing '%' characters --data.m_pszVarBegin; ++data.m_pszVarEnd; if (data.m_pParams) { cch = data.m_pszVarEnd - data.m_pszVarBegin; BSTR bstrValue = SysAllocStringLen(data.m_pszVarBegin, cch); if (!bstrValue) { data.m_pfnNextState = NULL; return E_OUTOFMEMORY; } data.m_pParams->push_back(bstrValue); } return ParseState_WriteVar(data); } // Convert the variant 
to a string if (VT_BSTR != V_VT(&varValue)) { VariantChangeTypeEx(&varValue, &varValue, GetThreadLocale(), VARIANT_LOCALBOOL | VARIANT_ALPHABOOL, VT_BSTR); if (VT_BSTR != V_VT(&varValue)) { data.m_pfnNextState = ParseState_Base; return S_OK; } } UINT cchValue = BSTRLen(V_BSTR(&varValue)); if (!cchValue) { if (data.m_pParams) { BSTR bstrValue = SysAllocString(L""); if (!bstrValue) { data.m_pfnNextState = NULL; return E_OUTOFMEMORY; } data.m_pParams->push_back(bstrValue); } data.m_pfnNextState = ParseState_Base; return S_OK; } // Write the value string data.m_pszVarBegin = V_BSTR(&varValue); data.m_pszVarEnd = data.m_pszVarBegin + cchValue; RETURN_FAILED(ParseState_WriteVar(data)); if (data.m_pParams) { data.m_pParams->push_back(V_BSTR(&varValue)); V_VT(&varValue) = VT_EMPTY; // Detaches the BSTR from the VARIANT } // Indicate success return S_OK; }
# == Schema Information
#
# Table name: bank_accounts
#
#  id                         :integer          not null, primary key
#  name                       :string
#  account_type_description   :string
#  account_no                 :string
#  user_id                    :integer
#  institution_id             :integer
#  opening_balance            :integer
#  closing_balance            :integer
#  created_at                 :datetime         not null
#  updated_at                 :datetime         not null
#  closing_date               :date
#  account_category           :string           default("asset")
#  allow_default_allocations  :boolean          default(FALSE)
#  default_sub_account_amount :integer          default(0)
#  status                     :string           default("open")
#  account_type               :string           default("normal")
#  statement_day              :integer
#  payment_due_day            :integer
#  is_cash                    :boolean          default(TRUE)
#  import_format              :string           default("")
#  household_id               :bigint(8)
#

# Shared examples for the credit-card behaviour mixed into BankAccount.
shared_examples_for "CreditCard" do
  describe "#is_credit_card" do
    it "returns true if account_type == 'credit_card'" do
      bank_account = build(:bank_account, account_type: 'credit_card')
      expect(bank_account.is_credit_card).to eq true
      expect(bank_account.credit_card?).to eq true
    end

    # Fixed: this description previously duplicated the example above even
    # though it asserts the opposite outcome.
    it "returns false if account_type != 'credit_card'" do
      bank_account = build(:bank_account, account_type: 'normal')
      expect(bank_account.is_credit_card).to eq false
      expect(bank_account.credit_card?).to eq false
    end
  end

  describe "#add_brought_forward_transactions" do
    before do
      @credit_card = create(:bank_account, account_type: 'credit_card')
      @start_date = Date.new(2015,06,23)
      @end_date = Date.new(2015,07,22)
      @inside_date = Date.new(2015,07,01)
      @inside_date_2 = Date.new(2015,07,05)
      @outside_date = Date.new(2015,07,23)
    end

    # Two unpaid withdrawals inside the statement period; most examples below
    # operate on these.
    before do
      @credit_card.transactions << build(:unpaid_transaction, transaction_date: @inside_date,
                                         withdrawal_amount: 400_00, deposit_amount: 0)
      @credit_card.transactions << build(:unpaid_transaction, transaction_date: @inside_date,
                                         withdrawal_amount: 150_00, deposit_amount: 0)
    end

    it "does nothing if the account is not a credit card" do
      @bank_account = create(:bank_account, account_type: 'normal')
      result = @bank_account.add_brought_forward_transactions(@start_date, @end_date)
      expect(result).to eq false
    end

    it "calls #build_transactions_to_bring_forward" do
      expect(@credit_card).to receive(:build_transactions_to_bring_forward)
        .with(@start_date, @end_date).and_call_original
      @credit_card.add_brought_forward_transactions(@start_date, @end_date)
    end

    it "calls #build_adjustment_transaction" do
      expect(@credit_card).to receive(:build_adjustment_transaction)
        .with(@start_date, @end_date).and_call_original
      @credit_card.add_brought_forward_transactions(@start_date, @end_date)
    end

    # Fixed description: this example only asserts the 'paid' status; the
    # brought_forward_status is covered by the next example.
    it "marks the original transactions as 'paid'" do
      @credit_card.add_brought_forward_transactions(@start_date, @end_date)
      expect(@credit_card.transactions.first.status).to eq 'paid'
      expect(@credit_card.transactions.second.status).to eq 'paid'
    end

    it "marks the original transactions as brought_forward_status = 'brought_forward'" do
      @credit_card.add_brought_forward_transactions(@start_date, @end_date)
      expect(@credit_card.transactions.first.brought_forward_status).to eq 'brought_forward'
      expect(@credit_card.transactions.second.brought_forward_status).to eq 'brought_forward'
    end

    context "when there is a net balance unpaid" do
      it "adds brought_forward transactions and the adjustment transaction" do
        @credit_card.add_brought_forward_transactions(@start_date, @end_date)
        expect(@credit_card.transactions.size).to eq 5 # 2 original transactions, 2 copies + 1 adjustment
      end
    end

    context "when there is no net balance unpaid" do
      it "adds brought_forward transactions only (no adjustment transaction)" do
        # A deposit that exactly cancels the two withdrawals from the before block.
        balancing_transaction = build(:unpaid_transaction, transaction_date: @inside_date,
                                      withdrawal_amount: 0, deposit_amount: 400_00 + 150_00)
        @credit_card.transactions << balancing_transaction
        adjustment = @credit_card.build_adjustment_transaction(@start_date, @end_date)
        expect(adjustment.net_amount).to eq 0

        @credit_card.add_brought_forward_transactions(@start_date, @end_date)
        # 3 original transactions, 3 copies, no adjustment
        expect(@credit_card.transactions.size).to eq 6
      end
    end
  end

  describe "#build_transactions_to_bring_forward" do
    before do
      @credit_card = create(:bank_account, account_type: 'credit_card')
      @start_date = Date.new(2015,06,23)
      @end_date = Date.new(2015,07,22)
      @inside_date = Date.new(2015,07,01)
      @inside_date_2 = Date.new(2015,07,05)
      @outside_date = Date.new(2015,07,23)
    end

    it "returns new transactions for every transaction not paid in the credit period" do
      @credit_card.transactions << build(:transaction, transaction_date: @inside_date, status:'unpaid')
      @credit_card.transactions << build(:transaction, transaction_date: @inside_date_2, status:'unpaid')
      new_transactions = @credit_card.build_transactions_to_bring_forward(@start_date, @end_date)
      expect(new_transactions.size).to eq 2
    end

    it "ignores 'paid' transactions in the credit period" do
      @credit_card.transactions << build(:transaction, transaction_date: @inside_date, status:'paid')
      new_transactions = @credit_card.build_transactions_to_bring_forward(@start_date, @end_date)
      expect(new_transactions.size).to eq 0
    end

    it "ignores 'unpaid' transactions outside the credit period" do
      @credit_card.transactions << build(:transaction, transaction_date: @outside_date, status:'unpaid')
      new_transactions = @credit_card.build_transactions_to_bring_forward(@start_date, @end_date)
      expect(new_transactions.size).to eq 0
    end

    context "when an unpaid transaction exists" do
      before do
        @credit_card.transactions << build(:transaction, transaction_date: @inside_date,
                                           description: 'Racing Wheel', status:'unpaid')
        new_transactions = @credit_card.build_transactions_to_bring_forward(@start_date, @end_date)
        @added_transaction = new_transactions.first
      end

      it "marks the new transactions with transaction date as one day after the end date" do
        expect(@added_transaction.transaction_date).to eq (@end_date + 1)
      end

      # NOTE: a second, byte-identical copy of this example was removed.
      it "marks the new transactions as brought_forward_status = 'added'" do
        expect(@added_transaction.brought_forward_status).to eq 'added'
      end

      it "makes the description of those new transactions = old description + (B/F)" do
        expect(@added_transaction.description).to eq 'Racing Wheel (B/F)'
      end
    end
  end

  describe "#build_adjustment_transaction" do
    before do
      @credit_card = create(:bank_account, account_type: 'credit_card')
      @start_date = Date.new(2015,06,23)
      @end_date = Date.new(2015,07,22)
      @inside_date = Date.new(2015,07,01)
      @credit_card.transactions << build(:transaction, transaction_date: @inside_date,
                                         withdrawal_amount: 400_00, deposit_amount: 10_00,
                                         description: 'Racing Wheel', status:'unpaid')
      @credit_card.transactions << build(:transaction, transaction_date: @inside_date,
                                         withdrawal_amount: 150_00, deposit_amount: 20_00,
                                         description: 'Pedals', status:'unpaid')
      @adjustment_transaction = @credit_card.build_adjustment_transaction(@start_date, @end_date)
    end

    it "creates a transaction with withdrawal_amount = reversal of sum of all withdrawals" do
      expect(@adjustment_transaction.withdrawal_amount).to eq ( 400_00 + 150_00 ) * -1
    end

    it "creates a transaction with deposit_amount = reversal of sum of all deposits" do
      expect(@adjustment_transaction.deposit_amount).to eq ( 10_00 + 20_00 ) * -1
    end

    it "creates a transaction with description as 'Balance B/F Adj Entry'" do
      expect(@adjustment_transaction.description).to eq 'Balance B/F Adj Entry'
    end

    it "creates a transaction with transaction date one day after the end date" do
      expect(@adjustment_transaction.transaction_date).to eq @end_date + 1
    end

    # Fixed description: the assertion (and implementation) use 'paid', the
    # old description said 'unpaid'.
    it "creates a transaction with status = 'paid'" do
      expect(@adjustment_transaction.status).to eq 'paid'
    end

    it "creates a transaction with brought_forward_status = 'adjustment'" do
      expect(@adjustment_transaction.brought_forward_status).to eq 'adjustment'
    end
  end

  describe "#remove_brought_forward_transactions" do
    before do
      @credit_card = create(:bank_account, account_type: 'credit_card')
      @start_date = Date.new(2015,06,23)
      @end_date = Date.new(2015,07,22)
      @inside_date = Date.new(2015,07,01)
      @inside_date_2 = Date.new(2015,07,05)
      @outside_date = Date.new(2015,07,23)
      @first = build(:transaction, transaction_date: @inside_date,
                     withdrawal_amount: 400_00, deposit_amount: 10_00,
                     description: 'Racing Wheel', status:'unpaid')
      @second = build(:transaction, transaction_date: @inside_date,
                      withdrawal_amount: 150_00, deposit_amount: 20_00,
                      description: 'Pedals', status:'unpaid')
      @credit_card.transactions << @first
      @credit_card.transactions << @second
      @credit_card.add_brought_forward_transactions(@start_date, @end_date)
    end

    it "changes 'brought_forward' transactions in the period back to 'unpaid'" do
      @credit_card.remove_brought_forward_transactions(@start_date, @end_date)
      @first.reload
      @second.reload
      expect(@first.status).to eq 'unpaid'
      expect(@second.status).to eq 'unpaid'
    end

    it "sets the brought_forward_status of 'brought_forward' transactions to nil" do
      @credit_card.remove_brought_forward_transactions(@start_date, @end_date)
      @first.reload
      @second.reload
      expect(@first.brought_forward_status).to eq nil
      expect(@second.brought_forward_status).to eq nil
    end

    it "removes the transactions added in the next period" do
      expect(@credit_card.transactions.size).to eq 5
      expect{
        @credit_card.remove_brought_forward_transactions(@start_date, @end_date)
      }.to change{ Transaction.count }.by(-3)
    end
  end

  describe "Credit Card dates" do
    context "when statement_day < current day" do
      before do
        @credit_card = build(:bank_account, account_type: 'credit_card', statement_day: 10)
        @today = Date.new(2015, 10, 11)
      end

      it "#current_period_statement_start returns the day after statement_day in the current month" do
        expect(@credit_card.current_period_statement_start(@today)).to eq Date.new(2015, 10, 11)
      end

      it "#current_period_statement_end returns the date of the statement_day in the next month" do
        expect(@credit_card.current_period_statement_end(@today)).to eq Date.new(2015, 11, 10)
      end

      it "#previous_period_statement_start returns the date after the statement_day in the previous month" do
        expect(@credit_card.previous_period_statement_start(@today)).to eq Date.new(2015, 9, 11)
      end

      it "#previous_period_statement_end returns the statement_day in the current month" do
        expect(@credit_card.previous_period_statement_end(@today)).to eq Date.new(2015, 10, 10)
      end
    end

    context "when current day == statement_day" do
      before do
        @credit_card = build(:bank_account, account_type: 'credit_card', statement_day: 10)
        @today = Date.new(2015, 12, 10)
      end

      it "#current_period_statement_start returns the day after statement_day in the previous month" do
        expect(@credit_card.current_period_statement_start(@today)).to eq Date.new(2015, 11, 11)
      end

      it "#current_period_statement_end returns the statement_day in the current month" do
        expect(@credit_card.current_period_statement_end(@today)).to eq Date.new(2015, 12, 10)
      end

      it "#previous_period_statement_start returns the day after the statement_day in the 2nd month prior" do
        expect(@credit_card.previous_period_statement_start(@today)).to eq Date.new(2015, 10, 11)
      end

      it "#previous_period_statement_end returns the statement_day in the previous month" do
        expect(@credit_card.previous_period_statement_end(@today)).to eq Date.new(2015, 11, 10)
      end
    end

    context "when current day < statement_day" do
      before do
        @credit_card = build(:bank_account, account_type: 'credit_card', statement_day: 10)
        @today = Date.new(2015, 1, 1)
      end

      it "#current_period_statement_start returns the date after statement_day in the previous month" do
        expect(@credit_card.current_period_statement_start(@today)).to eq Date.new(2014, 12, 11)
      end

      it "#current_period_statement_end returns the statement_day in the current month" do
        expect(@credit_card.current_period_statement_end(@today)).to eq Date.new(2015, 1, 10)
      end

      it "#previous_period_statement_start returns the day after statement_day in the 2nd month prior" do
        expect(@credit_card.previous_period_statement_start(@today)).to eq Date.new(2014, 11, 11)
      end

      it "#previous_period_statement_end returns the statement_day in the previous month" do
        expect(@credit_card.previous_period_statement_end(@today)).to eq Date.new(2014, 12, 10)
      end
    end

    context "when the current month doesn't have the day before the statement day" do
      before do
        @credit_card = build(:bank_account, account_type: 'credit_card', statement_day: 30)
        @today = Date.new(2015, 2, 15)
      end

      it "#current_period_statement_start returns the date after statement_day in the previous month" do
        expect(@credit_card.current_period_statement_start(@today)).to eq Date.new(2015, 1, 31)
      end

      it "#current_period_statement_end returns the last date in the current month" do
        expect(@credit_card.current_period_statement_end(@today)).to eq Date.new(2015, 2, 28)
      end
    end

    context "when payment_due_day < statement day < current day" do
      before do
        @credit_card = build(:bank_account, account_type: 'credit_card', payment_due_day: 1, statement_day: 10)
        @today = Date.new(2015, 10, 24)
      end

      # Descriptions fixed to match the asserted dates: the due date follows
      # the relevant period's statement end, not the current month.
      it "#current_period_payment_due returns the payment_due_day after the current period statement end" do
        expect(@credit_card.current_period_payment_due(@today)).to eq Date.new(2015, 12, 1)
      end

      it "#previous_period_payment_due returns the payment_due_day after the previous period statement end" do
        expect(@credit_card.previous_period_payment_due(@today)).to eq Date.new(2015, 11, 1)
      end
    end

    context "when payment_due_day < current day < statement day" do
      before do
        @credit_card = build(:bank_account, account_type: 'credit_card', payment_due_day: 1, statement_day: 10)
        @today = Date.new(2015, 2, 5)
      end

      it "#current_period_payment_due returns the payment_due_day in the next month" do
        expect(@credit_card.current_period_payment_due(@today)).to eq Date.new(2015, 3, 1)
      end

      it "#previous_period_payment_due returns the payment_due_day in the current month" do
        expect(@credit_card.previous_period_payment_due(@today)).to eq Date.new(2015, 2, 1)
      end
    end

    context "when payment_due_day > statement day" do
      before do
        @credit_card = build(:bank_account, account_type: 'credit_card', payment_due_day: 16, statement_day: 10)
        @today = Date.new(2015, 2, 15)
      end

      # Fixed description: the assertion expects the next month (2015-03-16),
      # the old description said "current month".
      it "#current_period_payment_due returns the payment_due_day in the next month" do
        expect(@credit_card.current_period_payment_due(@today)).to eq Date.new(2015, 3, 16)
      end

      it "#previous_period_payment_due returns the payment_due_day in the current month" do
        expect(@credit_card.previous_period_payment_due(@today)).to eq Date.new(2015, 2, 16)
      end
    end
  end # end of Credit Card dates
end
#!/usr/bin/env bash
# Deploy the order-service chart into the given namespace.
# Usage: ./deploy.sh <namespace> <deployment>
set -euo pipefail

export KUBE_NAMESPACE=$1
export DEPLOYMENT=$2

# -r keeps backslashes literal; read straight into the target variable
# instead of routing the value through $REPLY.
read -rp "Kube Api Endpoint: " KUBE_API_ENDPOINT
export KUBE_API_ENDPOINT

helm upgrade \
  --install \
  --force \
  --set ingress.hosts[0]=api.order-service.${KUBE_API_ENDPOINT} \
  --set ingress.tls[0].hosts[0]=api.order-service.${KUBE_API_ENDPOINT} \
  --set ingress.tls[0].secretName=order-service-tls-cert \
  --namespace=${KUBE_NAMESPACE} \
  ${KUBE_NAMESPACE} \
  helm/${DEPLOYMENT}
# Tests for the two-level gain model and its derivatives.
@testset "gain" begin
    # Create a system.
    Ngrid = [3,3,3]
    N = 3prod(Ngrid)  # 3 field components per grid point
    ind_in = 5:7  # indices lying inside the gain region
    # εc and D₀ describe the dielectric / pump profile of the system; they are
    # set up for context but not read by the assertions below — presumably kept
    # so the setup mirrors the other testsets (TODO confirm).
    εc = ones(ComplexF64, N)
    εc[ind_in] .= 12+0.1im
    m = 1
    ωₐ = 1.0  # atomic transition frequency
    γ˔ = 1.0  # polarization relaxation rate
    D₀ = fill(0.01, N)
    D₀[ind_in] .= 1.0

    # M lasing modes with random frequencies ω, amplitudes a², and fields Ψ.
    M = 4
    ω = rand(M)
    a² = rand(M)
    Ψ = randn(ComplexF64,N,M)
    ψ = [Ψ[:,j] for j = 1:M]  # per-mode field vectors (columns of Ψ)

    # Check automatic differentiation performed in the GainProfile constructor produces correct
    # results.
    gain = gen_gain_2lv(ωₐ, γ˔)
    gp = GainProfile(gain, N)

    # Reference closures generated directly by SALTBase for comparison.
    gain′ = SALTBase.gen_gain′_2lv(ωₐ, γ˔)
    abs2gain = SALTBase.gen_abs2gain_2lv(ωₐ, γ˔)
    abs2gain′ = SALTBase.gen_abs2gain′_2lv(ωₐ, γ˔)

    # The derivative functions stored in the profile must agree with the
    # hand-generated reference closures at the sampled frequencies.
    @test gp.gain′[1].(ω) ≈ gain′.(ω)
    @test gp.abs2gain[1].(ω) ≈ abs2gain.(ω)
    @test gp.abs2gain′[1].(ω) ≈ abs2gain′.(ω)

    # Hole-burning term: must exceed 1 everywhere and equal
    # 1 + Σⱼ |Ψⱼ|² abs2gain(ωⱼ) a²ⱼ.
    hb = Vector{Float64}(undef, N)
    hole_burning!(hb, abs2gain, ω, a², [abs2.(ψ[m]) for m = 1:M])
    @test all(hb .> 1)
    @test hb ≈ 1 .+ abs2.(Ψ) * (abs2gain.(ω) .* a²)
end  # @testset "gain"
--- title: Toss site: https://toss.im logo: toss.svg ---
package github.aq.mvcstudentdemo.service;

import github.aq.mvcstudentdemo.dao.StudentDao;
import github.aq.mvcstudentdemo.model.Student;

import java.util.Collection;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Service layer for students; delegates every CRUD operation to {@link StudentDao}.
 */
@Component
public class StudentService {

    /** DAO performing the actual persistence work. */
    @Autowired
    private StudentDao studentDao;

    /** @return every student known to the DAO */
    public Collection<Student> getAllStudents() {
        return studentDao.getAllStudents();
    }

    /** @return the student with the given id, as resolved by the DAO */
    public Student getStudentById(int id) {
        return studentDao.getStudentById(id);
    }

    /** Removes the student with the given id. */
    public void deleteStudentById(int id) {
        studentDao.deleteStudentById(id);
    }

    /** Persists changes to an existing student. */
    public void updateStudent(Student student) {
        studentDao.updateStudent(student);
    }

    /** Stores a new student. */
    public void insertStudent(Student student) {
        studentDao.insertStudent(student);
    }
}
<?php

namespace common\domain\models;

/**
 * Thin facade over \common\domain\utils\Changes so model code does not depend
 * on the utils namespace directly.
 */
class Changes
{
    /**
     * @param Change[] $changes
     *
     * @return array with new and old value maps
     */
    public static function toArray(array $changes)
    {
        $converted = \common\domain\utils\Changes::toArray($changes);

        return $converted;
    }
}
package io.nuvo.nio

import io.nuvo.runtime.Config._
// ByteOrder was referenced throughout this file but never imported; import it
// explicitly alongside ByteBuffer.
import java.nio.{ByteBuffer, ByteOrder}

/**
 * Trait for buffer allocators.
 */
trait BufferAllocator {
  /**
   * Allocate a buffer using the configured default size and sets it into native byte order
   * @return the allocated buffer
   */
  def allocate(): RawBuffer

  /**
   * Allocate a buffer optimized for I/O using the configure default size and sets it into native byte order
   * @return the allocated buffer
   */
  def allocateDirect(): RawBuffer

  /**
   * Allocate a buffer and sets it into native byte order
   * @param size the buffer size
   * @return the allocated buffer
   */
  def allocate(size: Int): RawBuffer

  /**
   * Allocate a buffer optimized for I/O and sets it into native byte order
   * @param size the buffer size
   * @return the allocated buffer
   */
  def allocateDirect(size: Int): RawBuffer

  /**
   * May release the resources associated with this buffer.
   *
   * @param buf the buffer to release
   */
  def release(buf: RawBuffer)
}

/** Allocator that creates a fresh buffer on every request and never pools. */
object SimpleBufferAllocator extends BufferAllocator {

  final def allocate(size: Int): RawBuffer = {
    val buf = new RawBuffer(ByteBuffer.allocate(size))
    buf order(ByteOrder nativeOrder)
  }

  final def allocateDirect(size: Int): RawBuffer = {
    val buf = new RawBuffer(ByteBuffer.allocateDirect(size))
    buf order(ByteOrder nativeOrder)
  }

  // Nothing to reclaim: buffers are owned by the garbage collector.
  final def release(buf: RawBuffer) {}

  /**
   * Allocate a buffer using the configured default size and sets it into native byte order
   * @return the allocated buffer
   */
  final def allocate(): RawBuffer = {
    val buf = new RawBuffer(ByteBuffer.allocate(Networking.defaultBufferSize))
    buf order(ByteOrder nativeOrder)
  }

  /**
   * Allocate a buffer optimized for I/O using the configure default size and sets it into native byte order
   * @return the allocated buffer
   */
  final def allocateDirect(): RawBuffer = {
    val buf = new RawBuffer(ByteBuffer.allocateDirect(Networking.defaultBufferSize))
    buf order(ByteOrder nativeOrder)
  }
}

/**
 * Allocator that recycles default-sized direct buffers through a free list.
 * Default-size requests are served from the pool; other sizes fall back to
 * [[SimpleBufferAllocator]].
 */
object CircularBufferAllocator extends BufferAllocator {

  // Pre-populated free list of default-sized direct buffers.
  // NOTE(review): release() re-adds any default-sized buffer, so the list can
  // grow beyond defaultBufferCacheSize — confirm whether a cap is intended.
  private var bufferList = (1 to Networking.defaultBufferCacheSize).toList map ( _ => {
    val b = new RawBuffer(ByteBuffer.allocateDirect(Networking.defaultBufferSize))
    b order (ByteOrder nativeOrder)
    b
  })

  /**
   * Allocate a buffer and sets it into native byte order
   * @param size the buffer size
   * @return the allocated buffer
   */
  final def allocate(size: Int): RawBuffer = SimpleBufferAllocator.allocate(size)

  /**
   * Allocate a buffer optimized for I/O and sets it into native byte order
   * @param size the buffer size
   * @return the allocated buffer
   */
  final def allocateDirect(size: Int): RawBuffer = SimpleBufferAllocator.allocateDirect(size)

  /**
   * May release the resources associated with this buffer.
   *
   * @param buf the buffer to release
   */
  def release(buf: RawBuffer) = {
    if (buf.capacity == Networking.defaultBufferSize) synchronized {
      buf.clear()
      buf order (ByteOrder nativeOrder)
      this.bufferList = buf +: bufferList
    }
  }

  /**
   * Allocate a buffer using the configured default size and sets it into native byte order.
   * Fixed: the head of the free list is now REMOVED before being handed out;
   * previously it was returned while staying in the pool, so every caller
   * received the same shared buffer.
   * @return the allocated buffer
   */
  def allocate(): RawBuffer = synchronized {
    bufferList match {
      case x :: xs =>
        bufferList = xs
        x
      case Nil => SimpleBufferAllocator.allocate(Networking.defaultBufferSize)
    }
  }

  /**
   * Allocate a buffer optimized for I/O using the configure default size and sets it into native byte order.
   * Fixed: pops the pooled buffer from the free list (same bug as allocate()).
   * @return the allocated buffer
   */
  def allocateDirect(): RawBuffer = synchronized {
    bufferList match {
      case x :: xs =>
        bufferList = xs
        x
      case Nil => SimpleBufferAllocator.allocateDirect(Networking.defaultBufferSize)
    }
  }
}
OCLineReporter
==============

Place your OCLint JSON reports in this directory.

### Example

![Overview](../screenshots/ReportsDirectory.png)
// DESCRIPTION = Disallow unused variables (no-unused-vars)
// STATUS = 2
/* eslint no-console: 0*/
/* eslint @typescript-eslint/no-unused-vars: 0*/
/* eslint @typescript-eslint/no-non-null-assertion: 0*/
/* eslint @typescript-eslint/consistent-type-assertions: 0*/

// <!START
// Bad
/*
// Write-only variables are not considered as used.
let y = 10;
y = 5;

// A read for a modification of itself is not considered as used.
let z = 0;
z = z + 1;
*/

// Good
// y is read below (in the computation of z), so it counts as used.
let y = 10;
y = 5;

// z is read by console.log, so this is no longer a self-modification-only use.
let z = 0;
z = z + y;

console.log(z);
// END!>
# frozen_string_literal: true FactoryBot.define do sequence(:filepath) do |n| "#{n}_#{Faker::File.file_name(nil, nil, 'txt')}" end end
/*
   Copyright 2021 WeAreFrank!

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
package nl.nn.adapterframework.util;

import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Helpers for reading and writing servlet cookies.
 */
public class CookieUtil {

	/**
	 * Looks up a cookie on the request by name.
	 *
	 * @param request the incoming request, whose cookies are scanned
	 * @param name the cookie name to match exactly
	 * @return the first cookie with that name, or {@code null} when absent
	 */
	public static Cookie getCookie(HttpServletRequest request, String name) {
		Cookie[] requestCookies = request.getCookies();
		if (requestCookies == null) {
			return null;
		}
		for (int i = 0; i < requestCookies.length; i++) {
			if (name.equals(requestCookies[i].getName())) {
				return requestCookies[i];
			}
		}
		return null;
	}

	/**
	 * Adds the cookie to the response with hardened attributes: the given
	 * max-age, Secure mirroring the request's transport, and HttpOnly.
	 *
	 * @param request used only to decide whether the Secure flag applies
	 * @param response the response the cookie is written to
	 * @param cookie the cookie to register
	 * @param maxAge lifetime in seconds passed to {@link Cookie#setMaxAge(int)}
	 */
	public static void addCookie(HttpServletRequest request, HttpServletResponse response, Cookie cookie, int maxAge) {
		cookie.setMaxAge(maxAge);
		cookie.setSecure(request.isSecure());
		cookie.setHttpOnly(true);
		response.addCookie(cookie);
	}
}
package main import ( "net" "net/http" _ "net/http/pprof" "github.com/spf13/cobra" "github.com/liupzmin/weewoe/config" "github.com/liupzmin/weewoe/log" "github.com/liupzmin/weewoe/mail" pb "github.com/liupzmin/weewoe/proto" "github.com/liupzmin/weewoe/scrape" "github.com/robfig/cron/v3" "github.com/spf13/viper" "google.golang.org/grpc" "google.golang.org/grpc/reflection" ) const ( appName = "w2psd" shortAppDesc = "A daemon for checking distributed processes' status." longAppDesc = "w2psd is a daemon to check the status of variety of processes." ) var ( w2Flags *config.Flags rootCmd = &cobra.Command{ Use: appName, Short: shortAppDesc, Long: longAppDesc, Run: run, } ) func init() { initFlags() } func main() { if err := rootCmd.Execute(); err != nil { log.Panicf("run failed %s", err) } } func run(cmd *cobra.Command, args []string) { log.SetLevel(*w2Flags.LogLevel) scrape.Init() cronSendMail() sendAlert() go httpServer() s := startGRPCServer() select { case <-waitSignals(): log.Info("SHUTTING DOWN......") s.Stop() scrape.Stop() } } func startGRPCServer() *grpc.Server { s := grpc.NewServer() pb.RegisterStateServer(s, &scrape.State{}) reflection.Register(s) go func() { lis, err := net.Listen("tcp", ":9527") if err != nil { log.Fatalf("failed to listen: %v", err) } if err := s.Serve(lis); err != nil { log.Fatalf("grpc serve failed: %v", err) } }() return s } func cronSendMail() { m := mail.New() go func() { for _ = range scrape.SendMail { m.Run() } }() if viper.GetBool("mail.send") { c := cron.New() _, err := c.AddJob(viper.GetString("mail.cron"), m) if err != nil { log.Panic("add job panic", log.FieldErr(err)) } c.Start() } } func sendAlert() { if viper.GetBool("alert.notify") { alert := scrape.Alert{URL: viper.GetString("alert.url")} scrape.CollectorMap["process"].AddListener(alert) } } func httpServer() { http.HandleFunc("/", scrape.ProcessHandler) http.HandleFunc("/list", scrape.GetProcesses) if err := http.ListenAndServe(":9528", nil); err != nil { log.Panicf("http 
server start failed: %s", err.Error()) } } func initFlags() { w2Flags = config.NewFlags() rootCmd.PersistentFlags().StringVarP( w2Flags.LogLevel, "logLevel", "l", config.DefaultLogLevel, "Specify a log level (info, warn, debug, trace, error)", ) rootCmd.Flags().StringVarP( w2Flags.LogFile, "logFile", "", config.DefaultLogFile, "Specify the log file", ) rootCmd.Flags() }
--- html_meta: "description": "" "property=og:description": "" "property=og:title": "" "keywords": "" --- (lifecycle-methods-label)= # Use Lifecycle Methods Lifecycle methods are methods which are called on specific external events. For example the {file}`componentDidMount` method is called when the component gets added to the dom. We can use this method to do additional calls. For example in our case we want to fetch the initial data from the backend. ```{code-block} jsx :emphasize-lines: 1-3 :lineno-start: 31 :linenos: true componentDidMount() { this.props.getFaqItems(); } ``` The {file}`getFaqItems` method is mapped using the connect call. The full {file}`Faq` component will now look like this: ```{code-block} jsx :emphasize-lines: 6,16-17,31-33,95 :linenos: true import React, { Component } from "react"; import { connect } from "react-redux"; import PropTypes from "prop-types"; import FaqItem from "./FaqItem"; import { addFaqItem, getFaqItems } from "../actions"; class Faq extends Component { static propTypes = { faq: PropTypes.arrayOf( PropTypes.shape({ question: PropTypes.string.isRequired, answer: PropTypes.string.isRequired }) ), addFaqItem: PropTypes.func.isRequired, getFaqItems: PropTypes.func.isRequired }; constructor(props) { super(props); this.state = { question: "", answer: "" }; } componentDidMount() { this.props.getFaqItems(); } onChangeQuestion = (event) => { this.setState({ question: event.target.value }); } onChangeAnswer = (event) => { this.setState({ answer: event.target.value }); } onSubmit = (event) => { this.props.addFaqItem(this.state.question, this.state.answer); this.setState({ question: "", answer: "" }); event.preventDefault(); } render() { return ( <div> <ul> {this.props.faq.map((item, index) => ( <FaqItem question={item.question} answer={item.answer} index={index} /> ))} </ul> <form onSubmit={this.onSubmit}> <label> Question: <input name="question" type="text" value={this.state.question} onChange={this.onChangeQuestion} /> </label> 
<label> Answer: <textarea name="answer" value={this.state.answer} onChange={this.onChangeAnswer} /> </label> <input type="submit" value="Add" /> </form> </div> ); } } export default connect( (state, props) => ({ faq: state.faq }), { addFaqItem, getFaqItems } )(Faq); ``` ````{admonition} Differences :class: toggle ```dpatch --- a/src/components/Faq.jsx +++ b/src/components/Faq.jsx @@ -3,7 +3,7 @@ import { connect } from "react-redux"; import PropTypes from "prop-types"; import FaqItem from "./FaqItem"; -import { addFaqItem } from "../actions"; +import { addFaqItem, getFaqItems } from "../actions"; class Faq extends Component { static propTypes = { @@ -13,7 +13,8 @@ class Faq extends Component { answer: PropTypes.string.isRequired }) ), - addFaqItem: PropTypes.func.isRequired + addFaqItem: PropTypes.func.isRequired, + getFaqItems: PropTypes.func.isRequired }; constructor(props) { @@ -27,6 +28,10 @@ class Faq extends Component { }; } + componentDidMount() { + this.props.getFaqItems(); + } + onChangeQuestion = (event) => { this.setState({ question: event.target.value @@ -89,5 +94,5 @@ export default connect( (state, props) => ({ faq: state.faq }), - { addFaqItem } + { addFaqItem, getFaqItems } )(Faq); ``` ````
# encoding: UTF-8
#
# Copyright (c) 2010-2017 GoodData Corporation. All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

require_relative 'base_action'

module GoodData
  module LCM2
    # Resolves (and, when missing, creates) the DataProduct used by later
    # LCM actions and places it into the shared params.
    class CollectDataProduct < BaseAction
      DESCRIPTION = 'Collect DataProduct to be used in the actions'

      PARAMS = define_params(self) do
        description 'Client Used for Connecting to GD'
        param :gdc_gd_client, instance_of(Type::GdClientType), required: true

        description 'DataProduct to manage'
        param :data_product, instance_of(Type::StringType), required: false

        description 'Organization Name'
        param :organization, instance_of(Type::StringType), required: false

        description 'Domain'
        param :domain, instance_of(Type::StringType), required: false

        description 'Logger'
        param :gdc_logger, instance_of(Type::GdLogger), required: true
      end

      RESULT_HEADER = [
        :data_product
      ]

      class << self
        # Looks up the data product in the domain; falls back to the id
        # 'default' when none was supplied, and creates the data product
        # if the domain does not know it yet.
        def call(params)
          params = params.to_hash
          client = params.gdc_gd_client

          domain_name = params.organization || params.domain
          # Message fixed to use the same spelling as the :organization param.
          fail "Either organization or domain has to be specified in params" unless domain_name
          domain = client.domain(domain_name) || fail("Invalid domain name specified - #{domain_name}")

          if params.key?(:data_product)
            data_product_id = params.data_product
          else
            params.gdc_logger.info "Using data product 'default' since none was specified in brick parameters"
            data_product_id = 'default'
          end

          begin
            data_product = domain.data_products(data_product_id)
          rescue RestClient::BadRequest
            # Fixed: previously logged and created with params.data_product,
            # which is nil whenever the id was defaulted above; use the
            # resolved data_product_id instead.
            params.gdc_logger.info "Can not find DataProduct #{data_product_id}, creating it instead"
            data_product = domain.create_data_product(id: data_product_id)
          end

          results = [
            {
              data_product: data_product_id
            }
          ]

          {
            results: results,
            params: {
              data_product: data_product
            }
          }
        end
      end
    end
  end
end
// Package pltype collects the message-type URI constants used by the agency
// for both the legacy Indy agent protocol and the Aries protocols (old
// did:sov prefix and the newer https://didcomm.org prefix), plus helpers
// that map protocol family strings to the gRPC API's enum values.
package pltype

import (
	pb "github.com/findy-network/findy-common-go/grpc/agency/v1"

	"github.com/golang/glog"
)

// name constants
const (
	HandshakePairwiseName = "HANDSHAKE"
)

// Protocol constants
const (
	Terminate = ""
	Nothing   = ""

	Agent       = "urn:indy:sov:agent:message_type:findy.fi" // This will be for old and EA/CA PW
	Aries       = "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec"      // This will be for all Aries protocols
	DIDOrgAries = "https://didcomm.org"                      // This will be for all Aries protocols

	CA = "urn:indy:sov:agent_api:message_type:findy.fi"
	SA = "urn:indy:sov:service_agent_api:message_type:findy.fi"

	LibindyRequestPresentationID = "libindy-request-presentation-0"
	LibindyPresentationID        = "libindy-presentation-0"

	UserAction = "user-action"

	ConnectionTrustAgent = "CONNECTION_TRUST_AGENT" // internal use only

	ProtocolConnection = "connection"
	Connection         = Agent + "/" + ProtocolConnection

	// these were for the Indy agent protocol, new Aries constants are in the
	// protocol files.
	ConnectionResponse  = Connection + "/1.0/response"
	ConnectionRequest   = Connection + "/1.0/request"
	ConnectionOffer     = Connection + "/1.0/offer"
	ConnectionHandshake = Connection + "/1.0/invite"
	ConnectionOk        = Connection + "/1.0/ok"    // terminates Acknowledgement cycle, internal use
	ConnectionError     = Connection + "/1.0/error" // if error occurs we send error payload, especially handy with ws
	ConnectionPing      = Connection + "/1.0/ping"
	ConnectionMsg       = Connection + "/1.0/msg"
	ConnectionAck       = Connection + "/1.0/acknowledgement"
)

// Routing protocol constants (message forwarding through mediators).
const (
	ProtocolRouting      = "routing/1.0/forward"
	RoutingForward       = Aries + "/" + ProtocolRouting
	DIDOrgRoutingForward = DIDOrgAries + "/" + ProtocolRouting
)

// Notification protocol constants (problem reports and acks).
const (
	ProtocolNotification = "notification"
	HandlerProblemReport = "problem-report"
	HandlerAck           = "ack"

	ProblemReport             = Aries + "/" + ProtocolNotification
	NotificationProblemReport = ProblemReport + "/1.0/" + HandlerProblemReport
	NotificationAck           = ProblemReport + "/1.0/" + HandlerAck

	DIDOrgProblemReport             = DIDOrgAries + "/" + ProtocolNotification
	DIDOrgNotificationProblemReport = DIDOrgProblemReport + "/1.0/" + HandlerProblemReport
	DIDOrgNotificationAck           = DIDOrgProblemReport + "/1.0/" + HandlerAck
)

// Issue Credential protocol constants
const (
	ProtocolIssueCredential = "issue-credential"

	HandlerIssueCredentialPropose    = "propose-credential"
	HandlerIssueCredentialOffer      = "offer-credential"
	HandlerIssueCredentialRequest    = "request-credential"
	HandlerIssueCredentialIssue      = "issue-credential"
	HandlerIssueCredentialACK        = "ack"
	HandlerIssueCredentialNACK       = "nack"
	ObjectTypeCredentialPreview      = "credential-preview"
	HandlerIssueCredentialUserAction = UserAction

	IssueCredential = Aries + "/" + ProtocolIssueCredential

	IssueCredentialPropose           = IssueCredential + "/1.0/" + HandlerIssueCredentialPropose
	IssueCredentialOffer             = IssueCredential + "/1.0/" + HandlerIssueCredentialOffer
	IssueCredentialUserAction        = IssueCredential + "/1.0/" + HandlerIssueCredentialUserAction
	IssueCredentialRequest           = IssueCredential + "/1.0/" + HandlerIssueCredentialRequest
	IssueCredentialIssue             = IssueCredential + "/1.0/" + HandlerIssueCredentialIssue
	IssueCredentialACK               = IssueCredential + "/1.0/" + HandlerIssueCredentialACK
	IssueCredentialNACK              = IssueCredential + "/1.0/" + HandlerIssueCredentialNACK
	IssueCredentialCredentialPreview = IssueCredential + "/1.0/" + ObjectTypeCredentialPreview

	DIDOrgIssueCredential                  = DIDOrgAries + "/" + ProtocolIssueCredential
	DIDOrgIssueCredentialPropose           = DIDOrgIssueCredential + "/1.0/" + HandlerIssueCredentialPropose
	DIDOrgIssueCredentialOffer             = DIDOrgIssueCredential + "/1.0/" + HandlerIssueCredentialOffer
	DIDOrgIssueCredentialUserAction        = DIDOrgIssueCredential + "/1.0/" + HandlerIssueCredentialUserAction
	DIDOrgIssueCredentialRequest           = DIDOrgIssueCredential + "/1.0/" + HandlerIssueCredentialRequest
	DIDOrgIssueCredentialIssue             = DIDOrgIssueCredential + "/1.0/" + HandlerIssueCredentialIssue
	DIDOrgIssueCredentialACK               = DIDOrgIssueCredential + "/1.0/" + HandlerIssueCredentialACK
	DIDOrgIssueCredentialNACK              = DIDOrgIssueCredential + "/1.0/" + HandlerIssueCredentialNACK
	DIDOrgIssueCredentialCredentialPreview = DIDOrgIssueCredential + "/1.0/" + ObjectTypeCredentialPreview
)

// DID exchange aka Connection related constants
const (
	Invitation      = "invitation"
	HandlerOffer    = "offer"
	HandlerRequest  = "request"
	HandlerResponse = "response"

	AriesProtocolConnection = "connections"

	AriesConnection           = Aries + "/" + AriesProtocolConnection
	AriesConnectionInvitation = AriesConnection + "/1.0/" + Invitation
	AriesConnectionRequest    = AriesConnection + "/1.0/" + HandlerRequest
	AriesConnectionOffer      = AriesConnection + "/1.0/" + HandlerOffer // todo: not used anymore
	AriesConnectionResponse   = AriesConnection + "/1.0/" + HandlerResponse

	DIDOrgAriesConnection           = DIDOrgAries + "/" + AriesProtocolConnection
	DIDOrgAriesConnectionInvitation = DIDOrgAriesConnection + "/1.0/" + Invitation
	DIDOrgAriesConnectionRequest    = DIDOrgAriesConnection + "/1.0/" + HandlerRequest
	DIDOrgAriesConnectionOffer      = DIDOrgAriesConnection + "/1.0/" + HandlerOffer
	DIDOrgAriesConnectionResponse   = DIDOrgAriesConnection + "/1.0/" + HandlerResponse
)

// Present Proof protocol constants
const (
	ProtocolPresentProof = "present-proof"

	HandlerPresentProofPropose      = "propose-presentation"
	HandlerPresentProofRequest      = "request-presentation"
	HandlerPresentProofPresentation = "presentation"
	HandlerPresentProofACK          = "ack"
	HandlerPresentProofNACK         = "nack"
	HandlerPresentUserAction        = UserAction
	ObjectTypePresentationPreview   = "presentation-preview"

	PresentProof = Aries + "/" + ProtocolPresentProof

	PresentProofPropose      = PresentProof + "/1.0/" + HandlerPresentProofPropose
	PresentProofRequest      = PresentProof + "/1.0/" + HandlerPresentProofRequest
	PresentProofPresentation = PresentProof + "/1.0/" + HandlerPresentProofPresentation
	PresentProofUserAction   = PresentProof + "/1.0/" + HandlerPresentUserAction
	PresentProofACK          = PresentProof + "/1.0/" + HandlerPresentProofACK
	PresentProofNACK         = PresentProof + "/1.0/" + HandlerPresentProofNACK
	PresentationPreviewObj   = PresentProof + "/1.0/" + ObjectTypePresentationPreview

	DIDOrgPresentProof             = DIDOrgAries + "/" + ProtocolPresentProof
	DIDOrgPresentProofPropose      = DIDOrgPresentProof + "/1.0/" + HandlerPresentProofPropose
	DIDOrgPresentProofRequest      = DIDOrgPresentProof + "/1.0/" + HandlerPresentProofRequest
	DIDOrgPresentProofPresentation = DIDOrgPresentProof + "/1.0/" + HandlerPresentProofPresentation
	DIDOrgPresentProofUserAction   = DIDOrgPresentProof + "/1.0/" + HandlerPresentUserAction
	DIDOrgPresentProofACK          = DIDOrgPresentProof + "/1.0/" + HandlerPresentProofACK
	DIDOrgPresentProofNACK         = DIDOrgPresentProof + "/1.0/" + HandlerPresentProofNACK
	DIDOrgPresentationPreviewObj   = DIDOrgPresentProof + "/1.0/" + ObjectTypePresentationPreview
)

// Basic Message protocol constants
const (
	ProtocolBasicMessage = "basicmessage"
	HandlerMessage       = "message"

	BasicMessage     = Aries + "/" + ProtocolBasicMessage
	BasicMessageSend = BasicMessage + "/1.0/" + HandlerMessage

	DIDOrgBasicMessage     = DIDOrgAries + "/" + ProtocolBasicMessage
	DIDOrgBasicMessageSend = DIDOrgBasicMessage + "/1.0/" + HandlerMessage
)

// Trust Ping protocol constants
const (
	ProtocolTrustPing   = "trust_ping"
	HandlerPing         = "ping"
	HandlerPingResponse = "ping_response"

	TrustPing         = Aries + "/" + ProtocolTrustPing
	TrustPingPing     = TrustPing + "/1.0/" + HandlerPing
	TrustPingResponse = TrustPing + "/1.0/" + HandlerPingResponse

	DIDOrgTrustPing         = DIDOrgAries + "/" + ProtocolTrustPing
	DIDOrgTrustPingPing     = DIDOrgTrustPing + "/1.0/" + HandlerPing
	DIDOrgTrustPingResponse = DIDOrgTrustPing + "/1.0/" + HandlerPingResponse
)

// SA API msg types
const (
	SAPing = SA + "/ping/1.0/ping"

	SAIssueCredential              = SA + "/issue_credential"
	SAIssueCredentialAcceptPropose = SAIssueCredential + "/1.0/accept_propose"

	SAPresentProof              = SA + "/present_proof"
	SAPresentProofAcceptPropose = SAPresentProof + "/1.0/accept_propose"
	SAPresentProofAcceptValues  = SAPresentProof + "/1.0/accept_values"
)

// CA API msg types
const (
	CASchema       = CA + "/schema"
	CASchemaCreate = CASchema + "/1.0/create"

	CACredDef       = CA + "/credential_definition"
	CACredDefCreate = CACredDef + "/1.0/create"

	CALedger           = CA + "/ledger"
	CALedgerWriteDid   = CALedger + "/1.0/write_did"
	CALedgerGetCredDef = CALedger + "/1.0/get_cred_def"
	CALedgerGetSchema  = CALedger + "/1.0/get_schema"

	CADID       = CA + "/did"
	CADIDVerKey = CADID + "/1.0/verkey"

	CAWallet    = CA + "/wallet"
	CAWalletGet = CAWallet + "/1.0/get"

	// Protocol launchers - protocol string must match Aries protocol
	CAPairwise           = CA + "/" + AriesProtocolConnection
	CAPairwiseInvitation = CAPairwise + "/1.0/invitation"
	CAPairwiseCreate     = CAPairwise + "/1.0/create"

	// Protocol launcher - protocol string must match Aries protocol
	CATrustPing = CA + "/" + ProtocolTrustPing + "/1.0/ping"

	// Protocol launcher - protocol string must match Aries protocol
	CAGetJWT = CA + "/" + "login" + "/1.0/jwt"

	CATask       = CA + "/task"
	CATaskStatus = CATask + "/1.0/status"
	CATaskReady  = CATask + "/1.0/ready"
	CATaskList   = CATask + "/1.0/list"

	CANotify           = CA + "/notify"
	CANotifyStatus     = CANotify + "/1.0/status"
	CANotifyUserAction = CANotify + "/1.0/user-action"

	// Protocol launchers - protocol string must match Aries protocol
	CACred        = CA + "/" + ProtocolIssueCredential
	CACredRequest = CACred + "/1.0/request" // TODO
	CACredOffer   = CACred + "/1.0/propose"

	// Protocol launchers - protocol string must match Aries protocol
	CAProof        = CA + "/" + ProtocolPresentProof
	CAProofPropose = CAProof + "/1.0/propose" // TODO
	CAProofRequest = CAProof + "/1.0/request"

	// Protocol launcher - protocol string must match Aries protocol
	CABasicMessage = CA + "/" + ProtocolBasicMessage + "/1.0/send"

	CAProblemReport = CA + "/notification/1.0/problem_report"

	CAPingOwnCA = CA + "/ping/1.0/own_ca"

	CAContinuePresentProofProtocol    = CA + "/protocol/1.0/continue-present-proof"
	CAContinueIssueCredentialProtocol = CA + "/protocol/1.0/continue-issue-credential"
)

// protocolType maps a protocol family string to the gRPC API's protocol enum.
var protocolType = map[string]pb.Protocol_Type{
	AriesProtocolConnection: pb.Protocol_DIDEXCHANGE,
	ProtocolIssueCredential: pb.Protocol_ISSUE_CREDENTIAL,
	ProtocolPresentProof:    pb.Protocol_PRESENT_PROOF,
	ProtocolTrustPing:       pb.Protocol_TRUST_PING,
	ProtocolBasicMessage:    pb.Protocol_BASIC_MESSAGE,
}

// ProtocolTypeForFamily returns the gRPC protocol enum for the given protocol
// family string. Unknown families are logged and mapped to pb.Protocol_NONE.
func ProtocolTypeForFamily(family string) pb.Protocol_Type {
	if protocol, ok := protocolType[family]; ok {
		return protocol
	}
	glog.Warningf("no protocol type found for family %s", family)
	return pb.Protocol_NONE
}

// ProtocolRoleForType returns the receiver-side protocol role for the given
// handler id; unknown ids return the enum's zero value.
func ProtocolRoleForType(handlerID string) pb.Protocol_Role {
	glog.V(10).Infoln(handlerID, " ---> ", receiversRoleFromType[handlerID])
	return receiversRoleFromType[handlerID]
}

// receiversRoleFromType is a lookup table for internal protocol starter types
// and their relation to the gRPC API's protocol role. Note: these are the
// roles as seen from the message receiver's side.
var receiversRoleFromType = map[string]pb.Protocol_Role{
	HandlerRequest:                pb.Protocol_ADDRESSEE,
	HandlerIssueCredentialOffer:   pb.Protocol_ADDRESSEE,
	HandlerIssueCredentialPropose: pb.Protocol_INITIATOR,
	HandlerPresentProofRequest:    pb.Protocol_ADDRESSEE,
	HandlerPresentProofPropose:    pb.Protocol_INITIATOR,
	HandlerPing:                   pb.Protocol_ADDRESSEE,
	HandlerMessage:                pb.Protocol_ADDRESSEE,
}
%% Small playground module exercising basic Erlang constructs:
%% pattern-matched function clauses, atoms, message sending and
%% multi-line string literals.
-module(basics).
-include("include.hrl").

-export([
         fun_test/1,
         atom_test/0,
         send_test/1,
         multiline_string_test/0
        ]).

%% Clause selection: `ok` matches the first clause, any other atom the
%% guarded second clause, everything else falls through to the third.
fun_test(ok) -> ok;
fun_test(X) when is_atom(X) -> X;
fun_test(X) -> X.

%% The body is a sequence of atom expressions; only the last one
%% ('Weird Atom Because I Can') is returned, the rest are discarded.
atom_test() ->
    ok,
    a_b@c,
    'Bob',
    'Weird Atom Because I Can'.

%% Sends the atom `ok` to X (a pid or registered name); the send
%% expression evaluates to the message that was sent.
send_test(X) -> X ! ok.

%% The string literal spans two source lines, so it contains an
%% embedded newline between "abc" and "def".
multiline_string_test() ->
    "abc
def".
! chk_long_statement.f90
! Check: does the compiler support statements of 40 lines or more?
!
! The array constructor below is deliberately spread over 40
! continuation lines; the program only needs to compile and print
! its message for the check to pass.
!
program chk_long_statement
    implicit none
    character(len=60), dimension(40) :: string

    ! 40 filler strings, one per continuation line; the leading digit
    ! cycles 1..9,0 so each line is distinguishable.
    string = (/ &
        '1aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '2aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '3aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '4aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '5aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '6aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '7aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '8aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '9aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '0aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '1aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '2aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '3aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '4aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '5aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '6aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '7aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '8aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '9aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '0aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '1aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '2aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '3aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '4aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '5aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '6aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '7aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '8aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '9aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '0aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '1aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '2aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '3aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '4aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '5aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '6aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '7aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '8aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '9aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' , &
        '0aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' /)

    write( *, '(a)' ) 'The compiler supports statements of at least 40 lines long'
end program chk_long_statement
# -*- coding: utf-8 -*-
import requests

PUSH_URL = "https://api.pushbullet.com/v2/pushes"
DEVICES_URL = "https://api.pushbullet.com/v2/devices"


class PushbulletClient(object):
    """Minimal Pushbullet REST client bound to a single API key."""

    def __init__(self, api_key):
        # One session per client so the Access-Token header is sent on
        # every request without repeating it.
        self.api_key = api_key
        self.session = requests.Session()
        self.session.headers.update({"Access-Token": self.api_key})

    def list_devices(self):
        """Return the account's device listing, or None on a non-200 reply."""
        response = self.session.get(DEVICES_URL)
        if response.status_code != 200:
            return None
        return response.json()

    def device_by_nickname(self, nickname):
        """Return the identifier of the device named `nickname`, or None."""
        listing = self.list_devices()
        if not listing:
            return None
        for device in listing.get("devices", []):
            if device.get("nickname") == nickname:
                return device["iden"]
        return None

    def push_note(self, device_iden, title, body):
        """Push a note to the given device; raises on an HTTP error status."""
        payload = {
            "device_iden": device_iden,
            "type": "note",
            "title": title,
            "body": body
        }
        response = self.session.post(PUSH_URL, json=payload)
        response.raise_for_status()
package com.skyyo.template.application.models.remote

import com.squareup.moshi.JsonClass

/**
 * Request body for the sign-in endpoint, serialized by Moshi.
 *
 * Declared as a `data class` so the DTO gets value-based equals/hashCode
 * and a readable toString for free; the generated Moshi adapter and all
 * call sites are unaffected.
 */
@JsonClass(generateAdapter = true)
data class SignInRequest(
    val email: String,
    val password: String
)
/*
 * File:   OpenKinect.hpp
 * Author: imam
 *
 * Created on 13 October 2015, 10:49 PM
 */

/*
 * libfreenect class from the scratch to use multiple Kinect devices in OpenCV
 */

#ifndef OPENKINECT_HPP
#define OPENKINECT_HPP

#include <thread>
#include <mutex>
#include <opencv2/opencv.hpp>
#include <libfreenect.h>

/* Freenect class for libfreenect library
 * you just need one class object for all Kinect devices
 * once create it is initialised
 * after starting one or more Kinect objects process their request by start member function
 * when all Kinect objects are stoped, you can stop process thread by stop member function
 *
 * typical application procedure as follows:
 *
 * - create one Freenect object in your application
 * - make sure Freenect object is initialised
 * - create one or more kinect objects and initialise them
 * - start Kinect objects
 * - start Freenect object
 * - stop Kinect objects
 * - stop Freenect object
 */
class Freenect {
    bool success;                       // set when libfreenect initialisation succeeded
    bool started;                       // set while the event-processing thread is running
    std::thread *freenect_thread;       // runs freenect_thread_func to pump libfreenect events

public:
    freenect_context *context;          // libfreenect context for all Kinect devices
    volatile int stop_freenect_thread;  // flag polled by the thread to request shutdown

    Freenect ();
    ~Freenect ();
    // true when the libfreenect context was created successfully
    bool isInitialised ();
    void initialise ();
    // start the background event-processing thread
    void start ();
    // signal the thread to stop and join it
    void stop ();
    // number of Kinect devices currently attached
    int no_of_kinects ();
};

/* Kinect wraps one physical device: its depth and RGB streams are
 * delivered via the callbacks below into the OpenCV Mat members,
 * each guarded by its own mutex. */
class Kinect {
    bool success;                       // set when the device was opened successfully
    bool depth_started;                 // depth stream currently running
    bool rgb_started;                   // RGB stream currently running

public:
    freenect_device *device;            // device context

    freenect_frame_mode rgb_mode;
    freenect_video_format rgb_format;
    freenect_resolution rgb_resolution;

    freenect_frame_mode depth_mode;
    freenect_depth_format depth_format;
    freenect_resolution depth_resolution;

    cv::Mat depth;                      // depth camera OpenCV image container
    cv::Mat rgb;                        // RGB camera OpenCV image container

    std::mutex depth_mutex;             // guards `depth` against callback/reader races
    std::mutex rgb_mutex;               // guards `rgb` against callback/reader races

    Kinect (int _index, freenect_context *context);
    ~Kinect ();
    bool isInitialised ();
    void initialise (int _index, freenect_context *context);
    bool depthStarted ();
    bool rgbStarted ();
    // start/stop both depth and RGB streams for this device
    void start ();
    void stop ();
};

// Thread entry point: pumps libfreenect events until stop is requested.
extern void freenect_thread_func (Freenect *freenect_object);
// libfreenect stream callbacks; copy incoming frames into the Kinect's Mats.
extern void freenect_depth_callback (freenect_device *device, void *pixel, uint32_t timestamp);
extern void freenect_rgb_callback (freenect_device *device, void *pixel, uint32_t timestamp);

#endif /* OPENKINECT_HPP */
package ru.job4j.io;

import ru.job4j.io.connect.SocketServer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.Socket;
import java.nio.charset.StandardCharsets;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests SocketServer against a mocked Socket: the client "conversation"
 * is fed in through an in-memory input stream and the server's replies
 * are captured from an in-memory output stream.
 */
public class SocketServerTest {
    // Platform line separator; requests/expected replies are built with it.
    public static final String LN = System.getProperty("line.separator");
    // Mocked socket so no real network connection is needed.
    Socket socket = mock(Socket.class);
    // Captures anything the server prints to System.out during a test.
    ByteArrayOutputStream bos = new ByteArrayOutputStream();

    /** Redirect System.out so console output does not pollute the test run. */
    @Before
    public void before() {
        System.setOut(new PrintStream(bos));
    }

    /** Restore System.out and release the mocked socket resources. */
    @After
    public void close() throws IOException {
        bos.close();
        System.setOut(System.out);
        socket.close();
    }

    /**
     * Runs the server with the given client request script and asserts the
     * server's complete response (read back via readUTF) matches expected.
     */
    public void testServer(final String request, final String expected) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ByteArrayInputStream in = new ByteArrayInputStream(request.getBytes(StandardCharsets.UTF_8));
        when(socket.getOutputStream()).thenReturn(out);
        when(socket.getInputStream()).thenReturn(in);
        SocketServer server = new SocketServer(socket);
        server.startServer();
        // Replies were written with writeUTF, so read them back the same way.
        final DataInputStream dis = new DataInputStream(new ByteArrayInputStream(out.toByteArray()));
        StringBuilder sb = new StringBuilder();
        while (dis.available() > 0) {
            sb.append(dis.readUTF());
        }
        assertThat(sb.toString(), is(expected));
        dis.close();
        in.close();
        out.close();
    }

    /** An immediate "exit" produces no reply at all. */
    @Test
    public void whenClientAnswerThenChooseRandom() throws IOException {
        this.testServer(String.join(LN, "exit", ""), "");
    }

    /** A greeting is answered with the Oracle's greeting. */
    @Test
    public void whenClientHelloThenBackGreatOracle() throws IOException {
        this.testServer(
                String.join(LN,
                        "Hello Oracle",
                        "exit"
                ),
                String.format("Hello, dear friend, I'm a Oracle.%s", LN)
        );
    }

    /** Any unrecognized input is answered with the fallback reply. */
    @Test
    public void whenClientAnyThenBackDontUnderstand() throws IOException {
        this.testServer(
                String.join(LN, "be or not to be?", "exit"),
                String.join(LN, "I don't understand.", ""));
    }
}
#!/usr/bin/env bash
# Launch training of the LWShuffleNetV2_v1_16_cat robot keypoint model.
# Single-stage network (--multistage 0) on GPU 3, 400 epochs, with data
# augmentation and resampling enabled; TensorBoard logs go to --logdir
# and the console output is teed into a versioned .log file.
# NOTE(review): --json_path is a placeholder — point it at the real
# annotation file before running.
python train_ShuffleNetV2_robot.py \
	--json_path='/path/to/json/robot_keypoints_20190426_640x360.json' \
	--saved_model='robotkp_LWShuffleNetV2_v1_16_cat_w1.0_v0.1.pth' \
	--model='LWShuffleNetV2_v1_16_cat' \
	--multistage 0 \
	--gpu_ids 3 \
	--epochs=400 \
	--batch_size=24 \
	--workers=4 \
	--lr=0.5 \
	--batch_size_val=4 \
	--workers_val=1 \
	--data_augment True \
	--resample True \
	--logdir='./logs/robotkp_LWShuffleNetV2_v1_16_cat_w1.0' 2>&1 |tee train_LWShuffleNetV2_v1_16_cat_w1.0_v0.1.log
import React, { FC, Dispatch, SetStateAction } from 'react'; import { useHistory } from 'lib/router'; import { SIGNIN_URL } from 'constants/urls'; import { Modal } from 'components'; interface LoginModalProps { visible: boolean; setVisible: Dispatch<SetStateAction<boolean>>; } const LoginModal: FC<LoginModalProps> = ({ visible, setVisible }) => { const history = useHistory(); const moveSignin = () => { history.push(SIGNIN_URL); }; return ( <Modal type="confirm" header={<div>로그인이 필요합니다</div>} body={<div>로그인 페이지로 이동하시겠습니까?</div>} visible={visible} setVisible={setVisible} onConfirm={moveSignin} /> ); }; export default LoginModal;
#!/usr/bin/env ruby
#
# Seed AbstractLocation and AbstractEvent objects with earliest known
# versions of venues and events imported from 3rd party calendars. The
# new recurring event import backend performs de-duplication and event
# updates at the source level; this allows us to match against original
# values found during the last import instead of current values stored
# with events. Doing this allows us to change venue and event titles
# without triggering a new event on next import, as well as identify
# which fields have been edited manually using the web interface so we
# don't accidentally blow those changes away with data from the upstream
# calendar.
#
# You should run this script if you plan to take advantage of recurring
# event imports from 3rd party calendars.
#

# Pass 1: seed an AbstractLocation for every source-backed venue.
Venue.where("source_id IS NOT NULL").order(:id).each do |venue|
  # tries to build the original venue as first imported
  # (earliest PaperTrail version with a serialized object)
  if version = venue.versions.where("object is not null").first
    venue = version.reify rescue venue # okay w/ current venue if fails
  end

  # double-check that the venue is still associated with a source
  next if !venue.source # ...and not just an _id to oblivion

  # snapshot of the venue's original attributes; created_at is reused
  # for updated_at so the record reflects the time of first import
  abstract_location = AbstractLocation.new(
    :site_id        => venue.site_id,
    :source_id      => venue.source_id,
    :venue_id       => venue.id,
    :url            => venue.url,
    :title          => venue.title,
    :description    => venue.description,
    :address        => venue.address,
    :street_address => venue.street_address,
    :locality       => venue.locality,
    :region         => venue.region,
    :postal_code    => venue.postal_code,
    :country        => venue.country,
    :latitude       => venue.latitude,
    :longitude      => venue.longitude,
    :email          => venue.email,
    :telephone      => venue.telephone,
    :created_at     => venue.created_at,
    :updated_at     => venue.created_at,
    :result         => 'created',
  )

  # double-check that another abstract venue isn't identical; some
  # imports, mostly web-based, created the exact same venue multiple
  # times due to bugs and otherwise renaming the venue's title
  next if abstract_location.find_existing

  abstract_location.save!
end

# Pass 2: seed an AbstractEvent for every source-backed event.
Event.where("source_id IS NOT NULL").order(:id).each do |event|
  # tries to build original event as first imported
  if version = event.versions.where("object is not null").first
    event = version.reify rescue event # okay w/ current event if fails
  end

  # double-check that the event is still associated with a source
  next if !event.source # ...and not just an _id to oblivion

  abstract_event = AbstractEvent.new(
    :site_id     => event.site_id,
    :source_id   => event.source_id,
    :event_id    => event.id,
    :url         => event.url,
    :title       => event.title,
    :start_time  => event.start_time,
    :end_time    => event.end_time,
    :description => event.description,
    :created_at  => event.created_at,
    :updated_at  => event.created_at,
    :result      => 'created',
  )

  # link the event to the abstract location seeded for its venue above
  location = AbstractLocation.where(:venue_id => event.venue_id).first
  abstract_event.abstract_location = location

  # our Event-based dupe detection wasn't very good, so some events got
  # reimported multiple times due to changed event titles and times
  next if abstract_event.find_existing

  abstract_event.save!
end
# coffeeorder

This is my updated version of the Udacity "Just Java" app.

Here are some screenshots of the app:

First page: Quantity

<img src="./content/quantity.png" alt="drawing" width="325"/>

Second page: Toppings

<img src="./content/toppings.png" alt="drawing" width="325"/>

Third page: ID

<img src="./content/id.png" alt="drawing" width="325"/>

Fourth page: Summary

<img src="./content/summary.png" alt="drawing" width="325"/>

Order completed page

<img src="./content/order.png" alt="drawing" width="325"/>

### Feedback

I would be very pleased to receive honest feedback. You can give me feedback via my [Email](mailto:schaepersliam@gmail.com), or you can open a new issue if you find a bug. Thank you for helping me gain new experience!

### Using the code

If you want to use the code for anything beyond learning, please contact me via my [Email](mailto:schaepersliam@gmail.com).

### Want to build Android apps too?

Check this [Udacity course](https://de.udacity.com/course/android-basics-user-input--ud836) out. :)

### You don't understand something in the code?

Feel free to contact me at any time via [Email](mailto:schaepersliam@gmail.com).

### Submit your changes

If you are interested in improving this project, please open a new pull request. I will definitely look at all of them! :)
---
layout: post
title: 04-05 Ruby Koans Questions [4/05/22]
---

# Ruby Koans Questions to Answer

- Explain pushing to and popping from arrays
- How do you find an object's id?
- I don't really understand when to use `NoMethodError`
- What do you use `inspect` for?
package app

import (
	"github.com/gorilla/mux"

	"github.com/deemount/kraken/api/config"
	"github.com/deemount/kraken/api/config/driver"
)

// App aggregates the service's data access layer, configuration and
// HTTP routers into one application container.
type App struct {
	// refer DB — embedded data-service interface for database access
	driver.DataService
	// Route table the routers are built from
	Routes config.Routes
	// pointer — configuration sections
	API     *config.API
	Kraken  *config.Kraken
	Options *config.Options
	Swagger *config.Swagger
	// legal — top-level router and its versioned (v1) subrouter
	Router *mux.Router
	V1     *mux.Router
}
# Copyright 2016 Max Erickson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.forms.models import model_to_dict
from django.contrib.auth import authenticate, login
from django.contrib.auth import logout as auth_logout
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.utils import timezone
import datetime

from django.conf import settings

from .models import Token, EmailTemplates
from .forms import EmailTemplatesForm
#~ from .utils import get_patients, oauth_session
from .utils import ChronoHandler, oauth_session


def home(request):
    """Render today's patient list for signed-in users, splash page otherwise."""
    if request.user.is_authenticated():
        handler = ChronoHandler(request.user)
        patients = handler.get_patients()  # birthday=datetime.date.today())
        doctors = handler.doctor_map()
        # annotate each patient with their doctor's display name
        for p in patients:
            p['doctor_name'] = doctors[p['doctor']]
        username = request.user.get_username()
        context = {'username': username, 'patient_list': patients}
        return render(request, 'greetings/index.html', context)
    return render(request, 'greetings/splash.html')


def register(request):
    """Kick off the OAuth2 flow by redirecting the user to drchrono."""
    oauth = oauth_session(callback=request.build_absolute_uri(reverse('greetings:authorize')))
    authorization_url, state = oauth.authorization_url(settings.GREETINGS_OAUTH_AUTHORIZATION_URL)
    # persist the CSRF state so the callback can verify it
    request.session['oauth_state'] = state
    request.session.modified = True
    return redirect(authorization_url)


# handle OAuth2 callback
def authorize(request):
    """Exchange the authorization code for a token, sign the user in, and
    create their Token/EmailTemplates records on first login."""
    # handle explicit error parameters in redirect
    if 'error' in request.GET:
        messages.error(request, 'OAuth authentication with drchrono failed.')
        return redirect('greetings:home')

    oauth = oauth_session(
        callback=request.build_absolute_uri(reverse('greetings:authorize')),
        state=request.session['oauth_state'])
    token = {}
    try:
        token = oauth.fetch_token(
            settings.GREETINGS_OAUTH_TOKEN_URL,
            code=request.GET['code'],
            client_secret=settings.GREETINGS_OAUTH_CLIENT_SECRET)
    except Exception:
        # BUG FIX: a leftover bare `raise` made the error report below
        # unreachable; report the failure to the user instead of 500ing.
        messages.error(request, 'Could not retrieve OAuth token from drchrono.')
        return redirect('greetings:home')

    if not token:
        # BUG FIX: previously an empty token fell off the end of the view
        # and returned None (HTTP 500); fail gracefully instead.
        messages.error(request, 'Could not retrieve OAuth token from drchrono.')
        return redirect('greetings:home')

    # fetch profile to associate token with username.
    profile = oauth.get('https://drchrono.com/api/users/current').json()
    user = authenticate(remote_user=profile['username'])
    login(request, user)
    # serialize token information to database for later use.
    try:
        # fetch and update token
        tkn = user.token
        tkn.update(token)
    except Token.DoesNotExist:
        # new user, create token and their default email templates
        tkn = Token.from_dict(user, token)
        tkn.save()
        eml = EmailTemplates(user=user)
        eml.save()
    messages.info(request, 'Welcome to Greetings!')
    return redirect('greetings:home')


def logout(request):
    """Sign the user out and return to the splash page."""
    auth_logout(request)
    messages.success(request, 'You have been logged out.')
    return redirect('greetings:home')


@login_required(login_url='greetings:home')
def settings_view(request):
    """Show and save the user's email template settings."""
    templatedata = request.user.emailtemplates
    # save post and reload page
    if request.method == 'POST':
        form = EmailTemplatesForm(request.POST,
                                  initial=model_to_dict(templatedata),
                                  instance=templatedata)
        if form.is_valid():
            if form.has_changed():
                form.save()
                messages.success(request, 'Acccount settings updated.')
            return redirect('greetings:settings')
    else:
        form = EmailTemplatesForm(initial=model_to_dict(templatedata))
    return render(request, 'greetings/manage.html', {'email_templates': form})


@login_required(login_url='greetings:home')
def delete(request):
    """Delete the user's account after an explicit POST confirmation."""
    if request.method == 'POST':
        user = request.user
        # log out first so the session doesn't reference a deleted user
        auth_logout(request)
        user.delete()
        messages.success(request, 'Account information has been deleted.')
        return redirect('greetings:home')
    return render(request, 'greetings/confirm_delete.html')
using UnityEngine;

/// <summary>
/// Coroutine yield instruction that waits until any input arrives
/// (via InputX.AnyInput) or a timeout elapses, whichever happens first.
/// After the wait, <see cref="ReceivedInput"/> tells the caller whether
/// input (true) or the timeout (false) ended it.
/// </summary>
public class WaitForInputTimeout : CustomYieldInstruction
{
    // True when the wait ended because input was received, not the timeout.
    public bool ReceivedInput { get; set; }

    // Absolute Time.time at which the wait gives up.
    private float _targetTime;
    // Configured wait duration in seconds; kept so Reset() can re-arm.
    private float _seconds;

    public WaitForInputTimeout(float seconds)
    {
        _seconds = seconds;
        Reset();
    }

    // Polled by Unity each frame; returning false ends the wait.
    public override bool keepWaiting
    {
        get
        {
            if (InputX.AnyInput())
            {
                ReceivedInput = true;
                return false;
            }
            // keep waiting only while the deadline is still in the future
            return _targetTime > Time.time;
        }
    }

    // `new` intentionally hides the inherited Reset so this instruction can
    // be re-armed and reused across multiple waits.
    public new void Reset()
    {
        ReceivedInput = false;
        _targetTime = Time.time + _seconds;
        base.Reset();
    }
}
from typing import (
    Any,
    Dict,
    List,
    Tuple,
    Union,
)

from eth_utils import (
    to_canonical_address,
    decode_hex,
    big_endian_to_int,
)
from eth_typing import (
    Address,
)

from sharding.contracts.utils.smc_utils import (
    get_smc_json,
)
from sharding.handler.exceptions import (
    LogParsingError,
)


class LogParser(object):
    """Decode one SMC event log into attributes on this instance.

    Indexed event inputs are read from ``log['topics']`` (topic 0 is the
    event signature hash and is skipped); non-indexed inputs are unpacked
    from consecutive 32-byte words of ``log['data']``.  Each decoded value
    is set as an attribute named after its ABI input.
    """

    def __init__(self, *, event_name: str, log: Dict[str, Any]) -> None:
        event_abi = self._extract_event_abi(event_name=event_name)
        # Partition ABI inputs into indexed (topics) and non-indexed (data).
        topics = []
        data = []
        for item in event_abi["inputs"]:
            if item['indexed'] is True:
                topics.append((item['name'], item['type']))
            else:
                data.append((item['name'], item['type']))
        self._set_topic_value(topics=topics, log=log)
        self._set_data_value(data=data, log=log)

    def _extract_event_abi(self, *, event_name: str) -> Dict[str, Any]:
        """Return the ABI entry for ``event_name`` or raise LogParsingError."""
        for func in get_smc_json()['abi']:
            if func['name'] == event_name and func['type'] == 'event':
                return func
        raise LogParsingError("Can not find event {}".format(event_name))

    def _set_topic_value(self, *, topics: List[Tuple[str, Any]], log: Dict[str, Any]) -> None:
        """Decode indexed inputs from topics (topic 0 / signature skipped)."""
        if len(topics) != len(log['topics'][1:]):
            # BUG FIX: the adjacent string fragments previously joined without
            # a space, producing messages like "expect2 topics".
            raise LogParsingError(
                "Error parsing log topics, expect "
                "{} topics but get {}.".format(len(topics), len(log['topics'][1:]))
            )
        for (i, topic) in enumerate(topics):
            val = self._parse_value(val_type=topic[1], val=log['topics'][i + 1])
            setattr(self, topic[0], val)

    def _set_data_value(self, *, data: List[Tuple[str, Any]], log: Dict[str, Any]) -> None:
        """Decode non-indexed inputs from the ABI-packed ``log['data']`` hex blob."""
        data_bytes = decode_hex(log['data'])
        # Each non-indexed value occupies exactly one 32-byte word.
        if len(data) * 32 != len(data_bytes):
            # BUG FIX: same missing-space defect as in _set_topic_value.
            raise LogParsingError(
                "Error parsing log data, expect "
                "{} data but get {}.".format(len(data), len(data_bytes))
            )
        for (i, (name, type_)) in enumerate(data):
            val = self._parse_value(val_type=type_, val=data_bytes[i * 32: (i + 1) * 32])
            setattr(self, name, val)

    def _parse_value(self, *, val_type: str, val: bytes) -> Union[bool, Address, bytes, int]:
        """Convert one 32-byte ABI word into a Python value for ``val_type``."""
        if val_type == 'bool':
            return bool(big_endian_to_int(val))
        elif val_type == 'address':
            # Addresses are right-aligned in the 32-byte word; keep last 20 bytes.
            return to_canonical_address(val[-20:])
        elif val_type == 'bytes32':
            return val
        elif 'int' in val_type:
            return big_endian_to_int(val)
        else:
            raise LogParsingError(
                "Error parsing the type of given value. Expect bool/address/bytes32/int*"
                "but get {}.".format(val_type)
            )
using System;
using UnityEngine;

namespace KAU.Utilities.Serializer
{
    /// <summary> Each Primitive should be used in Here </summary>
    public class PlayerPrefsSerializerPrimitiveString : IDataSerializer<string>
    {
        // NOTE(review): every serializer in this file uses the same key, so
        // saving through one overwrites the others — confirm this is intended.
        protected override string _SaveName
        {
            get { return "playerSave.kau"; }
        }

        public override bool isSaveExist()
        {
            return PlayerPrefs.HasKey(SaveName);
        }

        protected override void SaveT(string data)
        {
            PlayerPrefs.SetString(SaveName, data);
            PlayerPrefs.Save();
        }

        protected override string LoadT()
        {
            return PlayerPrefs.GetString(SaveName, null);
        }

        public override void Delete()
        {
            if (!isSaveExist())
            {
                return;
            }
            PlayerPrefs.DeleteKey(SaveName);
        }
    }

    /// <summary> Each Primitive should be used in Here </summary>
    public class PlayerPrefsSerializerPrimitiveInt : IDataSerializer<int>
    {
        protected override string _SaveName
        {
            get { return "playerSave.kau"; }
        }

        public override bool isSaveExist()
        {
            return PlayerPrefs.HasKey(SaveName);
        }

        protected override void SaveT(int data)
        {
            // The int is persisted as a string, matching the existing on-disk
            // format of previously saved data.
            PlayerPrefs.SetString(SaveName, data.ToString());
            PlayerPrefs.Save();
        }

        protected override int LoadT()
        {
            return int.Parse(PlayerPrefs.GetString(SaveName, "0"));
        }

        public override void Delete()
        {
            if (!isSaveExist())
            {
                return;
            }
            PlayerPrefs.DeleteKey(SaveName);
        }
    }

    /// <summary> Sadly doesn't work with primitive types! </summary>
    public class PlayerPrefsSerializer<W> : IDataSerializer<W> where W : new()
    {
        protected override string _SaveName
        {
            get { return "playerSave.kau"; }
        }

        public override bool isSaveExist()
        {
            return PlayerPrefs.HasKey(SaveName);
        }

        protected override void SaveT(W data)
        {
            // Objects round-trip through Unity's JSON serializer.
            PlayerPrefs.SetString(SaveName, JsonUtility.ToJson(data));
            PlayerPrefs.Save();
        }

        protected override W LoadT()
        {
            if (!isSaveExist())
            {
                return default(W);
            }

            var dataJson = PlayerPrefs.GetString(SaveName);
            if (string.IsNullOrEmpty(dataJson))
            {
                return default(W);
            }

            return JsonUtility.FromJson<W>(dataJson);
        }

        public override void Delete()
        {
            if (!isSaveExist())
            {
                return;
            }
            PlayerPrefs.DeleteKey(SaveName);
        }
    }
}
import {Template} from 'src/types' export const trimAndRemoveQuotes = elt => { const trimmed = elt.trim() const dequoted = trimmed.replace(/(^")|("$)/g, '') return dequoted } export const formatTempVar = name => `:${name.replace(/:/g, '').replace(/\s/g, '')}:` export const getSelectedValue = (template: Template): string | null => { const selected = template.values.find(v => v.selected) if (selected) { return selected.value } return null } export const getLocalSelectedValue = (template: Template): string | null => { const selected = template.values.find(v => v.localSelected) if (selected) { return selected.value } return null }
using GetIntoTeachingApi.Services;
using Hangfire;
using Microsoft.AspNetCore.Builder;
using Prometheus;

namespace GetIntoTeachingApi.Utils
{
    public static class ApplicationExtensions
    {
        /// <summary>
        /// Registers a callback on the default Prometheus registry that
        /// exports Hangfire job statistics before every metrics collection.
        /// </summary>
        public static IApplicationBuilder UsePrometheusHangfireExporter(this IApplicationBuilder app)
        {
            var services = app.ApplicationServices;
            var storage = (JobStorage)services.GetService(typeof(JobStorage));
            var metricService = (IMetricService)services.GetService(typeof(IMetricService));
            var exporter = new HangfirePrometheusExporter(storage, metricService);

            Metrics.DefaultRegistry.AddBeforeCollectCallback(() => exporter.ExportHangfireStatistics());

            return app;
        }
    }
}
module Exposition
  # ActiveRecord model for a blog post; all behaviour is mixed in from the
  # shared Concerns::Models::Post concern.
  class Post < ActiveRecord::Base
    include Concerns::Models::Post
  end
end
import 'SettingsModel.dart';
import 'package:http/http.dart' as http;
import 'dart:convert';

/// One trivia clue as returned by the clues API.
class Clue {
  final int id;
  final String answer;
  final String question;
  final int value;
  final String airdate;
  final int category_id;
  final int game_id;
  final int invalid_count;

  /// Shared cache holding the most recently fetched clue list.
  static List<Clue> clues = [];

  Clue(
      {this.id,
      this.answer,
      this.question,
      this.value,
      this.airdate,
      this.category_id,
      this.game_id,
      this.invalid_count});

  /// Builds a [Clue] from one decoded JSON object.
  factory Clue.fromJson(Map<String, dynamic> json) {
    return Clue(
        id: json['id'],
        answer: json['answer'],
        question: json['question'],
        value: json['value'],
        airdate: json['airdate'],
        category_id: json['category_id'],
        game_id: json['game_id'],
        invalid_count: json['invalid_count']);
  }

  /// Downloads the clue list from [settings.url], replacing [clues].
  ///
  /// Throws an [Exception] on any non-200 response.
  static Future<List<Clue>> fetch(SettingsModel settings) async {
    final response = await http.get(settings.url);

    if (response.statusCode != 200) {
      // If that call was not successful, throw an error.
      throw Exception('Failed to load clues');
    }

    clues.clear();
    final decoded = json.decode(response.body)['clues'];
    for (final entry in decoded) {
      clues.add(Clue.fromJson(entry));
    }
    return clues;
  }
}
<?php

namespace App\Entity;

use ApiPlatform\Core\Annotation\ApiResource;
use ApiPlatform\Core\Annotation\ApiSubresource;
use App\Repository\TagRepository;
use Doctrine\ORM\Mapping as ORM;

/**
 * Doctrine entity for a tag, exposed through API Platform as a REST resource.
 */
#[ORM\Entity(repositoryClass: TagRepository::class)]
#[ApiResource]
class Tag
{
    // Primary key; IDENTITY strategy with an explicit tag_id_seq sequence.
    #[ORM\Id]
    #[ORM\GeneratedValue(strategy: "IDENTITY")]
    #[ORM\SequenceGenerator(sequenceName: "tag_id_seq", allocationSize: 1, initialValue: 1)]
    #[ORM\Column(type: 'integer')]
    private $id;

    // Display name of the tag.
    #[ORM\Column(type: 'string', length: 255)]
    private $title;

    // Creation timestamp; nullable, set by callers rather than lifecycle hooks.
    #[ORM\Column(type: 'datetime_immutable', nullable: true)]
    private $createdAt;

    // Last-update timestamp; nullable, set by callers rather than lifecycle hooks.
    #[ORM\Column(type: 'datetime_immutable', nullable: true)]
    private $updatedAt;

    // TODO
    // #[ORM\ManyToMany(targetEntity: Article::class, inversedBy: "tags")]
    // #[ApiSubresource]
    // private $articles;

    public function getId(): ?int
    {
        return $this->id;
    }

    public function getTitle(): ?string
    {
        return $this->title;
    }

    public function setTitle(string $title): self
    {
        $this->title = $title;

        return $this;
    }

    public function getCreatedAt(): ?\DateTimeImmutable
    {
        return $this->createdAt;
    }

    public function setCreatedAt(?\DateTimeImmutable $createdAt): self
    {
        $this->createdAt = $createdAt;

        return $this;
    }

    public function getUpdatedAt(): ?\DateTimeImmutable
    {
        return $this->updatedAt;
    }

    public function setUpdatedAt(?\DateTimeImmutable $updatedAt): self
    {
        $this->updatedAt = $updatedAt;

        return $this;
    }

    //
    // /**
    //  * @return mixed
    //  */
    // public function getArticles()
    // {
    //     return $this->articles;
    // }
    //
    // /**
    //  * @param mixed $articles
    //  */
    // public function setArticles($articles): void
    // {
    //     $this->articles = $articles;
    // }
}
## Some resources that might be useful in the future
server_security=https://plusbryan.com/my-first-5-minutes-on-a-server-or-essential-security-for-linux-servers
# Exception thrown inside a task when its cancellation token fires.
struct Cancelled <: Exception end

# A cancellation token is a promise that is fulfilled (with `Cancelled`)
# exactly once, when cancellation is requested.
cancellation_token() = Promise{Cancelled}()
const CancellationTokenType = typeof(cancellation_token())

# Concrete type of the handle returned by `Reagents.dissolve`, obtained by
# dissolving a throwaway channel endpoint.
const DissolveHandle = let
    send, _ = Reagents.channel()
    handle = Reagents.dissolve(send)
    typeof(handle)
end

struct CancelScope
    token::CancellationTokenType
    dissolvehandle::Union{DissolveHandle,Nothing}
end

# TODO: store `CancelScope`?
# Context-local cancellation token; `nothing` means "not cancellable here".
@contextvar CANCELLATION_TOKEN::Union{CancellationTokenType,Nothing} = nothing

# Request cancellation by fulfilling the token (no-op if already fulfilled).
function Julio.cancel!(token::CancellationTokenType)
    tryputting(token)(Cancelled())
end

# Run `f` with `scope`'s token installed; swallow the `Cancelled` exception
# only when it was this scope's own token that fired (and the outer context
# is not itself cancelled) — otherwise propagate.
function withopen(f, scope::CancelScope)
    try
        with_context(f, CANCELLATION_TOKEN => scope.token)
    catch err
        if iscancelled(err) && is_token_cancelled(scope.token) && !Julio.iscancelled()
            return
        end
        rethrow()
    end
end

# Unfortunately, other `Base.open` methods uses `Function`...
Base.open(f::Function, scope::CancelScope) = withopen(f, scope)

# const CancellationTokenRefType = typeof(Reagents.Ref{Promise{Cancelled}}())
# @contextvar CANCELLATION_TOKEN::CancellationTokenRefType = CancellationTokenRefType()

# Build a reagent that fires when the current context's token is cancelled;
# blocks forever when no token is installed.
function waiting_cancel()
    token = CANCELLATION_TOKEN[]
    if token === nothing
        return Return(Block())
    else
        if istracing()
            cancelled = PostCommit(_ -> @trace(label = :cancelled))
            # cancelled = PostCommit(_ -> (yield(); @trace(label = :cancelled)))
        else
            cancelled = Identity()
        end
        return _fetching(token) ⨟ cancelled
    end
end

# React `reagent`, racing it against cancellation; re-throws `Cancelled`
# when the cancellation branch wins.
function cancellable_react!(reagent::Reagents.Reagent, x = nothing)
    ans = (waiting_cancel() | reagent)(x)
    if ans isa Cancelled
        throw(ans)
    end
    return ans
end

# Unwrap a raw reaction result: run deferred EventResult thunks, re-throw
# wrapped errors, pass `nothing` through, and unwrap `Some`.
@inline function handle_result(@nospecialize(ans))
    if ans isa EventResult
        return ans.f()
    elseif ans isa Error
        throw(ans.value)
    elseif ans === nothing
        return nothing
    else
        return something(ans)
    end
end

Julio.sync(reagent::Reagents.Reagent, x = nothing) = handle_result(cancellable_react!(reagent, x))

# TODO: formalize output format better (should we use a sum type always?)
# TODO: unify this with `handle_result`
# Wrap a continuation `f` so that successful reaction outputs become
# deferred `EventResult` thunks while errors pass through untouched.
asresult(f) = Map() do ans
    if ans isa Error
        return ans
    else
        x = something(ans, Some(nothing))
        return EventResult(() -> f(x))
    end
end

# Normalize the various user-facing event specs into a single reagent.
lowerevent(reagent::Reagents.Reagent) = reagent
lowerevent((reagent, f)::Pair{<:Reagents.Reagent}) = reagent ⨟ asresult(f)
lowerevent((fargs, f)::Pair{<:Tuple}) = event(fargs...) ⨟ asresult(f)
lowerevent(fargs::Tuple) = event(fargs...)

# Wait on several events at once; the first to fire wins (choice via `|`).
function Julio.select(event1, events...)
    reagent = mapfoldl(lowerevent, |, (event1, events...))
    return handle_result(cancellable_react!(reagent))
end

#=
struct TryFailed end
function Julio.try(f, args...)
    y = Julio.sync(event(f, args...) | Return(TryFailed()))
    return !(y isa TryFailed)
end
=#

apply(f, args...) = Julio.sync(lowerevent((f, args...)))

# Run `f` under a fresh token that a timer cancels after `seconds`;
# returns `Some(result)` on success or `nothing` on timeout.
function Julio.withtimeout(f::F, seconds::Real) where {F}
    token = cancellation_token()
    with_context(CANCELLATION_TOKEN => token) do
        timer = Timer(seconds) do _
            Julio.cancel!(token)
        end
        try
            return Some(f())
        catch err
            err isa Cancelled && return nothing
            rethrow()
        finally
            close(timer)
        end
    end
end

event(::typeof(Julio.sleep), seconds::Real) = event(sleep, seconds)
# Sleep as an event: a promise fulfilled by a `Timer` after `seconds`.
function event(::typeof(sleep), seconds::Real)
    promise = Julio.Promise{Nothing}()
    Timer(seconds) do _
        put_nocancel!(promise, nothing)
    end
    return fetching(promise)
end

Julio.sleep(seconds) = apply(sleep, seconds)

# Run `f` with cancellation masked (no token installed).
Julio.shield(f) = with_context(f, CANCELLATION_TOKEN => nothing)

# Register `f` to run once when the current token is cancelled; returns the
# dissolve handle (or `nothing` when no token is installed).
function Julio.oncancel(f, args...; kwargs...)
    token = @something(CANCELLATION_TOKEN[], return nothing)
    reagent = fetching(token) ⨟ PostCommit() do _
        f(args...; kwargs...)
    end
    return Reagents.dissolve(reagent; once = true)
end

Julio.cancel!(handle::DissolveHandle) = Reagents.clear!(handle)
package com.amazonaws.xray.opentelemetry.tracing.metadata;

import com.amazonaws.xray.entities.Entity;
import io.opentelemetry.trace.Span;
import java.util.Map;

public class EntityMetadataFactory {

    private static final String OT_METADATA_NAMESPACE = "sdk";
    private static final String OT_METADATA_KEY = "open_telemetry";

    /**
     * Return metadata from an Entity initializing it if it isn't already.
     * @param entity the entity
     * @param kind the kind to use if initializing new metadata
     * @return the metadata
     */
    public static EntityMetadata getOrCreate(final Entity entity, final Span.Kind kind) {
        final Map<String, Object> namespace = entity.getMetadata().get(OT_METADATA_NAMESPACE);
        final Object existing = namespace == null ? null : namespace.get(OT_METADATA_KEY);

        // Reuse previously stored metadata; anything else (missing or of an
        // unexpected type) is replaced with a freshly created instance.
        if (existing instanceof EntityMetadata) {
            return (EntityMetadata) existing;
        }

        final EntityMetadata created = EntityMetadata.create(kind);
        entity.putMetadata(OT_METADATA_NAMESPACE, OT_METADATA_KEY, created);
        return created;
    }
}
package com.tunasushi.activity; import android.app.Activity; import android.os.Bundle; import android.widget.SeekBar; import android.widget.SeekBar.OnSeekBarChangeListener; import com.tunasushi.R; import com.tunasushi.tuna.TAnalysis; /** * @author TunaSashimi * @date 2015-10-30 16:53 * @Copyright 2015 TunaSashimi. All rights reserved. * @Description */ public class TAnalysisActivity extends Activity { private TAnalysis tAnalysis; private SeekBar seekbar; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_t_analysis); tAnalysis = findViewById(R.id.tAnalysis); seekbar = findViewById(R.id.seekbar); seekbar.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { tAnalysis.setAnalyaiaControlXY(progress, progress); } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onStopTrackingTouch(SeekBar seekBar) { } }); } }
using System;
using System.IO;
using Aliencube.GitHubActions.Teams.ConsoleApp;
using FluentAssertions;
using MessageCardModel;
using MessageCardModel.Actions;
using Newtonsoft.Json;
using Microsoft.VisualStudio.TestTools.UnitTesting;

namespace GitHubActions.Teams.ConsoleApp.Tests
{
    /// <summary>
    /// Tests for <c>ActionConverter</c>, the polymorphic JSON converter that
    /// deserializes message-card actions by their <c>@type</c> discriminator.
    /// </summary>
    [TestClass]
    public class ActionConverterTests
    {
        [TestMethod]
        public void Given_Type_It_Should_Inherit()
        {
            typeof(ActionConverter)
                .Should().BeDerivedFrom<JsonConverter>();
        }

        // BUG FIX: method name previously contained the typo "THen".
        [TestMethod]
        public void Given_Value_When_WriteJson_Invoked_Then_It_Should_Throw()
        {
            var converter = new ActionConverter();

            // The converter is read-only; writing is intentionally unsupported.
            Action action = () => converter.WriteJson(null, null, null);

            action.Should().Throw<NotImplementedException>();
        }

        [DataTestMethod]
        [DataRow(typeof(BaseAction), true)]
        [DataRow(typeof(ActionCardAction), false)]
        [DataRow(typeof(HttpPostAction), false)]
        [DataRow(typeof(OpenUriAction), false)]
        public void Given_Type_When_CanConvert_Invoked_It_Should_Return(Type type, bool expected)
        {
            var converter = new ActionConverter();

            var result = converter.CanConvert(type);

            result.Should().Be(expected);
        }

        [DataTestMethod]
        [DataRow("{ \"@type\": \"ActionCard\" }", typeof(ActionCardAction))]
        [DataRow("{ \"@type\": \"HttpPOST\" }", typeof(HttpPostAction))]
        [DataRow("{ \"@type\": \"OpenUri\" }", typeof(OpenUriAction))]
        public void Given_Json_Value_When_ReadJson_Invoked_Then_It_Should_Return(string json, Type expected)
        {
            var serialiser = new JsonSerializer();
            var result = default(object);

            var converter = new ActionConverter();
            using (var reader = new StringReader(json))
            using (var jsonReader = new JsonTextReader(reader))
            {
                result = converter.ReadJson(jsonReader, null, null, serialiser);
            }

            result.Should().BeOfType(expected);
        }
    }
}
# Python Counter Dictionary

Example:
```
>>> from pycountdict import CounterDictionary
>>> cd = CounterDictionary()
>>> cd['a'] = 1
>>> print(cd)
a: 1
>>> cd['b'] = 1
>>> print(cd)
a: 1, b: 1
>>> cd['A'] = 1
>>> print(cd)
a: 1, A: 1, b: 1
>>> cd['a'] = 1
>>> print(cd)
a: 2, A: 1, b: 1
```
//#define USE_PP

// Copyright (c) <2015> <Playdead>
// This file is subject to the MIT License as seen in the root of this folder structure (LICENSE.TXT)
// AUTHOR: Lasse Jon Fuglsang Pedersen <lasse@playdead.com>

#if UNITY_5_5_OR_NEWER
#define SUPPORT_STEREO
#endif

#if USE_PP

using System;
using PDTAAFork.Scripts.Classes;
using UnityEngine;
using UnityEngine.Rendering.PostProcessing;

namespace PDTAAFork.Scripts.PostProcessingStack
{
    /// <summary>
    /// Post-processing-stack settings entry point for the temporal
    /// reprojection (TAA) effect.
    /// </summary>
    [Serializable]
    [PostProcess(typeof(TemporalReprojectionRenderer), PostProcessEvent.AfterStack, "PDTAAFork/TemporalReprojection")]
    //[RequireComponent(typeof(PpStackFrustumJitter))]
    public sealed class TemporalReprojectionEffect : PostProcessEffectSettings
    {
        /// <summary>
        ///
        /// </summary>
        public FloatParameter flip_x = new FloatParameter {value = 0};
    }

    /// <inheritdoc />
    /// <summary>
    /// Temporal anti-aliasing renderer: blends the current frame with a
    /// reprojected history buffer using velocity data and neighborhood
    /// clipping. Maintains a double-buffered history per eye.
    /// </summary>
    public class TemporalReprojectionRenderer : PostProcessEffectRenderer<TemporalReprojectionEffect>
    {
        // Should generally be used before screen space effects.

        /// <summary>Neighborhood sampling pattern used for color clamping.</summary>
        public enum Neighborhood
        {
            Min_max3_x3_,
            Min_max3_x3_rounded_,
            Min_max3_x3_weighted_,
            Min_max4_tap_varying_
        };

        /// <summary>History-sample filtering mode.</summary>
        public enum HistoryInterpolation
        {
            Interpolation_none_,
            Interpolation_cat_mull_rom_,
            Interpolation_cat_mull_rom_cubic_
        }

        /// <summary>History color-clipping strategy.</summary>
        public enum Clipping
        {
            Clipping_none_,
            Clipping_default_,
            Clipping_phasic_,
            Clipping_phasic_directional_,
            Clipping_phasic_variance_
        }

        // Cached shader property ids (cheaper than string lookups per frame).
        static readonly int _motion_scale = Shader.PropertyToID("_MotionScale");
        static readonly int _feedback_max = Shader.PropertyToID("_FeedbackMax");
        static readonly int _feedback_min = Shader.PropertyToID("_FeedbackMin");
        static readonly int _prev_tex = Shader.PropertyToID("_PrevTex");
        static readonly int _main_tex = Shader.PropertyToID("_MainTex");
        static readonly int _velocity_neighbor_max = Shader.PropertyToID("_VelocityNeighborMax");
        static readonly int _velocity_buffer = Shader.PropertyToID("_VelocityBuffer");
        static readonly int _jitter_uv = Shader.PropertyToID("_JitterUV");
        static readonly int _adaptive_clipping_gamma = Shader.PropertyToID("_AdaptiveClippingGamma");
        static readonly int _adaptive_clipping_gamma_min = Shader.PropertyToID("_AdaptiveClippingGammaMin");
        static readonly int _velocity_weight = Shader.PropertyToID("_VelocityWeight");
        static readonly int _clipping_phase_in_factor = Shader.PropertyToID("_ClippingPhaseInFactor");
        static readonly int _y_co_cg_chroma_shrinkage_factor = Shader.PropertyToID("_YCoCgChromaShrinkageFactor");

        // Reused MRT bindings: [0] = history write target, [1] = final output.
        static RenderBuffer[] _mrt = new RenderBuffer[2];

        Shader _reprojection_shader;
        Material _reprojection_material;

        // History buffers indexed [eye, pingPong]; index -1 marks "needs bootstrap".
        RenderTexture[,] _reprojection_buffer;
        int[] _reprojection_index = {-1, -1};

        Camera _camera;

        [SerializeField] FrustumJitter _frustumJitter;
        [SerializeField] VelocityBuffer _velocityBuffer;

        [SerializeField] Neighborhood neighborhood = Neighborhood.Min_max3_x3_weighted_;
        [SerializeField] HistoryInterpolation _history_interpolation =
            HistoryInterpolation.Interpolation_cat_mull_rom_;
        [SerializeField] Clipping _clipping = Clipping.Clipping_phasic_;

        [SerializeField] bool unjitterColorSamples = true;
        [SerializeField] bool unjitterNeighborhood = false;
        [SerializeField] bool unjitterReprojection = false;
        [SerializeField] bool useYCoCg = false;
        [SerializeField] bool useDilation = true;

        [SerializeField] [Range(0.0f, 1.0f)] float feedbackMin = 0.92f;
        [SerializeField] [Range(0.0f, 1.0f)] float feedbackMax = 0.95f;

        [SerializeField] bool varianceClipping = true;
        [SerializeField] bool adaptiveClipping = true;
        [SerializeField] bool velocity_debug = false;

        [SerializeField] [Range(0.01f, 99.9f)] float varianceClippingGamma = 1.0f;
        [SerializeField] [Range(0.01f, 9.9f)] float varianceClippingGammaMin = 0.666f;
        [SerializeField] [Range(0.01f, 1.0f)] float chromaShrinkageFactor = 0.125f;
        [SerializeField] [Range(0.01f, 100.0f)] float velocityWeight = 10;
        [SerializeField] [Range(0.01f, 100.0f)] float clippingPhaseInFactor = 1;

        [SerializeField] bool useMotionBlur = false;
        [SerializeField] [Range(0.0f, 2.0f)] float motionBlurStrength = 1.0f;
        [SerializeField] bool motionBlurIgnoreFF = false;

        [SerializeField] RenderTextureFormat render_texture_format = RenderTextureFormat.ARGB32;
        [SerializeField] FilterMode filtering = FilterMode.Bilinear;
        [SerializeField] Boolean shrinkChrome = true;
        [SerializeField] Boolean luminanceOrientBox = false;
        [SerializeField] Boolean clipTowardsCenter = false;

        void OnPreCull()
        {
            this._frustumJitter?.OnPreCull(ref this._camera);
        }

        void OnPreRender()
        {
            this._velocityBuffer?.OnPreRender(ref this._camera);
        }

        void OnPostRender()
        {
            this._velocityBuffer?.OnPostRender(ref this._camera, ref this._frustumJitter);
        }

        // Lazily acquires the camera, shader and helper objects.
        void Reset()
        {
            if (!this._camera)
            {
                this._camera = Camera.main;
            }

            if (!this._reprojection_shader)
            {
                this._reprojection_shader = Shader.Find("Playdead/Post/TemporalReprojection");
            }

            // BUG FIX: this guard previously tested `_velocityBuffer == null`,
            // so the frustum jitter was only (re)created when the velocity
            // buffer happened to be missing.
            if (this._frustumJitter == null)
            {
                this._frustumJitter = new FrustumJitter();
            }

            if (this._velocityBuffer == null)
            {
                this._velocityBuffer = new VelocityBuffer();
            }
        }

        void Setup()
        {
            this.Reset();
            this.Clear();
            this._velocityBuffer?.Start();
        }

        // Invalidates the history so the next frame bootstraps from scratch.
        void Clear()
        {
            TaaUtilities.EnsureArray(ref this._reprojection_index, 2);
            this._reprojection_index[0] = -1;
            this._reprojection_index[1] = -1;
            this._frustumJitter?.Clear(ref this._camera);
            this._velocityBuffer?.Clear();
        }

        void Awake()
        {
            this.Setup();
        }

        // Core TAA resolve: blends `source` with the reprojected history and
        // writes both the new history buffer and `destination` via MRT.
        void Resolve(RenderTexture source, RenderTexture destination)
        {
            TaaUtilities.EnsureArray(ref this._reprojection_buffer, 2, 2);
            TaaUtilities.EnsureArray(ref this._reprojection_index, 2, initial_value : -1);

            TaaUtilities.EnsureMaterial(ref this._reprojection_material, this._reprojection_shader);
            if (this._reprojection_material == null)
            {
                // Without the shader there is nothing to do; pass through.
                Graphics.Blit(source, destination);
                return;
            }

#if SUPPORT_STEREO
            var eye_index = this._camera.stereoActiveEye == Camera.MonoOrStereoscopicEye.Right ? 1 : 0;
#else
            int eyeIndex = 0;
#endif
            var buffer_w = source.width;
            var buffer_h = source.height;

            // A (re)created history target invalidates accumulated history.
            if (TaaUtilities.EnsureRenderTarget(ref this._reprojection_buffer[eye_index, 0], buffer_w, buffer_h,
                this.render_texture_format, this.filtering, anti_aliasing : source.antiAliasing))
            {
                this.Clear();
            }

            if (TaaUtilities.EnsureRenderTarget(ref this._reprojection_buffer[eye_index, 1], buffer_w, buffer_h,
                this.render_texture_format, this.filtering, anti_aliasing : source.antiAliasing))
            {
                this.Clear();
            }

#if SUPPORT_STEREO
            var stereo_enabled = this._camera.stereoEnabled;
#else
            bool stereo_enabled = false;
#endif
#if UNITY_EDITOR
            var allow_motion_blur = !stereo_enabled && Application.isPlaying;
#else
            bool allow_motion_blur = !stereo_enabled;
#endif

            // Select the shader variant matching the current inspector settings.
            TaaUtilities.EnsureKeyword(this._reprojection_material, "CAMERA_PERSPECTIVE", !this._camera.orthographic);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "CAMERA_ORTHOGRAPHIC", this._camera.orthographic);

            TaaUtilities.EnsureKeyword(this._reprojection_material, "MINMAX_3X3",
                this.neighborhood == Neighborhood.Min_max3_x3_);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "MINMAX_3X3_ROUNDED",
                this.neighborhood == Neighborhood.Min_max3_x3_rounded_);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "MINMAX_3X3_WEIGHTED",
                this.neighborhood == Neighborhood.Min_max3_x3_weighted_);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "MINMAX_4TAP_VARYING",
                this.neighborhood == Neighborhood.Min_max4_tap_varying_);

            TaaUtilities.EnsureKeyword(this._reprojection_material, "INTRPL_NONE",
                this._history_interpolation == HistoryInterpolation.Interpolation_none_);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "INTRPL_CATMULL_ROM",
                this._history_interpolation == HistoryInterpolation.Interpolation_cat_mull_rom_);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "INTRPL_CATMULL_ROM_CUBIC",
                this._history_interpolation == HistoryInterpolation.Interpolation_cat_mull_rom_cubic_);

            TaaUtilities.EnsureKeyword(this._reprojection_material, "UNJITTER_COLORSAMPLES", this.unjitterColorSamples);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "UNJITTER_NEIGHBORHOOD", this.unjitterNeighborhood);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "UNJITTER_REPROJECTION", this.unjitterReprojection);

            TaaUtilities.EnsureKeyword(this._reprojection_material, "CLIPPING_NONE",
                this._clipping == Clipping.Clipping_none_);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "CLIPPING_DEFAULT",
                this._clipping == Clipping.Clipping_default_);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "CLIPPING_PHASIC",
                this._clipping == Clipping.Clipping_phasic_);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "CLIPPING_PHASIC_DIRECTIONAL",
                this._clipping == Clipping.Clipping_phasic_directional_);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "CLIPPING_PHASIC_VARIANCE",
                this._clipping == Clipping.Clipping_phasic_variance_);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "CLIP_TOWARDS_CENTER", this.clipTowardsCenter);

            TaaUtilities.EnsureKeyword(this._reprojection_material, "VARIANCE_CLIPPING", this.varianceClipping);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "ADAPTIVE_CLIPPING", this.adaptiveClipping);

            TaaUtilities.EnsureKeyword(this._reprojection_material, "USE_YCOCG", this.useYCoCg);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "YCOCG_SHRINK_CHROMA_MIN_MAX", this.shrinkChrome);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "YCOCG_ORIENT_IN_LUMINANCE_AXIS",
                this.luminanceOrientBox);

            TaaUtilities.EnsureKeyword(this._reprojection_material, "USE_DILATION", this.useDilation);
            TaaUtilities.EnsureKeyword(this._reprojection_material, "USE_MOTION_BLUR",
                this.useMotionBlur && allow_motion_blur);

            if (this._velocityBuffer != null)
            {
                TaaUtilities.EnsureKeyword(this._reprojection_material, "USE_MAX_NEIGHBOR_VELOCITY",
                    this._velocityBuffer.ActiveVelocityNeighborMax != null);
                TaaUtilities.EnsureKeyword(this._reprojection_material, "VELOCITY_DEBUG", this.velocity_debug);
            }

            if (this._reprojection_index[eye_index] == -1)
            {
                // bootstrap: seed the history with the current frame.
                this._reprojection_index[eye_index] = 0;
                this._reprojection_buffer[eye_index, this._reprojection_index[eye_index]].DiscardContents();
                Graphics.Blit(source, this._reprojection_buffer[eye_index, this._reprojection_index[eye_index]]);
            }

            // Ping-pong between the two history buffers.
            var index_read = this._reprojection_index[eye_index];
            var index_write = (this._reprojection_index[eye_index] + 1) % 2;

            // Jitter offsets converted from pixels to UV space.
            var jitter_uv = this._frustumJitter._ActiveSample;
            jitter_uv.x /= source.width;
            jitter_uv.y /= source.height;
            jitter_uv.z /= source.width;
            jitter_uv.w /= source.height;

            this._reprojection_material.SetVector(_jitter_uv, jitter_uv);
            this._reprojection_material.SetTexture(_velocity_buffer, this._velocityBuffer.ActiveVelocityBuffer);
            this._reprojection_material.SetTexture(_velocity_neighbor_max,
                this._velocityBuffer.ActiveVelocityNeighborMax);
            this._reprojection_material.SetTexture(_main_tex, source);
            this._reprojection_material.SetTexture(_prev_tex, this._reprojection_buffer[eye_index, index_read]);
            this._reprojection_material.SetFloat(_feedback_min, this.feedbackMin);
            this._reprojection_material.SetFloat(_feedback_max, this.feedbackMax);
            this._reprojection_material.SetFloat(_adaptive_clipping_gamma, this.varianceClippingGamma);
            this._reprojection_material.SetFloat(_adaptive_clipping_gamma_min, this.varianceClippingGammaMin);
            this._reprojection_material.SetFloat(_y_co_cg_chroma_shrinkage_factor, this.chromaShrinkageFactor);
            this._reprojection_material.SetFloat(_velocity_weight, this.velocityWeight);
            this._reprojection_material.SetFloat(_motion_scale,
                this.motionBlurStrength *
                (this.motionBlurIgnoreFF ? Mathf.Min(1.0f, 1.0f / this._velocityBuffer.TimeScale) : 1.0f));
            this._reprojection_material.SetFloat(_clipping_phase_in_factor, this.clippingPhaseInFactor);

            // reproject frame n-1 into output + history buffer
            _mrt[0] = this._reprojection_buffer[eye_index, index_write].colorBuffer;
            _mrt[1] = destination.colorBuffer;

            Graphics.SetRenderTarget(_mrt, source.depthBuffer);
            this._reprojection_material.SetPass(0);
            this._reprojection_buffer[eye_index, index_write].DiscardContents();

            TaaUtilities.DrawFullscreenQuad();

            this._reprojection_index[eye_index] = index_write;
        }

        void OnRenderImage(RenderTexture source, RenderTexture destination)
        {
            if (destination != null && source.antiAliasing == destination.antiAliasing)
            {
                // resolve without additional blit when not end of chain
                this.Resolve(source, destination);
            }
            else
            {
                var internal_destination = RenderTexture.GetTemporary(source.width, source.height, 0,
                    this.render_texture_format, RenderTextureReadWrite.Default, source.antiAliasing);

                this.Resolve(source, internal_destination);
                Graphics.Blit(internal_destination, destination);

                RenderTexture.ReleaseTemporary(internal_destination);
            }
        }

        void OnApplicationQuit()
        {
            // BUG FIX: use the null-conditional here as everywhere else;
            // previously this threw if _velocityBuffer was never created.
            this._velocityBuffer?.OnApplicationQuit();

            if (this._reprojection_buffer != null)
            {
                TaaUtilities.ReleaseRenderTarget(ref this._reprojection_buffer[0, 0]);
                TaaUtilities.ReleaseRenderTarget(ref this._reprojection_buffer[0, 1]);
                TaaUtilities.ReleaseRenderTarget(ref this._reprojection_buffer[1, 0]);
                TaaUtilities.ReleaseRenderTarget(ref this._reprojection_buffer[1, 1]);
            }
        }

        /// <summary>
        /// Post-processing-stack entry point; not yet wired up in this fork.
        /// </summary>
        /// <param name="context"></param>
        /// <exception cref="NotImplementedException"></exception>
        public override void Render(PostProcessRenderContext context)
        {
            throw new NotImplementedException();
        }
    }
}

#endif
use std::sync::Arc;

use serde::{Deserialize, Serialize};
use twilight_model::guild::{Permissions, Role};
use twilight_model::id::RoleId;

use super::is_default;

/// Cached snapshot of a guild role.
///
/// Serde field names are shortened to single letters and defaulted fields
/// are skipped, keeping the serialized cache payload compact.
#[derive(Debug, Serialize, Deserialize)]
pub struct CachedRole {
    #[serde(rename = "a")]
    pub id: RoleId,
    #[serde(rename = "b")]
    pub name: String,
    #[serde(rename = "c", default, skip_serializing_if = "is_default")]
    pub color: u32,
    #[serde(rename = "d", default, skip_serializing_if = "is_default")]
    pub hoisted: bool,
    #[serde(rename = "e", default, skip_serializing_if = "is_default")]
    pub position: i64,
    #[serde(rename = "f")]
    pub permissions: Permissions,
    #[serde(rename = "g", default, skip_serializing_if = "is_default")]
    pub managed: bool,
    #[serde(rename = "h", default, skip_serializing_if = "is_default")]
    pub mentionable: bool,
}

impl CachedRole {
    /// Builds a cached snapshot from a gateway `Role`.
    pub fn from_role(role: &Role) -> Self {
        Self {
            id: role.id,
            name: role.name.clone(),
            color: role.color,
            hoisted: role.hoist,
            position: role.position,
            permissions: role.permissions,
            managed: role.managed,
            mentionable: role.mentionable,
        }
    }
}

impl From<&Arc<CachedRole>> for CachedRole {
    // Deep-copies the role out of its shared pointer.
    fn from(role: &Arc<CachedRole>) -> Self {
        Self {
            id: role.id,
            name: role.name.clone(),
            color: role.color,
            hoisted: role.hoisted,
            position: role.position,
            permissions: role.permissions,
            managed: role.managed,
            mentionable: role.mentionable,
        }
    }
}
package com.tanyayuferova.muteme.data

import com.google.android.gms.location.places.Place

/**
 * Author: Tanya Yuferova
 * Date: 11/30/2018
 */

/**
 * Maps the persisted [LocationData] record to the domain [Location] model.
 * Only the identifying fields are carried over; lat/lng are dropped here.
 */
fun LocationData.toLocation() = Location(
    id = id,
    name = name,
    address = address
)

/**
 * Maps a Google Places [Place] to the persisted [LocationData] model.
 * The address may be absent on a Place, in which case it is stored as "".
 */
fun Place.toLocationData() = LocationData(
    id = id,
    name = name.toString(),
    address = address?.toString().orEmpty(),
    lat = latLng.latitude,
    lng = latLng.longitude
)
<?php
// View: message-sending form. Receives $names (list of users with 'id'/'name')
// from the controller.
$form['id'] = 'form';
$select['class'] = 'form-control text-center';
?>
<!DOCTYPE html>
<html lang="en">
<?php $this->load->view('templates/head',array('title'=>'Lista de Mensagens')); ?>
<body>
<?php $this->load->view('templates/header'); ?>
<div class="container">
    <div class="panel">
        <h1 class="text-center">Cadastro de usuários</h1>
        <?= form_open('mensagem/enviar',$form); ?>
        <h4>Enviar mensagem</h4>
        <hr>
        <div class="row">
            <div class="col-md-4">
                <div class="form-group">
                    <?= form_label('Usuário:', 'user');?>
                    <?php
                    // Build the id => name map for the dropdown.
                    // Initialize first so an empty $names list no longer
                    // triggers an undefined-variable notice below.
                    $name = array();
                    foreach ($names as $row):
                        $name[$row['id']] = $row['name'];
                    endforeach;
                    echo form_dropdown('name', $name, '', $select);
                    ?>
                </div>
            </div>
        </div>
        <div class="form-group">
            <label for="msg">Mensagem:</label>
            <textarea class="form-control" id="msg" name="msg" rows="3"></textarea>
        </div>
        <?php $this->load->view('templates/submit',array('return' => 'mensagens', 'type' => 'enviar')); ?>
        </form>
    </div>
</div>
<?php $this->load->view('templates/footer'); ?>
<?php $this->load->view('templates/scripts'); ?>
</body>
</html>
/* Define an at-style functions like fstatat, unlinkat, fchownat, etc.
   Copyright (C) 2006 Free Software Foundation, Inc.

   This program is free software: you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

/* written by Jim Meyering */

/* Dispatch to one of the two underlying syscalls, selected at
   template-instantiation time by AT_FUNC_USE_F1_COND.  */
#define CALL_FUNC(F) \
  (AT_FUNC_USE_F1_COND \
    ? AT_FUNC_F1 (F AT_FUNC_POST_FILE_ARGS) \
    : AT_FUNC_F2 (F AT_FUNC_POST_FILE_ARGS))

/* Call AT_FUNC_F1 or AT_FUNC_F2 (testing AT_FUNC_USE_F1_COND to determine
   which) to operate on FILE, which is in the directory open on descriptor
   FD.  If possible, do it without changing the working directory.
   Otherwise, resort to using save_cwd/fchdir, then AT_FUNC_F?/restore_cwd.
   If either the save_cwd or the restore_cwd fails, then give a diagnostic
   and exit nonzero.  */
int
AT_FUNC_NAME (int fd, char const *file AT_FUNC_POST_FILE_PARAM_DECLS)
{
  struct saved_cwd saved_cwd;
  int saved_errno;
  int err;

  /* Fast path: no directory-relative resolution needed at all.  */
  if (fd == AT_FDCWD || IS_ABSOLUTE_FILE_NAME (file))
    return CALL_FUNC (file);

  /* Second-fastest path: rewrite the name as /proc/self/fd/FD/FILE so the
     plain (non-at) syscall resolves it without a chdir.  */
  {
    char buf[OPENAT_BUFFER_SIZE];
    char *proc_file = openat_proc_name (buf, fd, file);
    if (proc_file)
      {
        int proc_result = CALL_FUNC (proc_file);
        /* Capture errno before free() can clobber it.  */
        int proc_errno = errno;
        if (proc_file != buf)
          free (proc_file);
        /* If the syscall succeeds, or if it fails with an unexpected
           errno value, then return right away.  Otherwise, fall through
           and resort to using save_cwd/restore_cwd.  */
        if (0 <= proc_result)
          return proc_result;
        if (! EXPECTED_ERRNO (proc_errno))
          {
            errno = proc_errno;
            return proc_result;
          }
      }
  }

  /* Last resort: remember the cwd, chdir into FD, run the plain syscall,
     then chdir back.  Failure to save or restore the cwd is fatal.  */
  if (save_cwd (&saved_cwd) != 0)
    openat_save_fail (errno);

  if (fchdir (fd) != 0)
    {
      /* Preserve fchdir's errno across the cleanup call.  */
      saved_errno = errno;
      free_cwd (&saved_cwd);
      errno = saved_errno;
      return -1;
    }

  err = CALL_FUNC (file);
  saved_errno = (err < 0 ? errno : 0);

  if (restore_cwd (&saved_cwd) != 0)
    openat_restore_fail (errno);

  free_cwd (&saved_cwd);

  if (saved_errno)
    errno = saved_errno;
  return err;
}
#undef CALL_FUNC
# Controller for pages rendered in an embedded context.
# Inherits all behavior from OutletsController, swapping in the bare
# "embed" layout.
class EmbedController < OutletsController
  layout "embed"

  # Same verification flow as the parent; only the page title differs.
  def verify
    @page_title = "Register an account"
    super
  end
end
package com.github.bhlangonijr.kengine

import com.github.bhlangonijr.chesslib.Board
import com.github.bhlangonijr.chesslib.move.MoveList
import java.util.concurrent.Executors

/**
 * Coordinates a single [SearchEngine] search over the given [Board]:
 * position setup (UCI-style "position" handling) plus start/stop control.
 */
class Search(val board: Board, val engine: SearchEngine) {

    // Single worker thread: at most one search runs at a time.
    private val executor = Executors.newSingleThreadExecutor()
    // Non-null while a search is in flight; cleared by stop().
    private var state: SearchState? = null
    // NOTE(review): not read anywhere in this class — presumably consumed by
    // the engine via SearchParams/SearchState; confirm before removing.
    var threads: Int = 1

    // Restore the game's starting position.
    fun reset() {
        board.loadFromFen(board.context.startFEN)
    }

    // Set up from an explicit FEN, then replay the given move-text (if any).
    fun setupPosition(fen: String, moves: String) {
        if (moves.isNotBlank()) {
            val moveList = MoveList(fen)
            moveList.loadFromText(moves)
            board.loadFromFen(fen)
            for (move in moveList) {
                board.doMove(move)
            }
        } else {
            board.loadFromFen(fen)
        }
    }

    // Set up from the startpos, then replay the given move-text (if any).
    fun setupPosition(moves: String) {
        if (moves.isNotBlank()) {
            val moveList = MoveList()
            moveList.loadFromText(moves)
            board.loadFromFen(board.context.startFEN)
            for (move in moveList) {
                board.doMove(move)
            }
        } else {
            reset()
        }
    }

    // Kick off an asynchronous search; no-op (with an info string) if one is
    // already running. Always returns true.
    @Synchronized
    fun start(params: SearchParams): Boolean {
        val search = this
        if (state == null) {
            val state = SearchState(params, board)
            executor.submit {
                // NOTE(review): "rooSearch" looks like a typo for "rootSearch";
                // the name is defined on SearchEngine (not visible here) — confirm.
                engine.rooSearch(state)
                search.stop()
            }
            this.state = state
        } else {
            println("info string search in progress...")
        }
        return true
    }

    // Signal the running search (if any) to stop and clear the state.
    @Synchronized
    fun stop(): Boolean {
        state?.stopped = true
        state = null
        return true
    }
}
# Corpus locations and language pair for fairseq/mBART binarization.
DATA=../dataset/preprocess/NHG
TRAIN=nhg_en_train
VALID=nhg_en_valid
# NOTE(review): the test split is the "ja" file while TGT is hi_IN and
# NAME is en-hi — looks inconsistent; confirm the intended test set.
TEST=nhg_ja_test
SRC=en_XX
TGT=hi_IN
NAME=en-hi
DEST=../dataset/postprocess/NHG
# Shared mBART-cc25 dictionary used for both source and target.
DICT=../mbart.cc25/dict.txt

# Binarize the SentencePiece-tokenized (.spm) splits into ${DEST}/${NAME}.
python -u preprocess.py \
  --source-lang ${SRC} \
  --target-lang ${TGT} \
  --trainpref ${DATA}/${TRAIN}.spm \
  --validpref ${DATA}/${VALID}.spm \
  --testpref ${DATA}/${TEST}.spm \
  --destdir ${DEST}/${NAME} \
  --thresholdtgt 0 \
  --thresholdsrc 0 \
  --srcdict ${DICT} \
  --tgtdict ${DICT} \
  --workers 70 \
  --fp16
var path = require('path');
var fs = require('fs');

// Matches command module filenames like "foo.js", capturing the bare name.
var COMMAND_SRC = /^(\w+)\.js$/;

exports.LIST = [];

// Eagerly load every sibling *.js file as a command module, exposing each
// one as a named export and recording its name in LIST.
fs.readdirSync(path.resolve(__dirname)).forEach(function (entry) {
  var match = COMMAND_SRC.exec(entry);
  if (!match) {
    return;
  }
  var fullPath = path.resolve(__dirname, entry);
  // Skip this index file itself and anything that is not a regular file.
  if (fullPath === __filename || !fs.statSync(fullPath).isFile()) {
    return;
  }
  exports[match[1]] = require(fullPath);
  exports.LIST.push(match[1]);
});
--- title: "[Python-CodingTest] 6-6. 중복순열 구하기 (DFS: 깊이우선탐색)" categories: - Python CodingTest tags: - [Python] toc: true toc_sticky: true date: 2022-04-26 last_modified_at: 2022-04-26 --- # 중복순열 구하기 ## 문제 정리 ### 입력 ``` 3 2 ``` ### 처리 과정 1. 중복순열 결과 리스트(`res[L]`)에 값 넣고 `DFS(L+1)`로 재귀 2. L이 m`(=2)`이 되면 `res` 출력 3. 총 개수를 카운트하기 위한 global 변수 `cnt` ### 출력 ``` 1 1 1 2 1 3 2 1 2 2 2 3 3 1 3 2 3 3 9 ``` ## 풀이 ```py import sys sys.stdin = open("./input/in6.txt", "rt") # input=sys.stdin.readline # 입력량이 많을 때 속도 증가 # str=input().rstrip() def DFS(L): global cnt if L==m: # 하나의 중복순열 완성 for x in res: print(x, end=' ') print() cnt+=1 else: for i in range(1,n+1): # 1부터 n까지 res[L]=i # 중복순열 리스트에 값(1~n) 넣기 DFS(L+1) if __name__=="__main__": n,m=map(int,input().split()) res=[0]*m # 중복순열 결과 리스트 cnt=0 DFS(0) print(cnt) ``` # 정리 - `global`: 전역변수를 사용하기 위한 키워드 *** <br> 💛 개인 공부 기록용 블로그입니다. 👻 [맨 위로 이동하기](#){: .btn .btn--primary }{: .align-right}
#include <stdio.h> #include <stdint.h> #include <assert.h> #include <cpuid.h> #include <immintrin.h> #include <sys/mman.h> /* XMM/YMM registers to use */ /* For permanently storing all round-keys. */ #define K0 "5" #define K1 "6" #define K2 "7" #define K3 "8" #define K4 "9" #define K5 "10" #define K6 "11" #define K7 "12" #define K8 "13" #define K9 "14" #define K10 "15" /* For enc/dec (contains value to work on) */ #define XMM_SCRATCH "4" /* Decryption key (temp) */ #define DK "3" /* For inserting xmm into ymm without loss of lower-half ymm */ #define YMM_SCRATCH "2" /* XMM instructions */ #define XMM_LOAD(xmm_n, l, h) \ __asm__ __volatile__ ( \ "pinsrq $0, %0, %%xmm" xmm_n " \n\t" \ "pinsrq $1, %1, %%xmm" xmm_n " \n\t" \ : \ : "r"((uint64_t)l), "r"((uint64_t)h) \ : "xmm" xmm_n); #define XMM_TO_VAR64(xmm_n, l, h) \ __asm__ __volatile__ ( \ "pinsrq $0, %0, %%xmm" xmm_n " \n\t" \ "pinsrq $1, %1, %%xmm" xmm_n " \n\t" \ : \ : "r"((uint64_t)l), "r"((uint64_t)h) \ : "xmm" xmm_n); #define XMM_TO_VAR(xmm_n, v) \ __asm__ __volatile__ ( \ "movdqa %%xmm" xmm_n ", %0 \n\t" \ : "=x"(v) \ : \ : "xmm" xmm_n); #define XMM_FROM_VAR(xmm_n, v) \ __asm__ __volatile__ ( \ "movdqa %0, %%xmm" xmm_n " \n\t" \ : \ : "x"(v) \ : "xmm" xmm_n); #define XMM_XOR(xmm_v, xmm_k) \ __asm__ __volatile__ ( \ "xorps %%xmm" xmm_k ", %%xmm" xmm_v " \n\t" \ : \ : \ : "xmm" xmm_v, "xmm" xmm_k); #define XMM_TO_XMM(xmm_n, xmm_m) \ __asm__ __volatile__ ( \ "movdqa %%xmm" xmm_n ", %%xmm" xmm_m " \n\t" \ : \ : \ : "xmm" xmm_n, "xmm" xmm_m); /* YMM instructions */ #define YMM_UPPER_FROM_XMM(ymm_n, xmm_n) \ __asm__ __volatile__ (\ "vmovdqa %%ymm" ymm_n ", %%ymm" YMM_SCRATCH " \n\t" \ "vinserti128 $1, %%xmm" xmm_n ", %%ymm" YMM_SCRATCH ", %%ymm" ymm_n " \n\t" \ : \ : \ : "xmm" xmm_n, "ymm" ymm_n, "ymm" YMM_SCRATCH); #define YMM_UPPER_TO_XMM(ymm_n, xmm_n) \ __asm__ __volatile__ (\ "vextracti128 $1, %%ymm" ymm_n ", %%xmm" xmm_n " \n\t" \ : \ : \ : "xmm" xmm_n, "ymm" ymm_n); /* AES instruction */ #define 
AES_ENC(xmm_v, xmm_k) \ __asm__ __volatile__ ( \ "aesenc %%xmm" xmm_k ", %%xmm" xmm_v " \n\t" \ : \ : \ : "xmm" xmm_v, "xmm" xmm_k); #define AES_ENCLAST(xmm_v, xmm_k) \ __asm__ __volatile__ ( \ "aesenclast %%xmm" xmm_k ", %%xmm" xmm_v " \n\t" \ : \ : \ : "xmm" xmm_v, "xmm" xmm_k); #define AES_IMC(xmm_k, xmm_dk) \ __asm__ __volatile__ ( \ "aesimc %%xmm" xmm_k ", %%xmm" xmm_dk " \n\t" \ : \ : \ : "xmm" xmm_k, "xmm" xmm_dk); #define AES_DEC(xmm_v, xmm_dk) \ __asm__ __volatile__ ( \ "aesdec %%xmm" xmm_dk ", %%xmm" xmm_v " \n\t" \ : \ : \ : "xmm" xmm_v, "xmm" xmm_dk); #define AES_DECLAST(xmm_v, xmm_dk) \ __asm__ __volatile__ ( \ "aesdeclast %%xmm" xmm_dk ", %%xmm" xmm_v " \n\t" \ : \ : \ : "xmm" xmm_v, "xmm" xmm_dk); #define AES_KEYEXP(xmm_prev, xmm_new, xmm_tmp, rcon) \ __asm__ __volatile__ ( \ "movdqa %%xmm" xmm_prev ", %%xmm" xmm_tmp " \n\t" \ "movdqa %%xmm" xmm_prev ", %%xmm" xmm_new " \n\t" \ "pslldq $4, %%xmm" xmm_tmp " \n\t" \ "pxor %%xmm" xmm_tmp ", %%xmm" xmm_new " \n\t" \ "pslldq $4, %%xmm" xmm_tmp " \n\t" \ "pxor %%xmm" xmm_tmp ", %%xmm" xmm_new " \n\t" \ "pslldq $4, %%xmm" xmm_tmp " \n\t" \ "pxor %%xmm" xmm_tmp ", %%xmm" xmm_new " \n\t" \ "aeskeygenassist $" #rcon ", %%xmm" xmm_prev ", %%xmm" xmm_tmp " \n\t" \ "pshufd $255, %%xmm" xmm_tmp ", %%xmm" xmm_tmp " \n\t" \ "pxor %%xmm" xmm_tmp ", %%xmm" xmm_new " \n\t" \ : \ : \ : "xmm" xmm_prev, "xmm" xmm_new, "xmm" xmm_tmp); #define AES_KEYGEN_ALL_YMM(xmm_k) \ XMM_TO_XMM(xmm_k, K0); \ YMM_UPPER_FROM_XMM(K0, K0); \ AES_KEYEXP(K0, K1, XMM_SCRATCH, 0x01); \ YMM_UPPER_FROM_XMM(K1, K1); \ AES_KEYEXP(K1, K2, XMM_SCRATCH, 0x02); \ YMM_UPPER_FROM_XMM(K2, K2); \ AES_KEYEXP(K2, K3, XMM_SCRATCH, 0x04); \ YMM_UPPER_FROM_XMM(K3, K3); \ AES_KEYEXP(K3, K4, XMM_SCRATCH, 0x08); \ YMM_UPPER_FROM_XMM(K4, K4); \ AES_KEYEXP(K4, K5, XMM_SCRATCH, 0x10); \ YMM_UPPER_FROM_XMM(K5, K5); \ AES_KEYEXP(K5, K6, XMM_SCRATCH, 0x20); \ YMM_UPPER_FROM_XMM(K6, K6); \ AES_KEYEXP(K6, K7, XMM_SCRATCH, 0x40); \ YMM_UPPER_FROM_XMM(K7, K7); \ 
AES_KEYEXP(K7, K8, XMM_SCRATCH, 0x80); \ YMM_UPPER_FROM_XMM(K8, K8); \ AES_KEYEXP(K8, K9, XMM_SCRATCH, 0x1B); \ YMM_UPPER_FROM_XMM(K9, K9); \ AES_KEYEXP(K9, K10, XMM_SCRATCH, 0x36); \ YMM_UPPER_FROM_XMM(K10, K10); #define AES_ENCROUNDS_YMMRK(xmm_v) \ YMM_UPPER_TO_XMM(K0, DK); \ XMM_XOR(xmm_v, DK); \ YMM_UPPER_TO_XMM(K1, DK); \ AES_ENC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K2, DK); \ AES_ENC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K3, DK); \ AES_ENC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K4, DK); \ AES_ENC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K5, DK); \ AES_ENC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K6, DK); \ AES_ENC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K7, DK); \ AES_ENC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K8, DK); \ AES_ENC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K9, DK); \ AES_ENC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K10, DK); \ AES_ENCLAST(xmm_v, DK); #define AES_DECROUNDS_YMMRK(xmm_v) \ YMM_UPPER_TO_XMM(K10, DK); \ /* No IMC for K10 */ \ XMM_XOR(xmm_v, DK); \ YMM_UPPER_TO_XMM(K9, DK); \ AES_IMC(DK, DK); \ AES_DEC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K8, DK); \ AES_IMC(DK, DK); \ AES_DEC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K7, DK); \ AES_IMC(DK, DK); \ AES_DEC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K6, DK); \ AES_IMC(DK, DK); \ AES_DEC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K5, DK); \ AES_IMC(DK, DK); \ AES_DEC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K4, DK); \ AES_IMC(DK, DK); \ AES_DEC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K3, DK); \ AES_IMC(DK, DK); \ AES_DEC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K2, DK); \ AES_IMC(DK, DK); \ AES_DEC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K1, DK); \ AES_IMC(DK, DK); \ AES_DEC(xmm_v, DK); \ YMM_UPPER_TO_XMM(K0, DK); \ /* No IMC for K0 */ \ AES_DECLAST(xmm_v, DK); /* Area we will encrypt. XXX: make sure attacker cannot corrupt (store in ymm * or so). */ size_t _memsentry_max_region_size = -1; char *_memsentry_crypt_area = (char*)-1; char *_memsentry_crypt_iv = (char*)-1; /* Performs full CBC AES-128 encryption on a given area. Assumes the round keys * are in the upper parts of the ymm registers. 
*/ void _memsentry_crypt_enc(char *data, size_t len, char *iv) { __m128i prevblock, tmp; size_t i; /* Use IV for first iteration of CBC instead of previous block. */ prevblock = _mm_load_si128((void*)iv); for (i = 0; i < len / 16; i++) { tmp = _mm_load_si128((void*)data); tmp = _mm_xor_si128(tmp, prevblock); XMM_FROM_VAR(XMM_SCRATCH, tmp); AES_ENCROUNDS_YMMRK(XMM_SCRATCH); XMM_TO_VAR(XMM_SCRATCH, prevblock); _mm_store_si128((void*)data, prevblock); data += 16; } } /* Performs full CBC AES-128 decryption on a given area. Assumes the round keys * are in the upper parts of the ymm registers. */ void _memsentry_crypt_dec(char *data, size_t len, char *iv) { __m128i prevblock, tmp, prevblock_crypt; size_t i; /* Use IV for first iteration of CBC instead of previous block. */ prevblock = _mm_load_si128((void*)iv); for (i = 0; i < len / 16; i++) { /* Save ciphertext as we need it next round for XOR. */ prevblock_crypt = _mm_load_si128((void*)data); XMM_FROM_VAR(XMM_SCRATCH, prevblock_crypt); AES_DECROUNDS_YMMRK(XMM_SCRATCH); XMM_TO_VAR(XMM_SCRATCH, tmp); tmp = _mm_xor_si128(tmp, prevblock); prevblock = prevblock_crypt; _mm_store_si128((void*)data, tmp); data += 16; } } void _memsentry_crypt_checkcompat(void) { unsigned a, b, c, d; __cpuid(1, a, b, c, d); if (!(c & 0x2000000)) { fprintf(stderr, "CPU does not support AES-NI extensions"); exit(EXIT_FAILURE); } __cpuid_count(7, 0, a, b, c, d); if (!(b & (1 << 5))) { fprintf(stderr, "CPU does not support AVX2 extensions"); exit(EXIT_FAILURE); } } void _memsentry_crypt_init_keys(void) { /* Load a random key into the upper part of an YMM register, which should * never be touched by the normal application. 
*/ /* TODO random */ XMM_LOAD(XMM_SCRATCH, 0x0706050403020100, 0x0f0e0d0c0b0a0908); AES_KEYGEN_ALL_YMM(XMM_SCRATCH); } void _memsentry_crypt_init_region(void) { unsigned i; assert((_memsentry_max_region_size % 4096) == 0); _memsentry_crypt_area = mmap(NULL, _memsentry_max_region_size, PROT_READ | PROT_WRITE, MAP_ANONYMOUS | MAP_PRIVATE, 0, 0); /* CBC needs an IV */ _memsentry_crypt_iv = malloc(16); for (i = 0; i < 16; i++) _memsentry_crypt_iv[i] = i | (i << 4); /* Fill area with some data for debugging */ for (i = 0; i < _memsentry_max_region_size; i++) _memsentry_crypt_area[i] = i % 256; /* Start the area off as encrypted until a domain needs it */ _memsentry_crypt_enc(_memsentry_crypt_area, _memsentry_max_region_size, _memsentry_crypt_iv); }
# Local SGA repository checkout and deployment target.
sga="/Users/ed/Projects/sga"
user="ubuntu"
ip="shelleygodwinarchive.org"
host="shelleygodwinarchive.org"

# Frankenstein notebook identifiers to process.
ids=(a b c1 c2)

# For each notebook: generate its IIIF-style Manifest.jsonld from the TEI
# source, then copy it up to the web server's static data directory.
for id in ${ids[*]}
do
  tei="$sga/data/tei/ox/ox-frankenstein_notebook_$id.xml"
  echo "generating Manifest.jsonld for $tei"
  bin/unbind $tei http://$host/data/ox/ox-frankenstein-notebook_$id/Manifest.jsonld > Manifest.jsonld
  echo "deploying Manifest.jsonld to $ip"
  scp Manifest.jsonld $user@$ip:/usr/share/nginx/static/data/ox/ox-frankenstein-notebook_$id/
done
using System;
using System.IO;
using System.Linq;
using PortableRest.Extensions;

namespace PortableRest
{
    /// <summary>
    /// A parameter that can be passed over an HTTP request with a given encoding.
    /// </summary>
    /// <remarks>
    /// This enables parameters to be encoded individually. For example, some parameters in a request may need to be
    /// unencoded, and some may need to be UrlEncoded.
    /// </remarks>
    internal class EncodedParameter
    {

        #region Properties

        /// <summary>
        /// The name of the parameter.
        /// </summary>
        internal string Key { get; set; }

        /// <summary>
        /// The raw, not-yet-encoded value of the parameter.
        /// </summary>
        internal object Value { get; set; }

        /// <summary>
        /// How <see cref="Value"/> will be encoded by <see cref="GetEncodedValue"/>.
        /// </summary>
        internal ParameterEncoding Encoding { get; set; }

        #endregion

        #region Constructors

        /// <summary>
        /// Creates a parameter that defaults to <see cref="ParameterEncoding.UriEncoded"/>.
        /// </summary>
        /// <param name="key">The name of the parameter.</param>
        /// <param name="value">The raw value of the parameter.</param>
        internal EncodedParameter(string key, object value)
        {
            Key = key;
            Value = value;
            Encoding = ParameterEncoding.UriEncoded;
        }

        /// <summary>
        /// Creates a parameter with an explicit encoding.
        /// </summary>
        /// <param name="key">The name of the parameter.</param>
        /// <param name="value">The raw value of the parameter.</param>
        /// <param name="encoding">How the value should be encoded.</param>
        internal EncodedParameter(string key, object value, ParameterEncoding encoding): this(key, value)
        {
            Encoding = encoding;
        }

        #endregion

        #region Internal Methods

        /// <summary>
        /// Returns <see cref="Value"/> encoded according to <see cref="Encoding"/>.
        /// </summary>
        /// <returns>
        /// A Base64 string, a byte array, or a (possibly URI-escaped) string,
        /// depending on <see cref="Encoding"/>; null for an unrecognized encoding.
        /// </returns>
        /// <exception cref="ArgumentException">
        /// Thrown when a Base64 or ByteArray encoding is requested but
        /// <see cref="Value"/> is not a <see cref="Stream"/>.
        /// </exception>
        internal object GetEncodedValue()
        {
            //RWM: This will not work. Need to store encoding with parameter and deal with on output, not input.
            switch (Encoding)
            {
                case ParameterEncoding.Base64:
                    // BUG FIX: this message previously said "ByteArray" (copy-paste error).
                    if (!(Value is Stream)) throw new ArgumentException("Base64 encoded objects must be passed in as a stream.");
                    return Convert.ToBase64String((Value as Stream).ToArray());
                case ParameterEncoding.ByteArray:
                    if (!(Value is Stream)) throw new ArgumentException("ByteArray encoded objects must be passed in as a stream.");
                    return (Value as Stream).ToArray();
                case ParameterEncoding.Unencoded:
                    return Value.ToString();
                case ParameterEncoding.UriEncoded:
                    return Uri.EscapeDataString(Value.ToString());
                default:
                    // Preserve the original behavior for any unknown encoding value.
                    return null;
            }
        }

        #endregion

    }
}
module Solver where

import DataTypes
import Utils

-- Simplify the expression.
-- Applies the first rewrite produced by `rws` (defined in Utils — presumably
-- "rewrites"; confirm) and recurses until no rewrite applies.
simplify' :: [Law] -> Expression -> [Step]
simplify' ls e = f steps
    where
        steps = rws ls e
        f [] = []
        f (x:_) = x:(simplify' ls (expression x))

-- Wrapper around simplify'.
-- Adds the "Simplification" step.
-- When not verbose, the whole chain is collapsed into a single step holding
-- the final expression.
simplify :: Bool -> [Law] -> Expression -> [Step]
simplify verbose ls e = f (simplify' ls e)
    where
        f [] = []
        f e' = if verbose then e' else [Step "Simplification" (expression $ last e')]

-- Derive the expression.
-- Same fixed-point loop as simplify', but driven by the derivation laws.
derive :: [Law] -> Expression -> [Step]
derive ls e = f steps
    where
        steps = rws ls e
        f [] = []
        f (x:_) = x:(derive ls (expression x))

-- Calculate the expression from beginning to end.
-- First derives, then simplifies the result of the derivation.
calculate :: [Law] -> [Law] -> Expression -> Bool -> Calculation
calculate dLaws sLaws e verbose = Calculation e (derivation ++ (simplify verbose sLaws (f e derivation)))
    where
        derivation = (derive dLaws e)
        -- Select the expression from the last step if it exists,
        -- otherwise return e'.
        f e' [] = e'
        f _ steps = expression (last steps)
import {
  AnalysisJSONI,
  ApplicantI,
  ConversionTypesI,
  ExistingRequestSearchI,
  LocationT,
  NameRequestI,
  RequestNameI,
  SelectOptionsI,
  StatsI,
  SubmissionTypeT,
  WaitingAddressSearchI
} from '@/interfaces/models'
import { NameChoicesIF, NrDataIF, RequestOrConsentIF } from '@/interfaces'

// Alias kept for the nrRequestNameMap element type.
interface RequestNameMapI extends RequestNameI {}

/**
 * State shape for the new name-request workflow (store module).
 * Mixes form inputs, analysis results, and modal-visibility flags.
 */
export interface NewRequestIF {
  actingOnOwnBehalf: boolean,
  addressSuggestions: any[],
  allowAutoApprove: boolean,
  // Result of the name-analysis service; null until an analysis has run.
  analysisJSON: AnalysisJSONI | null,
  analyzePending: boolean,
  applicant: ApplicantI,
  assumedNameOriginal: string,
  // Modal visibility flags.
  conditionsModalVisible: boolean,
  exitModalVisible: boolean,
  conflictId: string | null,
  conversionType: string,
  conversionTypeAddToSelect: ConversionTypesI | null,
  corpNum: string,
  corpSearch: string,
  designationIssueTypes: string[],
  displayedComponent: string,
  doNotAnalyzeEntities: string[],
  editMode: boolean,
  entity_type_cd: string,
  entityTypeAddToSelect: SelectOptionsI | null,
  errors: string[],
  existingRequestSearch: ExistingRequestSearchI,
  extendedRequestType: SelectOptionsI | null,
  getNameReservationFailed: boolean,
  helpMeChooseModalVisible: boolean,
  incorporateLoginModalVisible: boolean,
  affiliationErrorModalVisible: boolean,
  isPersonsName: boolean,
  issueIndex: number,
  location: LocationT,
  locationInfoModalVisible: boolean,
  mrasSearchInfoModalVisible: boolean,
  mrasSearchResultCode: number,
  // The name being requested, and its original (pre-edit) value.
  name: string,
  nameOriginal: string,
  nameChoices: NameChoicesIF
  nameIsEnglish: boolean,
  nameAnalysisTimedOut: boolean,
  noCorpNum: boolean,
  noCorpDesignation: boolean,
  // The name request record being built, and its original snapshot.
  nr: Partial<NameRequestI>,
  nrData: NrDataIF,
  nrOriginal: Partial<NameRequestI>,
  nrRequestNameMap: RequestNameMapI[],
  nrRequiredModalVisible: boolean,
  pickEntityModalVisible: boolean,
  pickRequestTypeModalVisible: boolean,
  priorityRequest: boolean,
  quickSearch: boolean,
  quickSearchNames: object[],
  request_action_cd: string,
  request_jurisdiction_cd: string,
  requestExaminationOrProvideConsent: RequestOrConsentIF,
  requestActionOriginal: string,
  showActualInput: boolean,
  stats: StatsI | null,
  submissionTabNumber: number,
  submissionType: SubmissionTypeT | null,
  tabNumber: number,
  userCancelledAnalysis: boolean,
  isLoadingSubmission: boolean
  waitingAddressSearch: WaitingAddressSearchI | null
}
import {
  GetLookupsQuery,
  useDeleteLookupMutation,
  useDeleteLookupValueMutation,
  useGetLookupsQuery
} from 'client'
import React, { MouseEventHandler, useState } from 'react'
import { useQueryClient } from 'react-query'
import type { Column, Row, TableInstance } from 'react-table'
import { GqlType, notEmpty } from 'utils'

import type { TableMouseEventHandler } from '../../../types/react-table-config'
import { GraphQLError } from '../../components/GraphQLError'
import { Loader } from '../../components/Loader'
import { Page } from '../../components/Page'
import { Table } from '../../components/Table'
import { LookupsDialog } from './LookupsDialog'

// One lookup node (with its values) extracted from the GetLookups query shape.
export type LookupAndValues = GqlType<GetLookupsQuery, ['lookups', 'edges', number, 'node']>

// Table shows only the realm column.
const columns: Column<LookupAndValues>[] = [
  {
    accessor: 'realm',
  },
]

/**
 * Admin page listing all lookups, with add/edit/delete via LookupsDialog.
 * Deleting a lookup also deletes each of its lookup values first.
 */
const Lookups: React.FC = React.memo(() => {
  const [showEdit, setShowEdit] = useState(false)
  // Rows currently selected for editing; at most one is passed to the dialog.
  const [selection, setSelection] = useState<LookupAndValues[]>([])
  const deleteLookup = useDeleteLookupMutation()
  const deleteLookupValue = useDeleteLookupValueMutation()
  const queryClient = useQueryClient()

  const { isLoading, error, data, refetch } = useGetLookupsQuery()

  if (error) {
    return <GraphQLError error={error} />
  }
  if (isLoading || !data) {
    return <Loader />
  }

  // Flatten the relay-style edges into plain nodes, dropping null holes.
  const list: LookupAndValues[] = data.lookups!.edges.map((v) => v.node).filter(notEmpty)

  const onAdd: TableMouseEventHandler = () => () => {
    setShowEdit(true)
  }

  const onCloseEdit: MouseEventHandler = () => {
    setShowEdit(false)
    setSelection([])
    // Refresh the list after any dialog-side changes.
    // noinspection JSIgnoredPromiseFromCall
    queryClient.invalidateQueries('getLookups')
  }

  // Delete every selected lookup: first its values, then the lookup itself,
  // then refresh once all mutations have settled.
  const onDelete = (instance: TableInstance<LookupAndValues>) => () => {
    const updater = instance.selectedFlatRows
      .map((r) => r.original)
      .map((l) => {
        const updaters: Promise<any>[] = l.lookupValues.nodes.reduce((acc: Promise<any>[], lv) => {
          lv?.id && acc.push(deleteLookupValue.mutateAsync({ input: { id: lv.id } }))
          return acc
        }, [])
        updaters.push(
          deleteLookup.mutateAsync({
            input: {
              id: l.id
            }
          })
        )
        return updaters
      })
      .flat()
    Promise.allSettled(updater).then(() => queryClient.invalidateQueries('getLookups'))
  }

  const onEdit = (instance: TableInstance<LookupAndValues>) => () => {
    setShowEdit(true)
    setSelection(instance.selectedFlatRows.map((r) => r.original))
  }

  // Clicking a row opens the dialog pre-filled with that row.
  const onClick = (row: Row<LookupAndValues>) => {
    setShowEdit(true)
    setSelection([row.original])
  }

  return (
    <Page title='Lookups'>
      {showEdit && <LookupsDialog open={showEdit} onClose={onCloseEdit} initialValues={selection[0]} />}
      <Table<LookupAndValues>
        name='lookups'
        data={list}
        columns={columns}
        onAdd={onAdd}
        onDelete={onDelete}
        onEdit={onEdit}
        onClick={onClick}
        onRefresh={() => refetch()}
      />
    </Page>
  )
})

export default Lookups
using CalculatorApp.Models;
using Microsoft.AspNetCore.Mvc;

namespace CalculatorApp.Controllers
{
    /// <summary>
    /// Serves the calculator form, performs calculations, and shows history.
    /// </summary>
    public class HomeController : Controller
    {
        /// <summary>Renders the calculator form with the current model state.</summary>
        [HttpGet]
        public IActionResult Index(Calculator calculator)
        {
            return View(calculator);
        }

        /// <summary>
        /// Validates and performs a calculation, recording it in the history.
        /// </summary>
        public IActionResult Calculate(Calculator calculator)
        {
            // BUG FIX: validate BEFORE computing. Previously CalculateResult()
            // ran first, so a division by zero was evaluated (and its result
            // carried back to the view) before the error was even detected.
            if (calculator.RightOperand == 0 && calculator.Operator == "/")
            {
                TempData["Error"] = "Can not divide by 0";
            }
            else
            {
                calculator.CalculateResult();
                Data.CalculatorHistory.Add(calculator);
            }
            return RedirectToAction("Index", calculator);
        }

        /// <summary>Renders the list of previously performed calculations.</summary>
        public IActionResult History()
        {
            return View(Data.CalculatorHistory);
        }
    }
}
import { ActionType } from "../action"; type Ball = { x: number, y: number, z: number, } export type BallState = ReadonlyArray<Ball>; const initialState: BallState = [{ x: 0, y: 0, z:0 }]; export const balls = (state: BallState = initialState, action: ActionType): BallState => { switch (action.type) { case 'SET_BALL_STATE':{ return action.payload; } default: { return state; } } };
#!/bin/bash -e

# Directory containing this script (and the dockerfile it references).
THIS_SCRIPT_DIR=$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)

if [ $# -lt 1 ]; then
    echo "Usage: .build-faiss-debian.sh faiss_version [docker_image_name]";
    exit 1;
fi

FAISS_VERSION=$1
# Image name defaults to vektonn/faiss-lib; the faiss version doubles as the tag.
DOCKER_IMAGE_NAME=${2:-vektonn/faiss-lib}
DOCKER_IMAGE_NAME_AND_TAG=$DOCKER_IMAGE_NAME:$FAISS_VERSION

echo "Building faiss-lib docker image: $DOCKER_IMAGE_NAME_AND_TAG"
docker image build \
    --pull \
    --build-arg "FAISS_VERSION=$FAISS_VERSION" \
    --tag "$DOCKER_IMAGE_NAME_AND_TAG" \
    --file "$THIS_SCRIPT_DIR/.build-faiss-debian.dockerfile" \
    "$THIS_SCRIPT_DIR"
package co.anitrend.retrofit.graphql.data.bucket.model.upload

import co.anitrend.retrofit.graphql.data.bucket.model.node.BucketFileNode

import com.google.gson.annotations.SerializedName

/**
 * GraphQL response payload for a file-upload mutation.
 *
 * @property uploaded Node describing the stored file, mapped from the
 * response's "uploadFile" field.
 */
internal data class UploadResult(
    @SerializedName("uploadFile") val uploaded: BucketFileNode
)
package dev.vusi.ktor.authsample.feature

import io.ktor.auth.*

/**
 * Ktor authentication principal carrying the authenticated Firebase user's
 * unique id and e-mail address.
 */
data class FirebaseUserPrincipal(val uid: String, val emailAddress: String) : Principal
; RUN: opt < %s -instcombine -S | FileCheck %s

; Regression test: instcombine must leave calls made through a bitcast
; function pointer alone when the odd integer types involved (i7/i99/i999)
; cannot be converted — the "call ... bitcast" form must survive.

target datalayout = "e-p:32:32"
target triple = "i686-pc-linux-gnu"

define i32 @main() {
; CHECK-LABEL: @main(
; CHECK: call i32 bitcast
entry:
	%tmp = call i32 bitcast (i7* (i999*)* @ctime to i32 (i99*)*)( i99* null )
	ret i32 %tmp
}

define i7* @ctime(i999*) {
; CHECK-LABEL: @ctime(
; CHECK: call i7* bitcast
entry:
	%tmp = call i7* bitcast (i32 ()* @main to i7* ()*)( )
	ret i7* %tmp
}
<?php

namespace App;

/**
 * Greets a subject with a randomly chosen greeting from several languages.
 */
class Greeter
{
    /** @var string[] Pool of greetings to pick from at random. */
    private $greetings = [
        'Hello',
        'Bonjour',
        'Hola',
        'Hallo',
        'Ciao',
        'Olá',
        'Privét',
        'Hej',
        'Namasté',
        'Shalóm',
    ];

    /**
     * Returns "<random greeting>, <subject>".
     */
    public function __invoke(string $subject): string
    {
        $index = array_rand($this->greetings);
        $greeting = $this->greetings[$index];

        return sprintf('%s, %s', $greeting, $subject);
    }
}
import React, { Component } from 'react' import hoistNonReactStatic from 'hoist-non-react-statics' /** * { * loader: 异步获取组件的回调函数 * loading: loading动画,根据状态,传入不同的参数。 * timeout: 如果超过这个时间,那么会给loading组件传递一个timeOut为true的props * } * @param {object} opt 配置对象 * @returns {Component} 返回一个react组件 * @constructor */ export default function EnhanceLoadable(opt) { const { loader, timeout, doneCallback, errorCallback, loading: LoadingComponent, } = opt class EnhanceComponent extends Component { constructor(props) { super(props); this.state = { component: null, timeOut: false, error: null, }; } componentDidMount() { const { component } = this.state this.fetchComponent() if (timeout) { setTimeout(() => { if (component) { this.setState({ timeOut: true }) } }, timeout) } } componentDidUpdate(preProps, preState) { const { component } = this.state if (!preState.component && component) { if (doneCallback && typeof doneCallback === 'function') { doneCallback(component) } } } // fetchComponent = () => { // try { // const { default: component } = loader(); // // 为高阶组件设置displayName // const WrappedComponentName = component.displayName // || component.name // || 'Component' // // EnhanceComponent.displayName = `hocLoadable(${WrappedComponentName})` // // hoistNonReactStatic(EnhanceComponent, component) // // this.setState({ // component, // }); // } catch (e) { // this.setState({ // error: e, // }) // if (errorCallback && typeof errorCallback === 'function') { // errorCallback() // } // } // } fetchComponent = () => { const promise = loader() promise .then((res) => { const { default: component } = res // 为高阶组件设置displayName const WrappedComponentName = component.displayName || component.name || 'Component' EnhanceComponent.displayName = `hocLoadable(${WrappedComponentName})` hoistNonReactStatic(EnhanceComponent, component) this.setState({ component, }); }, (e) => { this.setState({ error: e, }) if (errorCallback && typeof errorCallback === 'function') { errorCallback() } }) } render() { const { 
component: C, timeOut, error } = this.state return C ? <C {...this.props} /> : ( <LoadingComponent error={error} timeOut={timeOut} retry={this.fetchComponent} /> ) } } return EnhanceComponent }
require "bunto/feed_meta_tag" require "bunto/bunto-feed" require "bunto/page_without_a_file" require "bunto/strip_whitespace"
# Install shell-prompt helpers for the current user.
Install-Module posh-git -Scope CurrentUser
Install-Module oh-my-posh -Scope CurrentUser

# Configure git status/diff colours.
# BUG FIX: the colour slot is "added", not "add" (see git-config(1));
# also use the classic "git config <name> <value>" form, which works on
# every git version (the "set" subcommand requires Git >= 2.46).
git config color.status.added 'green bold'
git config color.status.changed 'red bold'
git config color.status.untracked 'red bold'
git config color.diff.old 'red bold'
import { choice, maybe, sequenceOf, str, avoid, regexp_parser } from "../../combinator";
import assert from "assert";

// First character of an identifier: letter or underscore.
const IdentStart = /^[a-zA-Z_]/;
// Remaining identifier characters: letters, digits, underscores.
const IdentBody = /^[a-zA-Z_0-9]+/;

// Parser matching any reserved word; identifiers must NOT match these.
// (Note: "true"/"false" are deliberately absent — see asserts below.)
export const strict_keywords = choice(Array.from(new Set([
    "break",
    "continue",
    "for",
    "of",
    "in",
    "on",
    'as',
    'if',
    "has",
    "while",
    "match",
    "return",
    "use",
    "yield",
]).values()).map((r) => {
    return str(r).map(z => {
        return { kind: "#strict #keyword literal", value: z }
    })
})).with_name('strict_keywords');

// Identifier: IdentStart followed by an optional IdentBody, rejecting keywords.
export const ident = avoid(strict_keywords)(sequenceOf(
    [regexp_parser("IdentStart", IdentStart), maybe(regexp_parser("IdentBody", IdentBody))]
)).map(result => {
    return { kind: "ident", value: result[0] + result[1] };
}).with_name('ident');

// Like `ident`, but additionally rejects the bare "_" (mute/discard) name.
export const ident_without_mute = avoid(strict_keywords)(sequenceOf(
    [regexp_parser("IdentStartWithoutMute", IdentStart), maybe(regexp_parser("IdentBody", IdentBody))]
)).map(result => {
    return { kind: "ident", value: result[0] + result[1] };
}).failOn((r) => r.value === '_', 'single _ not supported').with_name('ident_without_mute');

// Self-checks run at module load.
assert.deepStrictEqual(ident.run("break").result.isErr(), true);
assert.strictEqual(ident.run("match").result.isErr(), true);
assert.strictEqual(ident.run("false").result.isErr(), false);
assert.strictEqual(ident.run("bool").result.isErr(), false);
assert.deepStrictEqual(ident.run("true").result.unwrap(), { kind: 'ident', value: 'true' });
assert.deepStrictEqual(strict_keywords.run("break").result.unwrap(), { kind: '#strict #keyword literal', value: 'break' });
package com.xjt.controller;

import com.xjt.common.Request;
import com.xjt.common.Result;
import com.xjt.entity.goods.*;
import com.xjt.service.IGoodsService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;

import java.util.List;

/**
 * REST endpoints for goods management under /api/goods.
 * Thin delegation layer: each handler unwraps the {@link Request} envelope
 * and forwards the payload to {@link IGoodsService}.
 */
@Controller
@RequestMapping("/api/goods")
public class GoodsController {

    @Autowired
    private IGoodsService goodsService;

    /**
     * Add a new goods item.
     *
     * @param request envelope carrying the goods to create
     * @throws Exception propagated from the service layer
     */
    @RequestMapping("/add")
    @ResponseBody
    @CrossOrigin
    public void add(
            @RequestBody Request<AddRequestEntity> request) throws Exception {
        goodsService.add(request.getData());
    }

    /**
     * Update an existing goods item.
     *
     * @param request envelope carrying the updated goods fields
     * @throws Exception propagated from the service layer
     */
    @RequestMapping("/update")
    @ResponseBody
    @CrossOrigin
    public void update(
            @RequestBody Request<UpdateGoodsRequestEntity> request) throws Exception {
        goodsService.update(request.getData());
    }

    /**
     * List / search goods matching the given criteria.
     *
     * @param request envelope carrying the search criteria
     * @return a {@link Result} whose data is the list of matching goods
     * @throws Exception propagated from the service layer
     */
    @RequestMapping("/search")
    @ResponseBody
    @CrossOrigin
    public Result search(@RequestBody Request<SearchGoodsRequestEntity> request) throws Exception {
        Result result = new Result();
        List<SearchGoodsResponseEntity> detailResponseEntity = goodsService.search(request.getData());
        result.setData(detailResponseEntity);
        return result;
    }

    /**
     * Add a merchant reply to an existing comment.
     *
     * @param request envelope carrying the reply payload
     * @throws Exception propagated from the service layer
     */
    @RequestMapping("/addCommendReply")
    @ResponseBody
    @CrossOrigin
    public void addCommendReply(
            @RequestBody Request<AddCommendReplyRequest> request) throws Exception {
        goodsService.addCommendReply(request.getData());
    }

    /**
     * Add a customer comment on a goods item.
     *
     * @param request envelope carrying the comment payload
     * @throws Exception propagated from the service layer
     */
    @RequestMapping("/addCommend")
    @ResponseBody
    @CrossOrigin
    public void addCommend(
            @RequestBody Request<AddCommendRequest> request) throws Exception {
        goodsService.addCommend(request.getData());
    }
}
<?php

use Illuminate\Foundation\Inspiring;
use App\Task;
use Carbon\Carbon;

/*
|--------------------------------------------------------------------------
| Console Routes
|--------------------------------------------------------------------------
|
| This file is where you may define all of your Closure based console
| commands. Each Closure is bound to a command instance allowing a
| simple approach to interacting with each command's IO methods.
|
*/

Artisan::command('inspire', function () {
    $this->comment(Inspiring::quote());
})->describe('Display an inspiring quote');

// Un-checks recurring/predefined tasks that were completed before today so
// they reappear as open tasks (intended to run as a scheduled command).
Artisan::command('recurring_tasks:refresh', function () {
    // Boundaries of "today" in server time.
    $start = (new Carbon('now'))->hour(0)->minute(0)->second(0);
    $end = (new Carbon('now'))->hour(23)->minute(59)->second(59);

    // Checked tasks that belong to a predefined template and were NOT
    // created today (i.e. completed on an earlier day).
    $tasks = Task::join('lists', 'lists.id', '=', 'tasks.list_id')
        ->select(DB::raw('tasks.*, lists.recurring'))
        ->where('checked', 1)
        ->where(function($q) {
            $q->whereNotNull('predefined_id');
            // ->orWhere('recurring', 1);
        })
        ->whereNotBetween('tasks.created_at', [$start , $end]);

    // Reset each matching task back to unchecked.
    if ($tasks->count()) {
        foreach ($tasks->get() as $task) {
            $task->update([
                'checked' => 0,
            ]);
        }
    }

    // NOTE(review): this re-runs the query AFTER the updates above, so the
    // just-reset tasks no longer match `checked = 1` — presumably this prints
    // an empty collection; confirm whether logging the pre-update set was
    // intended.
    $this->comment($tasks->get());
})->describe('Refresh predefined recurring tasks if checked.');
package com.dreamlab.edgefs.misc; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.dreamlab.edgefs.controlplane.Fog; import com.dreamlab.edgefs.model.StreamMetadataComparator; import com.dreamlab.edgefs.thrift.ByteTypeStreamMetadata; import com.dreamlab.edgefs.thrift.DoubleTypeStreamMetadata; import com.dreamlab.edgefs.thrift.SQueryRequest; public class StaticStreamMetaComparator { private static final Logger LOGGER = LoggerFactory.getLogger(StreamMetadataComparator.class); private static final String FIELD_VALUE = "value"; private static final String FIELD_UPDATABLE = "updatable"; private static final String FIELD_VERSION = "version"; private static final String FIELD_OWNER = "owner"; private static final String FIELD_ENDTIME = "endTime"; private static final String FIELD_OTHER_PROPS = "otherProperties"; public static Set<String> retrieveStreamId(SQueryRequest squery, Fog fog) { /** Set of stream ids **/ Set<String> myStreamIdSet = new HashSet<String>(fog.getStreamMbIdMap().keySet()) ; LOGGER.info("BEFORE : The total streams registered are "+myStreamIdSet.toString()); Field[] fields = SQueryRequest.class.getDeclaredFields(); for (Field fd : fields) { try { if (fd.getName().startsWith("_")) continue; // no need to match the static fields if (Modifier.isStatic(fd.getModifiers())) continue; Object myObj = fd.get(squery); if (myObj == null) { continue; } Field valueField = fd.getType().getDeclaredField(FIELD_VALUE); /** Min replica **/ if (fd.getName().compareToIgnoreCase(("minReplica")) == 0) { String myStr = valueField.get(myObj) + ""; Integer myInt = Integer.valueOf(myStr); Set<String> streamIdSet = fog.getStreamMetaStreamIdMap().get(myStr); myStreamIdSet.retainAll(streamIdSet); } /** Reliability **/ if 
(fd.getName().compareToIgnoreCase(("reliability")) == 0) { String myStr = valueField.get(myObj) + ""; Set<String> streamIdSet = fog.getStreamMetaStreamIdMap().get(myStr); myStreamIdSet.retainAll(streamIdSet); } } catch (Exception e) { e.printStackTrace(); } } LOGGER.info("AFTER : The mystream id set is " + myStreamIdSet.toString()); return myStreamIdSet; } public static void main(String[] args) { SQueryRequest myReq = new SQueryRequest(); myReq.setMinReplica(new ByteTypeStreamMetadata((byte) 2, false)); myReq.setReliability(new DoubleTypeStreamMetadata(0.99, false)); Fog myFog = new Fog(); myFog.setStreamMbIdMap(new HashMap<String, Set<Long>>()); myFog.getStreamMbIdMap().put("streamid1", null); myFog.getStreamMbIdMap().put("streamid2", null); myFog.getStreamMbIdMap().put("streamid3", null); ArrayList<String> streamIdList = new ArrayList<String>(); streamIdList.add("streamid3"); streamIdList.add("streamid2"); myFog.setStreamMetaStreamIdMap(new ConcurrentHashMap<String, Set<String>>()); myFog.getStreamMetaStreamIdMap().put(""+2, new HashSet<String>(streamIdList)); streamIdList.clear(); streamIdList.add("streamid3"); streamIdList.add("streamid1"); myFog.getStreamMetaStreamIdMap().put("0.99", new HashSet<String>(streamIdList)); retrieveStreamId(myReq, myFog); } }
<?php

/**
 * Generates the "mine tunnel" labyrinth room and its variants.
 */
class LabyRoomMinetunnel extends LabyRoomHelper
{
    /**
     * Entry point: typed rooms get a central pillar; untyped rooms get a
     * randomly chosen tunnel variant.
     *
     * Cleanup: removed unused locals $w/$h (also from generateTunnelNormal).
     *
     * @param object $oRoom room being generated
     */
    public function generate($oRoom)
    {
        if ($oRoom->getRoomData('type') !== '') {
            $oRoom->generateCentralPillar(BLOCK_WALL, $oRoom->getWidth() - 6, $oRoom->getHeight() - 6);
        } else {
            switch ($this->oRnd->getRandom(0, 1)) {
                case 0:
                    $this->generateTunnelNormal($oRoom);
                    break;
                case 1:
                    $this->generateTunnelGrate($oRoom);
                    break;
            }
        }
    }

    /**
     * Plain tunnel: a hallway through the room and a thinner hallway on its
     * perimeter, both aligned to the room's door mask.
     *
     * @param object $oRoom room being generated
     */
    public function generateTunnelNormal($oRoom)
    {
        $oRoom->generateDunHallway(BLOCK_WALL, $oRoom->getRoomData('doormask'), 3);
        $oRoom->getPeri()->generateDunHallway(BLOCK_WALL, $oRoom->getRoomData('doormask'), 1);
    }

    /**
     * Grated tunnel: the plain tunnel plus a forest of grate pillars and
     * solid wall corners in all four quadrants.
     *
     * @param object $oRoom room being generated
     */
    public function generateTunnelGrate($oRoom)
    {
        $w = $oRoom->getWidth();
        $h = $oRoom->getHeight();
        $this->generateTunnelNormal($oRoom);
        $oRoom->generatePillarForest(BLOCK_WALL_GRATE, $w, $h, 0, 0, 2, 2);
        // Corner size is half the room minus 2 (>> 1 is integer halving).
        $oRoom->generateCorner(BLOCK_WALL, 0, ($w >> 1) - 2, ($h >> 1) - 2);
        $oRoom->generateCorner(BLOCK_WALL, 1, ($w >> 1) - 2, ($h >> 1) - 2);
        $oRoom->generateCorner(BLOCK_WALL, 2, ($w >> 1) - 2, ($h >> 1) - 2);
        $oRoom->generateCorner(BLOCK_WALL, 3, ($w >> 1) - 2, ($h >> 1) - 2);
    }
}
import { normalizeResponse } from "~/plugins/Repository/index"

// Fix: suite name previously misspelled the function under test
// ("normaliseResponse" vs the imported normalizeResponse).
describe("normalizeResponse", () => {
  it("normalizes a successful api response", () => {
    // A representative successful API payload.
    const resp = {
      code: 200,
      message: "foo",
      body: "bar",
    }

    const sut = normalizeResponse(resp)

    // Normalization must preserve code, message, and body.
    expect(sut.code).toEqual(resp.code)
    expect(sut.message).toEqual(resp.message)
    expect(sut.body).toEqual(resp.body)
  })
})
/* MIT License Copyright (c) 2016 Edward Rowe, RedBlueGames Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/

namespace RedBlueGames.MulliganRenamer
{
    using System.Collections;
    using System.Collections.Generic;
    using UnityEditor;
    using UnityEngine;
    using NUnit.Framework;

    using Object = UnityEngine.Object;

    /// <summary>
    /// Integration tests for <c>BulkRenamer</c>. Each test builds real assets
    /// (prefabs or sprite sheets) under Assets/TestFixtures, runs a rename
    /// operation sequence, and asserts on the resulting asset names. The
    /// fixture folder is created per-test and deleted in TearDown.
    /// </summary>
    public class BulkRenamerTests
    {
        private static readonly string TestFixturesFolderName = "TestFixtures";
        private static readonly string TestFixturesPath = string.Concat("Assets/", TestFixturesFolderName);
        private static readonly string TestFixturesDirectory = string.Concat(TestFixturesPath, "/");

        /// <summary>
        /// Saves <paramref name="originalObject"/> as a prefab asset and returns it.
        /// NOTE(review): the pre-2018.3 branch ignores <paramref name="newName"/>
        /// and always writes "Original.prefab" — confirm whether that is intended
        /// for legacy editors, since several callers pass other names.
        /// </summary>
        private GameObject CreatePrefabFromGameObject(GameObject originalObject, string newName)
        {
#if UNITY_2018_3_OR_NEWER
            return PrefabUtility.SaveAsPrefabAsset(originalObject, newName);
#else
            return PrefabUtility.CreatePrefab(string.Concat(TestFixturesDirectory, "Original.prefab"), originalObject);
#endif
        }

        // Create the fixture folder (plus a SubDirectory used by the
        // different-path conflict test) before each test.
        [SetUp]
        public void Setup()
        {
            AssetDatabase.CreateFolder("Assets", TestFixturesFolderName);
            AssetDatabase.CreateFolder(TestFixturesPath, "SubDirectory");
        }

        // Remove all fixture assets after each test.
        [TearDown]
        public void TearDown()
        {
            AssetDatabase.DeleteAsset(TestFixturesPath);
        }

        // A single prefab asset gets its name fully replaced.
        [Test]
        public void RenameObjects_SingleAsset_Renames()
        {
            var gameObjectAsset = this.CreatePrefabFromGameObject(
                new GameObject("Original"),
                string.Concat(TestFixturesDirectory, "Original.prefab"));
            var singleAsset = new List<Object>() { gameObjectAsset };

            var replaceNameOp = new ReplaceNameOperation();
            replaceNameOp.NewName = "NewName";

            var renameSequence = new RenameOperationSequence<IRenameOperation>();
            renameSequence.Add(replaceNameOp);

            var bulkRenamer = new BulkRenamer(renameSequence);
            bulkRenamer.RenameObjects(singleAsset, true);

            Assert.AreEqual("NewName", singleAsset[0].name);
        }

        // A string-replace operation applies across multiple prefab assets.
        [Test]
        public void RenameObjects_MultipleAssets_Renames()
        {
            // Arrange
            var multipleObject0 = this.CreatePrefabFromGameObject(
                new GameObject("Asset0"),
                string.Concat(TestFixturesDirectory, "Asset0.prefab"));
            var multipleObject1 = this.CreatePrefabFromGameObject(
                new GameObject("Asset1"),
                string.Concat(TestFixturesDirectory, "Asset1.prefab"));
            var multipleAssets = new List<Object>() { multipleObject0, multipleObject1 };

            var replaceStringOp = new ReplaceStringOperation();
            replaceStringOp.SearchString = "Asset";
            replaceStringOp.ReplacementString = "Thingy";

            var renameSequence = new RenameOperationSequence<IRenameOperation>();
            renameSequence.Add(replaceStringOp);

            var bulkRenamer = new BulkRenamer(renameSequence);
            bulkRenamer.RenameObjects(multipleAssets, true);

            var expectedNames = new List<string>
            {
                "Thingy0",
                "Thingy1"
            };

            var resultingNames = new List<string>();
            foreach (var obj in multipleAssets)
            {
                resultingNames.Add(obj.name);
            }

            Assert.AreEqual(expectedNames, resultingNames);
        }

        // Scene GameObjects (not assets) can also be renamed in bulk.
        [Test]
        public void RenameObjects_MultipleGameObjects_Renames()
        {
            var gameObject0 = new GameObject("GameObject0");
            var gameObject1 = new GameObject("GameObject1");
            var gameObjects = new List<Object>() { gameObject0, gameObject1 };

            var replaceStringOp = new ReplaceStringOperation();
            replaceStringOp.SearchString = "Object";
            replaceStringOp.ReplacementString = "Thingy";

            var renameSequence = new RenameOperationSequence<IRenameOperation>();
            renameSequence.Add(replaceStringOp);

            var bulkRenamer = new BulkRenamer(renameSequence);
            bulkRenamer.RenameObjects(gameObjects, true);

            var expectedNames = new List<string>
            {
                "GameThingy0",
                "GameThingy1"
            };

            var resultingNames = new List<string>();
            foreach (var obj in gameObjects)
            {
                resultingNames.Add(obj.name);
            }

            Assert.AreEqual(expectedNames, resultingNames);
        }

        // Sub-sprites of a multi-sprite texture get replace+enumerate applied.
        [Test]
        public void RenameObjects_Spritesheet_Renames()
        {
            // Arrange
            var spriteSheetConfig = new SpriteSheetGenerationConfig(2, "Texture.png");
            spriteSheetConfig.NamePrefix = "Texture_Sprite";
            var textureWithSprites = this.SetupSpriteSheet(spriteSheetConfig);

            var replaceNameOp = new ReplaceNameOperation();
            replaceNameOp.NewName = "NewSprite";

            var enumerateOp = new EnumerateOperation();
            enumerateOp.StartingCount = 1;

            var renameSequence = new RenameOperationSequence<IRenameOperation>();
            renameSequence.Add(replaceNameOp);
            renameSequence.Add(enumerateOp);

            var path = AssetDatabase.GetAssetPath(textureWithSprites);
            var allAssetsAtPath = AssetDatabase.LoadAllAssetsAtPath(path);
            var allSprites = new List<Object>();
            foreach (var asset in allAssetsAtPath)
            {
                if (asset is Sprite)
                {
                    allSprites.Add(asset);
                }
            }

            var bulkRenamer = new BulkRenamer(renameSequence);
            bulkRenamer.RenameObjects(allSprites, true);

            var expectedNames = new List<string>
            {
                "NewSprite1",
                "NewSprite2",
                "NewSprite3",
                "NewSprite4"
            };

            var resultingNames = new List<string>();
            foreach (var sprite in allSprites)
            {
                resultingNames.Add(sprite.name);
            }

            Assert.AreEqual(expectedNames, resultingNames);
        }

        // Remove-digits + enumerate, where each result name collides with the
        // next object's original name (dependent changes must be ordered).
        [Test]
        public void RenameObjects_EnumeratedObjectsWithDependentChanges_Renames()
        {
            // Arrange
            var enumeratedObject0 = this.CreatePrefabFromGameObject(
                new GameObject("EnumeratedObject0"),
                string.Concat(TestFixturesDirectory, "EnumeratedObject0.prefab"));
            var enumeratedObject1 = this.CreatePrefabFromGameObject(
                new GameObject("EnumeratedObject1"),
                string.Concat(TestFixturesDirectory, "EnumeratedObject1.prefab"));
            var enumeratedObject2 = this.CreatePrefabFromGameObject(
                new GameObject("EnumeratedObject2"),
                string.Concat(TestFixturesDirectory, "EnumeratedObject2.prefab"));
            var enumeratedObjects = new List<Object>()
            {
                enumeratedObject0,
                enumeratedObject1,
                enumeratedObject2,
            };

            var removeCharactersOp = new RemoveCharactersOperation();
            var removeCharacterOptions = new RemoveCharactersOperation.RenameOptions();
            removeCharacterOptions.CharactersToRemove = "\\d";
            removeCharacterOptions.CharactersAreRegex = true;
            removeCharacterOptions.IsCaseSensitive = false;
            removeCharactersOp.SetOptions(removeCharacterOptions);

            var enumerateOp = new EnumerateOperation();
            enumerateOp.StartingCount = 1;

            var renameSequence = new RenameOperationSequence<IRenameOperation>();
            renameSequence.Add(removeCharactersOp);
            renameSequence.Add(enumerateOp);

            // Act
            var bulkRenamer = new BulkRenamer(renameSequence);
            bulkRenamer.RenameObjects(enumeratedObjects, true);

            // Assert
            // Build two lists to compare against because Assert displays their differences nicely in its output.
            var expectedNames = new List<string>
            {
                "EnumeratedObject1",
                "EnumeratedObject2",
                "EnumeratedObject3"
            };

            var resultingNames = new List<string>();
            foreach (var obj in enumeratedObjects)
            {
                resultingNames.Add(obj.name);
            }

            Assert.AreEqual(expectedNames, resultingNames);
        }

        // Renaming onto a name already taken by another object in the same
        // rename group (and same folder) is skipped, not overwritten.
        [Test]
        public void RenameObjects_ChangeObjectToExistingObjectName_SkipsRename()
        {
            // Arrange
            var conflictingObject0 = this.CreatePrefabFromGameObject(
                new GameObject("ConflictingObject0"),
                string.Concat(TestFixturesDirectory, "ConflictingObject0.prefab"));
            var existingObject = this.CreatePrefabFromGameObject(
                new GameObject("ExistingObject"),
                string.Concat(TestFixturesDirectory, "ExistingObject.prefab"));
            var conflictingObjectsWithoutAllNamesChanging = new List<Object>();
            conflictingObjectsWithoutAllNamesChanging.Add(conflictingObject0);
            conflictingObjectsWithoutAllNamesChanging.Add(existingObject);

            var replaceFirstNameOp = new ReplaceStringOperation();
            replaceFirstNameOp.SearchString = "ConflictingObject0";
            replaceFirstNameOp.ReplacementString = "ExistingObject";

            var renameSequence = new RenameOperationSequence<IRenameOperation>();
            renameSequence.Add(replaceFirstNameOp);

            // Act and Assert
            var bulkRenamer = new BulkRenamer(renameSequence);
            bulkRenamer.RenameObjects(conflictingObjectsWithoutAllNamesChanging);

            var expectedName = "ConflictingObject0";
            Assert.AreEqual(expectedName, conflictingObject0.name);
        }

        // The conflict is also detected against assets that are NOT part of
        // the rename group but live at the destination path.
        [Test]
        public void RenameObjects_ChangeObjectToExistingObjectNameNotInRenameGroup_SkipsRename()
        {
            // Arrange
            var conflictingObject0 = this.CreatePrefabFromGameObject(
                new GameObject("ConflictingObject0"),
                string.Concat(TestFixturesDirectory, "ConflictingObject0.prefab"));
            this.CreatePrefabFromGameObject(
                new GameObject("ExistingObject"),
                string.Concat(TestFixturesDirectory, "ExistingObject.prefab"));
            var conflictingObjectsWithoutAllNamesChanging = new List<Object>();
            conflictingObjectsWithoutAllNamesChanging.Add(conflictingObject0);

            var replaceFirstNameOp = new ReplaceStringOperation();
            replaceFirstNameOp.SearchString = "ConflictingObject0";
            replaceFirstNameOp.ReplacementString = "ExistingObject";

            var renameSequence = new RenameOperationSequence<IRenameOperation>();
            renameSequence.Add(replaceFirstNameOp);

            // Act and Assert
            var bulkRenamer = new BulkRenamer(renameSequence);
            bulkRenamer.RenameObjects(conflictingObjectsWithoutAllNamesChanging);

            var expectedName = "ConflictingObject0";
            Assert.AreEqual(expectedName, conflictingObject0.name);
        }

        // Identical names in DIFFERENT folders are not a conflict.
        [Test]
        public void RenameObjects_RenameObjectToExistingObjectNameButAtDifferentPath_Succeeds()
        {
            // Arrange
            var conflictingObject0 = this.CreatePrefabFromGameObject(
                new GameObject("ConflictingObject0"),
                string.Concat(TestFixturesDirectory, "ConflictingObject0.prefab"));
            var existingObject = this.CreatePrefabFromGameObject(
                new GameObject("ExistingObject"),
                string.Concat(TestFixturesDirectory, "SubDirectory/ExistingObject.prefab"));

            var replaceFirstNameOp = new ReplaceStringOperation();
            replaceFirstNameOp.SearchString = "ConflictingObject0";
            replaceFirstNameOp.ReplacementString = "ExistingObject";

            var renameSequence = new RenameOperationSequence<IRenameOperation>();
            renameSequence.Add(replaceFirstNameOp);

            var expectedNames = new List<string>
            {
                "ExistingObject",
                "ExistingObject"
            };
            var objectsToRename = new List<UnityEngine.Object>()
            {
                conflictingObject0,
                existingObject,
            };

            // Act and Assert
            var bulkRenamer = new BulkRenamer(renameSequence);
            bulkRenamer.RenameObjects(objectsToRename);

            var resultingNames = new List<string>();
            foreach (var obj in objectsToRename)
            {
                resultingNames.Add(obj.name);
            }

            Assert.AreEqual(expectedNames, resultingNames);
        }

        // Regression test: renaming one sprite whose name is a substring of
        // sibling sprite names must not touch the siblings.
        [Test]
        public void RenameObjects_SpritesheetWithTargetNameAsSubstringInMultipleSprites_Renames()
        {
            // Tests Issue #143: https://github.com/redbluegames/unity-mulligan-renamer/issues/143
            // Arrange
            var spriteSheetConfig = new SpriteSheetGenerationConfig(4, "Texture.png");
            var targetSpriteName = "Texture_Sprite1";
            spriteSheetConfig.NamePrefix = "Texture_Sprite";
            var textureWithSprites = this.SetupSpriteSheet(spriteSheetConfig);

            var replaceNameOp = new ReplaceStringOperation();
            replaceNameOp.SearchString = targetSpriteName;
            replaceNameOp.ReplacementString = "CoolSprite";

            var renameSequence = new RenameOperationSequence<IRenameOperation>();
            renameSequence.Add(replaceNameOp);

            var path = AssetDatabase.GetAssetPath(textureWithSprites);
            var allAssetsAtPath = AssetDatabase.LoadAllAssetsAtPath(path);
            var allSprites = new List<Object>();
            Object targetSprite = null;
            foreach (var asset in allAssetsAtPath)
            {
                if (asset is Sprite)
                {
                    allSprites.Add(asset);
                    if (asset.name == targetSpriteName)
                    {
                        targetSprite = asset;
                    }
                }
            }

            // Act
            var bulkRenamer = new BulkRenamer(renameSequence);
            bulkRenamer.RenameObjects(new List<Object>() { targetSprite }, true);

            // Assert
            var expectedNames = new List<string>
            {
                "CoolSprite",
                "Texture_Sprite2",
                "Texture_Sprite3",
                "Texture_Sprite4",
                "Texture_Sprite5",
                "Texture_Sprite6",
                "Texture_Sprite7",
                "Texture_Sprite8",
                "Texture_Sprite9",
                "Texture_Sprite10",
                "Texture_Sprite11",
                "Texture_Sprite12",
                "Texture_Sprite13",
                "Texture_Sprite14",
                "Texture_Sprite15",
                "Texture_Sprite16",
            };

            var resultingNames = new List<string>();
            foreach (var sprite in allSprites)
            {
                resultingNames.Add(sprite.name);
            }

            // In order to not depend on how these sprites are Loaded, we check Contains instead of comparing
            // the lists directly
            Assert.That(resultingNames.Count, Is.EqualTo(expectedNames.Count));
            foreach (var name in resultingNames)
            {
                Assert.That(expectedNames.Contains(name), "Expected names did not contain name: " + name);
            }
        }

        // Regression test: renaming a texture and its sub-sprite together works.
        [Test]
        public void RenameObjects_SpriteAndTexture_Renames()
        {
            // Tests Issue #139: https://github.com/redbluegames/unity-mulligan-renamer/issues/139
            // Arrange
            var spriteSheetConfig = new SpriteSheetGenerationConfig(1, "Texture.png");
            spriteSheetConfig.NamePrefix = "Texture_Sprite";
            var textureWithSprites = this.SetupSpriteSheet(spriteSheetConfig);

            var replaceNameOp = new ReplaceStringOperation();
            replaceNameOp.SearchString = "Texture";
            replaceNameOp.ReplacementString = "Cool";

            var renameSequence = new RenameOperationSequence<IRenameOperation>();
            renameSequence.Add(replaceNameOp);

            var path = AssetDatabase.GetAssetPath(textureWithSprites);
            var allAssetsAtPath = AssetDatabase.LoadAllAssetsAtPath(path);

            // Act
            var bulkRenamer = new BulkRenamer(renameSequence);
            bulkRenamer.RenameObjects(new List<Object>(allAssetsAtPath), true);

            // Assert
            var resultingNames = new List<string>();
            foreach (var asset in allAssetsAtPath)
            {
                resultingNames.Add(asset.name);
            }

            var expectedNames = new List<string>
            {
                "Cool",
                "Cool_Sprite1",
            };

            Assert.AreEqual(expectedNames, resultingNames);
        }

        // Regression test: intermediate names that overlap existing sprite
        // names (remove digits, then re-enumerate) still resolve correctly.
        [Test]
        public void RenameObjects_SpritesheetWithNamesThatWillOverlap_Renames()
        {
            // Tests Issue #126: https://github.com/redbluegames/unity-mulligan-renamer/issues/126
            // Arrange
            var spriteSheetConfig = new SpriteSheetGenerationConfig(2, "Texture.png");
            spriteSheetConfig.NamePrefix = "Texture_Sprite";
            spriteSheetConfig.UseZeroBasedIndexing = true;
            var textureWithSprites = this.SetupSpriteSheet(spriteSheetConfig);

            var removeNumbersOp = new RemoveCharactersOperation();
            removeNumbersOp.SetOptionPreset(RemoveCharactersOperation.PresetID.Numbers);

            var enumerateOp = new EnumerateOperation();
            enumerateOp.StartingCount = 1;
            enumerateOp.Increment = 1;

            var renameSequence = new RenameOperationSequence<IRenameOperation>();
            renameSequence.Add(removeNumbersOp);
            renameSequence.Add(enumerateOp);

            var path = AssetDatabase.GetAssetPath(textureWithSprites);
            var allAssetsAtPath = AssetDatabase.LoadAllAssetsAtPath(path);
            var allSprites = new List<Object>();
            foreach (var asset in allAssetsAtPath)
            {
                if (asset is Sprite)
                {
                    allSprites.Add(asset);
                }
            }

            // Act
            var bulkRenamer = new BulkRenamer(renameSequence);
            bulkRenamer.RenameObjects(allSprites, true);

            // Assert
            var expectedNames = new List<string>
            {
                "Texture_Sprite1",
                "Texture_Sprite2",
                "Texture_Sprite3",
                "Texture_Sprite4",
            };

            var resultingNames = new List<string>();
            foreach (var sprite in allSprites)
            {
                resultingNames.Add(sprite.name);
            }

            Assert.AreEqual(expectedNames, resultingNames);
        }

        // Regression test: sprite names that START with a digit still rename.
        [Test]
        public void RenameObjects_SpritesheetsWithPrefixedNumbers_Renames()
        {
            // Tests Issue #163: https://github.com/redbluegames/unity-mulligan-renamer/issues/163
            // Arrange
            var spriteSheetConfig = new SpriteSheetGenerationConfig(2, "NumberedSprites.png");
            spriteSheetConfig.NamePrefix = "0_NumberedSprites";
            spriteSheetConfig.UseZeroBasedIndexing = true;
            var textureWithSprites = this.SetupSpriteSheet(spriteSheetConfig);

            var replaceStringOperation = new ReplaceStringOperation();
            replaceStringOperation.SearchString = "Numbered";
            replaceStringOperation.ReplacementString = string.Empty;

            var renameSequence = new RenameOperationSequence<IRenameOperation>();
            renameSequence.Add(replaceStringOperation);

            var path = AssetDatabase.GetAssetPath(textureWithSprites);
            var allAssetsAtPath = AssetDatabase.LoadAllAssetsAtPath(path);
            var allSprites = new List<Object>();
            foreach (var asset in allAssetsAtPath)
            {
                if (asset is Sprite)
                {
                    allSprites.Add(asset);
                }
            }

            // Act
            var bulkRenamer = new BulkRenamer(renameSequence);
            bulkRenamer.RenameObjects(allSprites, true);

            // Assert
            var expectedNames = new List<string>
            {
                "0_Sprites0",
                "0_Sprites1",
                "0_Sprites2",
                "0_Sprites3",
            };

            var resultingNames = new List<string>();
            foreach (var sprite in allSprites)
            {
                resultingNames.Add(sprite.name);
            }

            // In order to not depend on how these sprites are Loaded, we check Contains instead of comparing
            // the lists directly
            Assert.That(resultingNames.Count, Is.EqualTo(expectedNames.Count));
            foreach (var name in resultingNames)
            {
                Assert.That(expectedNames.Contains(name), "Expected names did not contain name: " + name);
            }
        }

        /// <summary>
        /// Writes a cyan PNG to the fixture folder, imports it as a
        /// multi-sprite texture, and slices it into a CellsPerSide x
        /// CellsPerSide grid of 32px cells named NamePrefix + index.
        /// </summary>
        private Texture2D SetupSpriteSheet(SpriteSheetGenerationConfig config)
        {
            var cellSize = 32;
            var texture = new Texture2D(
                cellSize * config.CellsPerSide,
                cellSize * config.CellsPerSide,
                TextureFormat.ARGB32,
                false,
                true);
            var size = Vector2.one * cellSize * config.CellsPerSide;
            for (int x = 0; x < size.x; ++x)
            {
                for (int y = 0; y < size.y; ++y)
                {
                    texture.SetPixel(x, y, Color.cyan);
                }
            }

            texture.Apply();

            // Need to save the texture as an Asset and store a reference to the Asset
            var path = string.Concat(TestFixturesDirectory, config.TextureName);
            byte[] bytes = texture.EncodeToPNG();
            System.IO.File.WriteAllBytes(path, bytes);
            AssetDatabase.ImportAsset(path);
            var textureWithSprites = AssetDatabase.LoadAssetAtPath<Texture2D>(path);

            var importer = (TextureImporter)TextureImporter.GetAtPath(path);
            importer.isReadable = true;
            importer.textureType = TextureImporterType.Sprite;
            importer.spriteImportMode = SpriteImportMode.Multiple;
            var spriteMetaData = new SpriteMetaData[config.CellsPerSide * config.CellsPerSide];
            for (int i = 0; i < config.CellsPerSide; ++i)
            {
                for (int j = 0; j < config.CellsPerSide; ++j)
                {
                    var cellIndex = i * config.CellsPerSide + j;
                    var x = i * cellSize;
                    var y = j * cellSize;
                    spriteMetaData[cellIndex].rect = new Rect(x, y, cellSize, cellSize);
                    var spriteCount = config.UseZeroBasedIndexing ? cellIndex : cellIndex + 1;
                    var name = string.Concat(config.NamePrefix, spriteCount.ToString());
                    spriteMetaData[cellIndex].name = name;
                }
            }

            importer.spritesheet = spriteMetaData;
            importer.SaveAndReimport();

            return textureWithSprites;
        }

        /// <summary>
        /// Parameters for SetupSpriteSheet: grid size, output file name,
        /// sprite name prefix, and zero- vs one-based sprite numbering.
        /// </summary>
        private class SpriteSheetGenerationConfig
        {
            public int CellsPerSide { get; set; }

            public string TextureName { get; set; }

            public string NamePrefix { get; set; }

            public bool UseZeroBasedIndexing { get; set; }

            public SpriteSheetGenerationConfig(int cellsPerSide, string textureName)
            {
                this.CellsPerSide = cellsPerSide;
                this.TextureName = textureName;
            }
        }
    }
}
package mempool import ( "testing" "github.com/anakreon/anacoin/internal/block" ) func TestNewUnconfirmedTransactions(t *testing.T) { unconfirmedTransactions := NewUnconfirmedTransactions() if len(unconfirmedTransactions) != 0 { t.Fail() } } func TestAddTransaction(t *testing.T) { newTransactionOne := buildTransactionWithId("one") newTransactionTwo := buildTransactionWithId("two") unconfirmedTransactions := NewUnconfirmedTransactions() unconfirmedTransactions.AddTransaction(newTransactionOne) unconfirmedTransactions.AddTransaction(newTransactionTwo) if len(unconfirmedTransactions) != 2 { t.Error("not 2 transactions") } if len(unconfirmedTransactions[0].In) == 1 && unconfirmedTransactions[0].In[0].TransactionID != "one" { t.Error("transaction 1 not in slot 0") } if len(unconfirmedTransactions[1].In) == 1 && unconfirmedTransactions[1].In[0].TransactionID != "two" { t.Error("transaction 2 not in slot 1") } } func TestClear(t *testing.T) { newTransactionOne := buildTransactionWithId("one") newTransactionTwo := buildTransactionWithId("two") unconfirmedTransactions := NewUnconfirmedTransactions() unconfirmedTransactions.AddTransaction(newTransactionOne) unconfirmedTransactions.AddTransaction(newTransactionTwo) unconfirmedTransactions.Clear() if len(unconfirmedTransactions) != 0 { t.Error("not 0 transactions") } } func buildTransactionWithId(id string) block.Transaction { return block.Transaction{ In: []block.TransactionInput{ block.TransactionInput{TransactionID: id}, }, } }
# Bastion host preparation for an OpenShift Container Platform 3.11 deployment.
# Registers the host (connected installs), wires up yum repos, installs base
# packages / openshift-ansible / Docker / Azure CLI, and stages custom TLS
# certificate material for the installer. All configuration arrives via the
# exported environment variables below (injected by the caller).
echo $(date) " - Starting Bastion Prep Script"

# Re-export inputs so they are visible to child processes (ansible, runuser).
export USERNAME_ORG=${USERNAME_ORG}
export PASSWORD_ACT_KEY="${PASSWORD_ACT_KEY}"
export POOL_ID=${POOL_ID}
export SUDOUSER=${SUDOUSER}
export CUSTOMROUTINGCERTTYPE=${CUSTOMROUTINGCERTTYPE}
export CUSTOMMASTERCERTTYPE=${CUSTOMMASTERCERTTYPE}
export CUSTOMROUTINGCAFILE="${CUSTOMROUTINGCAFILE}"
export CUSTOMROUTINGCERTFILE="${CUSTOMROUTINGCERTFILE}"
export CUSTOMROUTINGKEYFILE="${CUSTOMROUTINGKEYFILE}"
export CUSTOMMASTERCAFILE="${CUSTOMMASTERCAFILE}"
export CUSTOMMASTERCERTFILE="${CUSTOMMASTERCERTFILE}"
export CUSTOMMASTERKEYFILE="${CUSTOMMASTERKEYFILE}"
export CUSTOMDOMAIN="${CUSTOMDOMAIN}"
export MINORVERSION=${MINORVERSION}
export CUSTOMMASTERTYPE=${CUSTOMMASTERTYPE}
export CUSTOMROUTINGTYPE=${CUSTOMROUTINGTYPE}
export REPOSERVER=${REPOSERVER}
export PRIVATEIP=${PRIVATEIP}
export ROUTERIP=${ROUTERIP}
export PRIVATEDNS=${PRIVATEDNS}
export INFRADNS=${INFRADNS}
export DEPLOYMENTTYPE=${DEPLOYMENTTYPE}
export REGISTRYSERVER=${REGISTRYSERVER}

# Start from a clean repo configuration; repos are re-added below depending
# on the deployment type.
rm -f /etc/yum.repos.d/*.repo
# Brief pause after wiping repo files — presumably to let any in-flight yum
# activity settle; TODO confirm the reason with the original authors.
sleep 10

# DEPLOYMENTTYPE == 3 means a "connected" install that can reach Red Hat's
# subscription services; any other value uses a local disconnected mirror.
if [[ ${DEPLOYMENTTYPE} == 3 ]]
then
    # connected
    # Register Host with Cloud Access Subscription
    echo $(date) " - Register host with Cloud Access Subscription"

    # Try username/password first, then fall back to activation key + org.
    subscription-manager register --force --username="${USERNAME_ORG}" --password="${PASSWORD_ACT_KEY}" || subscription-manager register --force --activationkey="${PASSWORD_ACT_KEY}" --org="${USERNAME_ORG}"
    RETCODE=$?

    # 0 = newly registered, 64 = already registered; anything else is fatal.
    if [ $RETCODE -eq 0 ]
    then
        echo "Subscribed successfully"
    elif [ $RETCODE -eq 64 ]
    then
        echo "This system is already registered."
    else
        echo "Incorrect Username / Password or Organization ID / Activation Key specified"
        exit 3
    fi

    subscription-manager attach --pool=${POOL_ID} > attach.log
    if [ $? -eq 0 ]
    then
        echo "Pool attached successfully"
    else
        # An already-attached pool is fine; anything else is fatal.
        grep attached attach.log
        if [ $? -eq 0 ]
        then
            echo "Pool ${POOL_ID} was already attached and was not attached again."
        else
            echo "Incorrect Pool ID or no entitlements available"
            exit 4
        fi
    fi

    # Disable all repositories and enable only the required ones
    echo $(date) " - Disabling all repositories and enabling only the required repos"

    subscription-manager repos --disable="*"
    subscription-manager repos \
        --enable="rhel-7-server-rpms" \
        --enable="rhel-7-server-extras-rpms" \
        --enable="rhel-7-server-ose-3.11-rpms" \
        --enable="rhel-7-server-ansible-2.6-rpms" \
        --enable="rhel-7-fast-datapath-rpms" \
        --enable="rh-gluster-3-client-for-rhel-7-server-rpms" \
        --enable="rhel-7-server-optional-rpms"
else
    # Disconnected install: point yum at the local mirror on REPOSERVER.
    cat > /etc/yum.repos.d/ose.repo <<EOF
[rhel-7-repo]
name=rhel-7-repo
baseurl=http://${REPOSERVER}/repos
enabled=1
gpgcheck=0
EOF
fi

# Update system to latest packages
# WALinuxAgent is excluded so the Azure agent is not restarted mid-provision.
echo $(date) " - Update system to latest packages"
yum -y update --exclude=WALinuxAgent
echo $(date) " - System update complete"

# Install base packages and update system to latest packages
echo $(date) " - Install base packages"
yum -y install wget git net-tools bind-utils iptables-services bridge-utils bash-completion httpd-tools kexec-tools sos psacct ansible
yum -y update glusterfs-fuse
echo $(date) " - Base package installation complete"

# Install OpenShift utilities
echo $(date) " - Installing OpenShift utilities"
yum -y install openshift-ansible-3.11.${MINORVERSION}
echo $(date) " - OpenShift utilities installation complete"

# Install Docker
echo $(date) " - Installing Docker"
yum -y install docker

# Update docker config for insecure registry
rm -f /etc/docker/daemon.json
cat > /etc/docker/daemon.json <<EOF
{
    "insecure-registries" : ["${REGISTRYSERVER}","docker-registry-default.${INFRADNS}"]
}
EOF

# Installing Azure CLI
# From https://docs.microsoft.com/en-us/cli/azure/install-azure-cli-yum
echo $(date) " - Installing Azure CLI"
# sudo rpm --import https://packages.microsoft.com/keys/microsoft.asc
# sudo sh -c 'echo -e "[azure-cli]\nname=Azure CLI\nbaseurl=https://packages.microsoft.com/yumrepos/azure-cli\nenabled=1\ngpgcheck=1\ngpgkey=https://packages.microsoft.com/keys/microsoft.asc" > /etc/yum.repos.d/azure-cli.repo'
sudo yum install -y azure-cli
echo $(date) " - Azure CLI installation complete"

# Install ImageMagick to resize image for Custom Header
sudo yum install -y ImageMagick

# Configure DNS so it always has the domain name
echo $(date) " - Adding DOMAIN to search for resolv.conf"
if [[ "${CUSTOMDOMAIN}" == "none" ]]
then
    DOMAINNAME=`domainname -d`
else
    DOMAINNAME=${CUSTOMDOMAIN}
fi
echo "DOMAIN=${DOMAINNAME}" >> /etc/sysconfig/network-scripts/ifcfg-eth0

# Restart via ansible's service module rather than systemctl directly.
echo $(date) " - Restarting NetworkManager"
runuser -l ${SUDOUSER} -c "ansible localhost -o -b -m service -a \"name=NetworkManager state=restarted\""
echo $(date) " - NetworkManager configuration complete"

# Run Ansible Playbook to update ansible.cfg file
echo $(date) " - Updating ansible.cfg file"
if [[ ${DEPLOYMENTTYPE} == 3 ]]
then
    wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 -t 5 https://raw.githubusercontent.com/microsoft/openshift-container-platform-playbooks/master/updateansiblecfg.yaml
    ansible-playbook -f 10 ./updateansiblecfg.yaml
else
    # NOTE(review): SCRIPTSLOCATION is used here but never exported above —
    # presumably injected by the calling template; verify before relying on
    # the disconnected path.
    wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 -t 5 http://${SCRIPTSLOCATION}/ocpp/updateansiblecfg.yaml
    ansible-playbook -f 10 ./ocpp/updateansiblecfg.yaml
fi

# Create certificate files
# Stage PEM material passed in via environment variables into /tmp where the
# installer playbooks expect to find it.
if [[ "${CUSTOMMASTERCERTTYPE}" == "custom" ]]
then
    echo $(date) " - Creating custom master certificate files"
    runuser -l ${SUDOUSER} -c "echo \"${CUSTOMMASTERCAFILE}\" > /tmp/masterca.pem"
    runuser -l ${SUDOUSER} -c "echo \"${CUSTOMMASTERCERTFILE}\" > /tmp/mastercert.pem"
    runuser -l ${SUDOUSER} -c "echo \"${CUSTOMMASTERKEYFILE}\" > /tmp/masterkey.pem"
    echo $(date) " - Custom master certificate files masterca.pem, mastercert.pem, masterkey.pem created in /tmp"
fi

if [ "${CUSTOMROUTINGCERTTYPE}" == "custom" ]
then
    echo $(date) " - Creating custom routing certificate files"
    runuser -l ${SUDOUSER} -c "echo \"${CUSTOMROUTINGCAFILE}\" > /tmp/routingca.pem"
    runuser -l ${SUDOUSER} -c "echo \"${CUSTOMROUTINGCERTFILE}\" > /tmp/routingcert.pem"
    runuser -l ${SUDOUSER} -c "echo \"${CUSTOMROUTINGKEYFILE}\" > /tmp/routingkey.pem"
    echo $(date) " - Custom routing certificate files routingca.pem, routingcert.pem, routingkey.pem created in /tmp"
fi

# Add DNS host entries for cluster naming
cat >> /etc/hosts <<EOF
${PRIVATEIP} ${PRIVATEDNS}
${ROUTERIP} ${INFRADNS}
EOF

echo $(date) " - Script Complete"
# frozen_string_literal: true

module Bundler
  module Plugin
    autoload :API, "bundler/plugin/api"
    autoload :DSL, "bundler/plugin/dsl"
    autoload :Index, "bundler/plugin/index"
    autoload :Installer, "bundler/plugin/installer"
    autoload :SourceList, "bundler/plugin/source_list"

    class MalformattedPlugin < PluginError; end
    class UndefinedCommandError < PluginError; end

    PLUGIN_FILE_NAME = "plugins.rb".freeze

    module_function

    @commands = {}

    # Installs a new plugin by the given name
    #
    # @param [Array<String>] names the name of plugin to be installed
    # @param [Hash] options various parameters as described in description
    # @option options [String] :source rubygems source to fetch the plugin gem from
    # @option options [String] :version (optional) the version of the plugin to install
    def install(names, options)
      paths = Installer.new.install(names, options)

      save_plugins paths
    rescue PluginError => e
      # Roll back any partially installed gems before reporting the failure.
      paths.values.map {|path| Bundler.rm_rf(path) } if paths
      # The parameter is the plural `names`; interpolating the undefined
      # local `name` here raised NameError inside the error path.
      Bundler.ui.error "Failed to install plugin #{names.join(", ")}: #{e.message}\n  #{e.backtrace.join("\n  ")}"
    end

    # Evaluates the Gemfile with a limited DSL and installs the plugins
    # specified by plugin method
    #
    # @param [Pathname] gemfile path
    def gemfile_install(gemfile = nil, &inline)
      if block_given?
        builder = DSL.new
        builder.instance_eval(&inline)
        definition = builder.to_definition(nil, true)
      else
        definition = DSL.evaluate(gemfile, nil, {})
      end
      return unless definition.dependencies.any?

      plugins = Installer.new.install_definition(definition)
      save_plugins plugins
    end

    # The index object used to store the details about the plugin
    def index
      @index ||= Index.new
    end

    # The directory root to all plugin related data
    def root
      @root ||= Bundler.user_bundle_path.join("plugin")
    end

    # The cache directory for plugin files
    def cache
      @cache ||= root.join("cache")
    end

    # To be called via the API to register to handle a command
    def add_command(command, cls)
      @commands[command] = cls
    end

    # Checks if any plugin handles the command
    def command?(command)
      !index.command_plugin(command).nil?
    end

    # To be called from Cli class to pass the command and argument to
    # appropriate plugin class
    def exec_command(command, args)
      raise UndefinedCommandError, "Command #{command} not found" unless command? command

      load_plugin index.command_plugin(command) unless @commands.key? command

      @commands[command].new.exec(command, args)
    end

    # currently only intended for specs
    #
    # @return [String, nil] installed path
    def installed?(plugin)
      Index.new.installed?(plugin)
    end

    # Post installation processing and registering with index
    #
    # @param [Hash] plugins mapped to their installation path
    def save_plugins(plugins)
      plugins.each do |name, path|
        path = Pathname.new path
        validate_plugin! path
        register_plugin name, path
        Bundler.ui.info "Installed plugin #{name}"
      end
    end

    # Checks if the gem is good to be a plugin
    #
    # At present it only checks whether it contains plugins.rb file
    #
    # @param [Pathname] plugin_path the path plugin is installed at
    # @raise [MalformattedPlugin] if plugins.rb file is not found
    def validate_plugin!(plugin_path)
      plugin_file = plugin_path.join(PLUGIN_FILE_NAME)
      raise MalformattedPlugin, "#{PLUGIN_FILE_NAME} was not found in the plugin!" unless plugin_file.file?
    end

    # Runs the plugins.rb file in an isolated namespace, records the plugin
    # actions it registers for and then passes the data to index to be stored.
    #
    # @param [String] name the name of the plugin
    # @param [Pathname] path the path where the plugin is installed at
    def register_plugin(name, path)
      # Swap in a fresh command table so only commands registered by this
      # plugin's plugins.rb are recorded, then restore the previous table.
      commands = @commands

      @commands = {}
      begin
        load path.join(PLUGIN_FILE_NAME), true
      rescue StandardError => e
        raise MalformattedPlugin, "#{e.class}: #{e.message}"
      end

      index.register_plugin name, path.to_s, @commands.keys
    ensure
      @commands = commands
    end

    # Executes the plugins.rb file
    #
    # @param [String] name of the plugin
    def load_plugin(name)
      # Need to ensure before this that plugin root where the rest of gems
      # are installed to be on load path to support plugin deps. Currently not
      # done to avoid conflicts
      path = index.plugin_path(name)

      load path.join(PLUGIN_FILE_NAME)
    end

    class << self
      private :load_plugin, :register_plugin, :save_plugins, :validate_plugin!
    end
  end
end
import classes from './FollowButton.module.css';

// Call-to-action button, styled via the CSS module, that links to the
// newsletter page.
export const FollowButton = () => {
  return (
    <a href="/newsletter">
      <div className={classes.button}>Follow</div>
    </a>
  );
};
# JSON representation for articles (Representable-based): exposes the basic
# fields, the nested author name, and a Japanese-formatted creation date.
module ArticleRepresentable
  extend ActiveSupport::Concern

  included do
    include Representable::JSON

    property :id, render_nil: true
    property :title, render_nil: true
    property :content, render_nil: true
    property :is_pickup
    property :user do
      property :name
    end
    # Rendered via the decorator method below rather than the model.
    property :created_at_ja, exec_context: :decorator

    # Formats the creation timestamp as e.g. "2020年01月31日".
    # The original format string '%Y年%m月%日' was missing the %d directive,
    # so the day of month was never rendered.
    def created_at_ja
      represented.created_at.strftime('%Y年%m月%d日')
    end
  end
end
import { SerialNum } from '../view/game';

// Neighbour table for the 3x3 board: for each cell index, the cell reached by
// moving the blank [up, right, down, left]; -1 marks a move off the board.
export const allowSwap: number[][] = [
  [-1, 1, 3, -1],
  [-1, 2, 4, 0],
  [-1, -1, 5, 1],
  [0, 4, 6, -1],
  [1, 5, 7, 3],
  [2, -1, 8, 4],
  [3, 7, -1, -1],
  [4, 8, -1, 6],
  [5, -1, -1, 7],
];

// Key characters corresponding to the four directions above.
export const allowDict: string[] = ['w', 'd', 's', 'a'];

// One BFS node: a board layout plus the key sequence that produced it.
export class QueueItem {
  nowString: SerialNum[];
  operations: string;
  zeroIndex: number;

  constructor(now_string: SerialNum[], steps: string) {
    this.nowString = now_string;
    this.operations = steps;
    this.zeroIndex = now_string.indexOf(0);
  }

  // Expands this node into every board reachable with a single move of the
  // blank tile, appending the matching key character to the move sequence.
  public getNext(): QueueItem[] {
    const successors: QueueItem[] = [];
    const neighbours = allowSwap[this.zeroIndex];
    for (let dir = 0; dir < neighbours.length; dir++) {
      const target = neighbours[dir];
      if (target === -1) {
        continue;
      }
      const board = this.nowString.slice();
      board[this.zeroIndex] = board[target];
      board[target] = 0;
      successors.push(new QueueItem(board, this.operations + allowDict[dir]));
    }
    return successors;
  }
}

// Breadth-first search from the given layout to the layout where every tile
// present in the input sits at index (tile - 1) and all other cells are 0.
// Returns the shortest key sequence, or undefined if the goal is unreachable.
export function getSteps(serialNumber: SerialNum[]): string | undefined {
  const startKey = serialNumber.join('');
  const goalKey = new Array<number>(9)
    .fill(0)
    .map((_, idx) => {
      return serialNumber.includes(<SerialNum>(idx + 1)) ? idx + 1 : 0;
    })
    .join('');

  const frontier: QueueItem[] = [new QueueItem(serialNumber.slice(), '')];
  const visited = new Set<string>();
  visited.add(startKey);

  while (frontier.length !== 0) {
    const current = frontier.shift() as QueueItem;
    if (current.nowString.join('') === goalKey) {
      return current.operations;
    }
    for (const next of current.getNext()) {
      const key = next.nowString.join('');
      if (!visited.has(key)) {
        visited.add(key);
        frontier.push(next);
      }
    }
  }
  return undefined;
}
/** * Copyright (c) 2016 Carnegie Mellon University. All Rights Reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following acknowledgments and disclaimers. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. The names "Carnegie Mellon University," "SEI" and/or "Software * Engineering Institute" shall not be used to endorse or promote products * derived from this software without prior written permission. For written * permission, please contact permission@sei.cmu.edu. * * 4. Products derived from this software may not be called "SEI" nor may "SEI" * appear in their names without prior written permission of * permission@sei.cmu.edu. * * 5. Redistributions of any form whatsoever must retain the following * acknowledgment: * * This material is based upon work funded and supported by the Department * of Defense under Contract No. FA8721-05-C-0003 with Carnegie Mellon * University for the operation of the Software Engineering Institute, a * federally funded research and development center. Any opinions, * findings and conclusions or recommendations expressed in this material * are those of the author (s) and do not necessarily reflect the views of * the United States Department of Defense. * * NO WARRANTY. THIS CARNEGIE MELLON UNIVERSITY AND SOFTWARE ENGINEERING * INSTITUTE MATERIAL IS FURNISHED ON AN “AS-IS” BASIS. CARNEGIE MELLON * UNIVERSITY MAKES NO WARRANTIES OF ANY KIND, EITHER EXPRESSED OR * IMPLIED, AS TO ANY MATTER INCLUDING, BUT NOT LIMITED TO, WARRANTY OF * FITNESS FOR PURPOSE OR MERCHANTABILITY, EXCLUSIVITY, OR RESULTS * OBTAINED FROM USE OF THE MATERIAL. 
CARNEGIE MELLON UNIVERSITY DOES * NOT MAKE ANY WARRANTY OF ANY KIND WITH RESPECT TO FREEDOM FROM PATENT, * TRADEMARK, OR COPYRIGHT INFRINGEMENT. * * This material has been approved for public release and unlimited * distribution. **/ /** * @file Executor.cpp * @author James Edmondson <jedmondson@gmail.com> * * Implementation of the Executor algorithm. The Executor algorithm executes * other algorithms. **/ #include <sstream> #include "gams/algorithms/Executor.h" #include "gams/algorithms/AlgorithmFactoryRepository.h" #include "gams/loggers/GlobalLogger.h" namespace knowledge = madara::knowledge; namespace containers = knowledge::containers; typedef knowledge::KnowledgeRecord KnowledgeRecord; typedef KnowledgeRecord::Integer Integer; typedef knowledge::KnowledgeMap KnowledgeMap; gams::algorithms::BaseAlgorithm * gams::algorithms::ExecutorFactory::create ( const madara::knowledge::KnowledgeMap & args, madara::knowledge::KnowledgeBase * knowledge, platforms::BasePlatform * platform, variables::Sensors * sensors, variables::Self * self, variables::Agents * agents) { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "gams::algorithms::ExecutorFactory::create:" \ " creating Executor with %d args\n", args.size ()); BaseAlgorithm * result (0); if (knowledge && sensors && platform && self) { int repeat = 0; AlgorithmMetaDatas algorithms; KnowledgeMap::const_iterator size_found = args.find ("size"); if (size_found != args.end ()) { int size = (int) size_found->second.to_integer (); madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MINOR, "gams::algorithms::ExecutorFactory::create:" \ " Number of algorithms is %d\n", size); if (size > 0) { algorithms.resize (size); KnowledgeMap::const_iterator next = args.find ("0.algorithm"); std::string alg_prefix = "0.algorithm"; std::string args_prefix = "0.algorithm.args"; /** * iterate over all keys from 0.algorithm to size **/ for (; next != size_found; ++next) { if 
(next->first.size () > 0) { size_t last; // check for algorithm prefix of a number for (last = 0; last < next->first.size () && next->first[last] >= '0' && next->first[last] <= '9'; ++last) { }; // if this is an algorithm prefix if (last > 0) { std::string str_index = next->first.substr (0, last); size_t index = (size_t) (KnowledgeRecord (str_index).to_integer ()); const std::string alg_prefix = str_index + ".algorithm"; const std::string args_prefix = alg_prefix + ".args."; const std::string precond_prefix = str_index + ".precond"; // we have an algorithm definition if (next->first == alg_prefix) { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MINOR, "gams::algorithms::ExecutorFactory::create:" \ " Algorithm %d id set to %s\n", (int)index, next->second.to_string ().c_str ()); algorithms[index].id = next->second.to_string (); } // end algorithm define else if (madara::utility::begins_with (next->first, args_prefix)) { std::string arg = madara::utility::strip_prefix ( next->first, args_prefix); madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MINOR, "gams::algorithms::ExecutorFactory::create:" \ " Algorithm %d added arg %s = %s\n", (int)index, arg.c_str (), next->second.to_string ().c_str ()); algorithms[index].args[arg] = next->second; } // end algorithm arg define else if (next->first == precond_prefix) { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MINOR, "gams::algorithms::ExecutorFactory::create:" \ " Algorithm %d added precond %s\n", (int)index, next->second.to_string ().c_str ()); algorithms[index].precond = next->second.to_string (); } else if (madara::utility::ends_with (next->first, "max_time")) { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MINOR, "gams::algorithms::ExecutorFactory::create:" \ " Algorithm %d setting max_time to %f seconds\n", (int)index, next->second.to_double ()); algorithms[index].max_time = 
next->second.to_double (); } else { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_ERROR, "gams::algorithms::ExecutorFactory::create:" \ " Unable to process %s = %s.\n", next->first.c_str (), next->second.to_string ().c_str ()); } // end unknown arg } // end algorithm arg index found else // begin non-index prefixed argument { if (next->first == "repeat") { repeat = (int)next->second.to_integer (); madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_ERROR, "gams::algorithms::ExecutorFactory::create:" \ " Setting repeat to %d.\n", repeat); } } // end non-index prefixed argument } // end if args string is not empty } // end iteration over args madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_ERROR, "gams::algorithms::ExecutorFactory::create:" \ " Creating Executor with %d algorithms and %d repeat.\n", (int)algorithms.size (), repeat); result = new Executor (algorithms, repeat, knowledge, platform, sensors, self, agents); } // end size > 0 else { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_ERROR, "gams::algorithms::ExecutorFactory::create:" \ " size <= 0. No algorithms to run.\n"); } } else { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_ERROR, "gams::algorithms::ExecutorFactory::create:" \ " no size found. 
Must set algorithm.size to the num of algorithms\n"); } } else { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_ERROR, "gams::algorithms::ExecutorFactory::create:" \ " knowledge, sensors, platform, self, or agents are invalid\n"); } return result; } gams::algorithms::Executor::Executor ( AlgorithmMetaDatas algorithms, int repeat, madara::knowledge::KnowledgeBase * knowledge, platforms::BasePlatform * platform, variables::Sensors * sensors, variables::Self * self, variables::Agents * agents) : BaseAlgorithm (knowledge, platform, sensors, self, agents), algorithms_ (algorithms), repeat_ (repeat), alg_index_ (0), cycles_ (0), current_ (0), precond_met_ (false), enforcer_ (0.0, 0.0) { status_.init_vars (*knowledge, "executor", self->agent.prefix); status_.init_variable_values (); madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "gams::algorithms::Executor::constr:" \ " Initialized with %d algorithms and %d repeat. Status stored at %s.\n", (int)algorithms.size (), repeat, status_.name.c_str ()); } gams::algorithms::Executor::~Executor () { } void gams::algorithms::Executor::operator= (Executor& rhs) { if (this != &rhs) { this->algorithms_ = rhs.algorithms_; this->repeat_ = rhs.repeat_; this->alg_index_ = rhs.alg_index_; this->cycles_ = rhs.cycles_; this->BaseAlgorithm::operator= (rhs); } } int gams::algorithms::Executor::analyze (void) { int result (OK); bool create_algorithm (false); if (!precond_met_ && status_.finished.is_false ()) { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MINOR, "gams::algorithms::Executor::analyze:" \ " Cycle %d: Precondition for algorithm %d not met yet. 
Checking...\n", cycles_, (int)alg_index_); // if there is no precondition, set precond_met to true if (algorithms_[alg_index_].precond == "") { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MINOR, "gams::algorithms::Executor::analyze:" \ " Cycle %d: Precondition empty for algorithm %d." \ " Changing precond_met...\n", cycles_, (int)alg_index_); precond_met_ = true; create_algorithm = true; } else { #ifndef _MADARA_NO_KARL_ precond_met_ = knowledge_->evaluate (algorithms_[alg_index_].precond).is_true (); #else // end karl support precond_met_ = true; #endif // end no karl support madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "gams::algorithms::Executor::analyze:" \ " Cycle %d: Precondition check for algorithm %d is %s.\n", cycles_, (int)alg_index_, precond_met_ ? "true" : "false"); create_algorithm = precond_met_; } } if (create_algorithm) { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "gams::algorithms::Executor::analyze:" \ " Cycle %d: Precondition met for algorithm %d. 
Creating algorithm\n", cycles_, (int)alg_index_); current_ = algorithms::global_algorithm_factory ()->create ( algorithms_[alg_index_].id, algorithms_[alg_index_].args); if (algorithms_[alg_index_].max_time > 0) { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "gams::algorithms::Executor::analyze:" \ " Cycle %d: Maximum time for algorithm %d set to %f\n", cycles_, (int)alg_index_, algorithms_[alg_index_].max_time); enforcer_.start (); enforcer_.set_duration (algorithms_[alg_index_].max_time); } } if (precond_met_ && status_.finished.is_false ()) { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "gams::algorithms::Executor::analyze:" \ " Cycle %d: Analyzing algorithm %d\n", cycles_, (int)alg_index_); current_->analyze (); } else { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "gams::algorithms::Executor::analyze:" \ " Cycle %d: Algorithm %d precond = %s, finished = %d\n", cycles_, (int)alg_index_, precond_met_ ? 
"true" : "false", (int)*status_.finished); } return result; } int gams::algorithms::Executor::execute (void) { int result (OK); if (precond_met_ && status_.finished.is_false ()) { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "gams::algorithms::Executor::execute:" \ " Cycle %d: Calling algorithm %d execute\n", cycles_, (int)alg_index_); current_->execute (); // check if the algorithm status is finished or we've hit end time if (current_->get_algorithm_status ()->finished.is_true () || (algorithms_[alg_index_].max_time > 0 && enforcer_.is_done ())) { // if we are at the end of the algorithms list if (alg_index_ == algorithms_.size () - 1) { ++cycles_; if (cycles_ >= repeat_ && repeat_ >= 0) { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "Executor::execute:" \ " Cycle %d: Algorithm is finished after %d cycles\n", cycles_, cycles_); result |= FINISHED; status_.finished = 1; } // end if finished else { // reset the move index if we are supposed to cycle alg_index_ = 0; precond_met_ = false; madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "Executor::execute:" \ " Cycle %d: Proceeding to algorithm 0 in next cycle.\n", cycles_); } // end if not finished } // end if move_index_ == end of locations else { // go to the next algorithm ++alg_index_; precond_met_ = false; madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "Execute::execute:" \ " Cycle %d: Proceeding to algorithm %d.\n", cycles_, (int)alg_index_); } // if not the end of the list } } else { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "gams::algorithms::Executor::execute:" \ " Cycle %d: Algorithm %d precond = %s, finished = %d\n", cycles_, (int)alg_index_, precond_met_ ? 
"true" : "false", (int)*status_.finished); } return result; } int gams::algorithms::Executor::plan (void) { int result (OK); if (precond_met_ && status_.finished.is_false ()) { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "gams::algorithms::Executor::plan:" \ " Cycle %d: Calling algorithm %d plan\n", cycles_, (int)alg_index_); current_->plan (); } else { madara_logger_ptr_log (gams::loggers::global_logger.get (), gams::loggers::LOG_MAJOR, "gams::algorithms::Executor::plan:" \ " Cycle %d: Algorithm %d precond = %s, finished = %d\n", cycles_, (int)alg_index_, precond_met_ ? "true" : "false", (int)*status_.finished); } return result; }
"""Integration test for shadowysocket: an echo server/client pair exchange
data through shadowysocket-wrapped sockets on localhost port 12300."""
import os
import sys
import threading
import socket
import time

# Make the package under test importable from its parent directory.
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, parentdir)

import shadowysocket


class echoserver():
    """Accept loop on 127.0.0.1:12300; spawns one echo thread per client.

    Instantiating this class runs the accept loop forever, so it is used as
    a thread target (the constructor never returns).
    """

    def __init__(self):
        self.conn = socket.socket()
        self.conn.bind(("127.0.0.1", 12300))
        self.conn.listen(5)
        while True:
            conn, address = self.conn.accept()
            # Wrap the accepted socket; presumably shadowysocket adds an
            # encryption/obfuscation layer — verify against the package.
            conn = shadowysocket.shadowysocket(conn)
            threading.Thread(target=self.echo, args=(conn, address)).start()

    def echo(self, conn, address):
        # Echo each payload back with the peer's IP address appended, until
        # the peer closes the connection (recv returns b"").
        while True:
            data = conn.recv(4096)
            if len(data) == 0:
                break  # connection close
            conn.sendall(data + address[0].encode('utf-8'))


class echoclient():
    """Thin client wrapper around a shadowysocket-wrapped TCP connection."""

    def __init__(self):
        self.conn = socket.create_connection(("127.0.0.1", 12300))
        self.conn = shadowysocket.shadowysocket(self.conn)

    def send(self, data):
        self.conn.sendall(data)

    def recv(self):
        return self.conn.recv(4096)

    def close(self):
        self.conn.close()


def test_socket():
    # NOTE(review): there is no synchronization between server startup and
    # the first client connect — if the server thread has not bound/listened
    # yet, create_connection may be refused; confirm whether this ever flakes.
    server = threading.Thread(target=echoserver)
    server.daemon = True
    server.start()
    # Three rounds of two interleaved clients, verifying the echo+peer-IP
    # protocol implemented by echoserver.echo above.
    for _ in range(3):
        client1 = echoclient()
        client2 = echoclient()
        client1.send(b"hello world")
        assert client1.recv() == b"hello world127.0.0.1"
        client2.send(b"another client")
        assert client2.recv() == b"another client127.0.0.1"
        client1.send(b"bye")
        assert client1.recv() == b"bye127.0.0.1"
        client1.close()
        client2.close()
    time.sleep(0.5)  # wait for connection thread to die
    assert threading.active_count() == 2  # main+server


if __name__ == "__main__":
    test_socket()
import { EntityRepository, Repository } from 'typeorm';
import OrderItemEntity from '../order-item.entity';
import { OrderItemCreateBody } from '../../orders/orders.dto';
import ItemEntity from '../../item/item.entity';
import OrderEntity from '../../orders/orders.entity';

@EntityRepository(OrderItemEntity)
export default class OrderItemRepository extends Repository<OrderItemEntity> {
  /**
   * Persists a new order line: the given item, its quantity, and the line
   * total (unit price times quantity), attached to the given order.
   */
  async createOrderItem(order: OrderEntity, item: ItemEntity, itemCount: number) {
    const orderPrice = item.price * itemCount;

    const orderItem = this.create({
      order,
      item,
      orderPrice,
      count: itemCount,
    });

    return await this.save(orderItem);
  }
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

using System.Collections.Generic;
using Microsoft.PowerFx.Core.Public;
using Microsoft.PowerFx.Core.Utils;
using Microsoft.PowerFx.Syntax;
using Xunit;

namespace Microsoft.PowerFx.Core.Tests
{
    // Verifies that FormulaSet topologically orders formulas by their
    // identifier dependencies, using a test-only dependency finder.
    public class FormulaSetTests
    {
        // Visitor that collects every first-name (identifier) node seen in a
        // parsed formula; the collected names are treated as the formula's
        // dependencies.
        private class TestDependencyFinderVisitor : IdentityTexlVisitor
        {
            // Names encountered while walking the parse tree.
            public readonly HashSet<string> _vars = new HashSet<string>();

            public override void Visit(FirstNameNode node)
            {
                var name = node.Ident.Name.Value;

                _vars.Add(name);

                base.Visit(node);
            }
        }

        // IDependencyFinder backed by a fresh engine check: binds the
        // expression and harvests identifiers from the bound parse tree.
        private class TestDependencyFinder : IDependencyFinder
        {
            public HashSet<string> FindDependencies(FormulaWithParameters formulaWithParameters)
            {
                var config = new PowerFxConfig();
                var engine = new Engine(config);
                var checkResult = engine.Check(formulaWithParameters._expression, formulaWithParameters._schema);
                var v = new TestDependencyFinderVisitor();
                // NOTE(review): reaches into the internal binding to walk the
                // bound tree — test-only access.
                checkResult._binding.Top.Accept(v);
                return v._vars;
            }
        }

        [Fact]
        public void TestFormulaSet()
        {
            // Formulas are deliberately added out of dependency order;
            // the expected partial order is A -> {B, C, H}, B -> D,
            // C -> E, {D, E} -> F, F -> G, G -> H.
            var formulas = new Dictionary<string, FormulaWithParameters>()
            {
                { "D", new FormulaWithParameters("B + 1") },
                { "H", new FormulaWithParameters("A + G") },
                { "A", new FormulaWithParameters("15") },
                { "B", new FormulaWithParameters("A + 1") },
                { "E", new FormulaWithParameters("C + 1") },
                { "G", new FormulaWithParameters("F + 1") },
                { "C", new FormulaWithParameters("A + 1") },
                { "F", new FormulaWithParameters("D + E") },
            };

            var set = new FormulaSet(new TestDependencyFinder());
            set.Add(formulas);

            // Record the position of each formula in the sorted output so the
            // partial order can be asserted without pinning a total order.
            var indexMap = new Dictionary<string, int>();
            var index = 0;
            foreach (var kvp in set.SortedFormulas)
            {
                indexMap[kvp.Key] = index;
                index += 1;
            }

            // Every dependency must appear before its dependent.
            Assert.True(indexMap["A"] < indexMap["B"]);
            Assert.True(indexMap["A"] < indexMap["C"]);
            Assert.True(indexMap["B"] < indexMap["D"]);
            Assert.True(indexMap["C"] < indexMap["E"]);
            Assert.True(indexMap["D"] < indexMap["F"]);
            Assert.True(indexMap["E"] < indexMap["F"]);
            Assert.True(indexMap["F"] < indexMap["G"]);
            Assert.True(indexMap["A"] < indexMap["H"]);
            Assert.True(indexMap["G"] < indexMap["H"]);
        }
    }
}
// Copyright (C) 2015 Wright State University // Author: Daniel P. Foose // This file is part of FreeIModPoly. // FreeIModPoly is distributed under two licenses, the GNU GPL v3 and the MIT License // Which license applies is up to your discretion. // GPL Statement: // FreeIModPoly is free software; you can redistribute it and/or modify it // under the terms of the GNU General Public License as published by // the Free Software Foundation; either version 3 of the License, or (at // your option) any later version. // // FreeIModPoly is distributed in the hope that it will be useful, but // WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // General Public License for more details. // // You should have received a copy of the GNU General Public License // along with Octave; see the file LICENSE. If not, see // <http://www.gnu.org/licenses/>. // MIT Statement: // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // // FreeIModPoly: A free software implementation of the Vancouver Raman Algorithm // Please cite DOI: 10.1366/000370207782597003 and this project (see CITATION) // The author of this implementation is not associated with the authors of the // algorithm. 
#include "freeimodpoly.h"
using namespace std;
using namespace arma;

///
/// \brief FreeIModPoly::IModPoly Perform the Vancouver Raman Algorithm to correct baseline
/// \param spectrum A vector containing the signal to be corrected
/// \param abscissa The x-values of spectrum
/// \param baseline Will contain the fitted baseline
/// \param corrected Will contain the corrected spectrum
/// \param poly_order Polynomial order for the baseline fits (must be >= 1)
/// \param max_it Maximum number of iterations to perform. if set to 0 there is no maximum
/// \param threshold A value in (0, 1) specifying the upper limit of the error criterion
/// \param err The final error criterion
/// \return The number of iterations performed
/// \throws std::invalid_argument on invalid poly_order, threshold or size mismatch
///
uword FreeIModPoly::IModPoly(const vec &spectrum, const vec &abscissa,
                             vec &baseline, vec &corrected,
                             double &err,
                             const uword poly_order,
                             const uword max_it,
                             const double threshold)
{
    if (poly_order == 0)
        throw invalid_argument("Polynomial order must be 1 (linear) or greater.");
    if (threshold >= 1 || threshold <= 0)
        throw invalid_argument("Threshold value must be between 0 and 1.");
    if (spectrum.n_rows != abscissa.n_rows)
        throw invalid_argument("Spectrum and abscissa must be the same size.");

    uword i = 1;
    bool no_max_it = (max_it == 0);
    mat X;
    vec coefs, fit;
    double dev;

    // Perform first regression (on spectrum without removed major peaks).
    X = Vandermonde(abscissa, poly_order);
    coefs = FreeIModPoly::OrdinaryLeastSquares(X, spectrum);
    fit = FreeIModPoly::CalcPoly(coefs, abscissa);
    dev = FreeIModPoly::CalcDev(spectrum, fit);
    // BUG FIX: removed stray debug output (cout << "CalcDev" << endl;)
    // accidentally left in the release path.

    double prev_dev = dev; // used in the while-loop criterion

    // Find major peak areas to remove and remove them.
    uvec non_peak_ind = NonPeakInd(spectrum, dev);
    vec new_abscissa = abscissa(non_peak_ind);
    vec prev_fit = spectrum(non_peak_ind); // not a fit here, but it is inside the loop

    X = Vandermonde(new_abscissa, poly_order);
    uword rows = new_abscissa.n_rows;

    do { // always perform at least one regression on the "shrunken" spectrum
        // Polynomial fitting.
        coefs = FreeIModPoly::OrdinaryLeastSquares(X, prev_fit);
        fit = FreeIModPoly::CalcPoly(coefs, new_abscissa);

        // Residual and deviation calculation (residual computed inside CalcDev).
        dev = FreeIModPoly::CalcDev(prev_fit, fit);

        // Error criterion.
        err = FreeIModPoly::CalcErr(dev, prev_dev);

        // Reconstruction of model input.
        fit += dev * ones(rows);

        // If a value in the previous fit is lower than this fit, take the previous.
        uvec ind = find(prev_fit < fit);
        fit(ind) = prev_fit(ind);

        prev_fit = fit;
        prev_dev = dev;
        ++i;
    } while (err > threshold && (no_max_it || i <= max_it));

    // Calculate fit for all values in the original abscissa.
    baseline = FreeIModPoly::CalcPoly(coefs, abscissa);
    corrected = spectrum - baseline;
    return i;
}

///
/// \brief FreeIModPoly::CalcDev Standard deviation of the residual
/// \param spectrum The observed signal
/// \param fit The fitted values
/// \return Population standard deviation of (spectrum - fit)
///
double FreeIModPoly::CalcDev(const vec &spectrum, const vec &fit)
{
    using namespace arma;
    vec R = spectrum - fit;
    double R_avg = mean(R);
    vec centered = R - R_avg * ones(R.n_rows);
    centered = pow(centered, 2.0);
    return std::sqrt(sum(centered) / centered.n_rows);
}

///
/// \brief FreeIModPoly::NonPeakInd Indices of points not considered major peaks
/// \param spectrum The observed signal
/// \param dev Standard deviation of the first-fit residual
/// \return Indices where spectrum <= spectrum + dev
///
uvec FreeIModPoly::NonPeakInd(const vec &spectrum, const double dev)
{
    using namespace arma;
    // NOTE(review): spectrum <= spectrum + dev holds for every element when
    // dev >= 0, so this currently selects ALL indices (no peak removal).
    // The reference algorithm removes points where spectrum > fit + dev;
    // confirm against the header/callers before changing the signature.
    vec SUM = spectrum + dev * ones(spectrum.n_rows);
    return find(spectrum <= SUM);
}

///
/// \brief FreeIModPoly::CalcPoly Calculate the values of a polynomial
/// \param coefs The polynomial coefficients ordered from 0th order to nth order
///        (must contain at least two entries; guaranteed by IModPoly's
///        poly_order >= 1 precondition)
/// \param x The values for which the polynomial is to be calculated
/// \return The calculated values
///
vec FreeIModPoly::CalcPoly(const vec &coefs, const vec &x)
{
    vec y = coefs(0) + x * coefs(1); // 0th and 1st power of x
    // This loop is only used for powers where pow(x, power) must be calculated.
    if (coefs.n_rows > 1) {
        for (uword i = 2; i < coefs.n_rows; ++i)
            y += coefs(i) * pow(x, i);
    }
    return y;
}

///
/// \brief FreeIModPoly::OrdinaryLeastSquares Perform Ordinary Least Squares via QR
/// \param X The design matrix of the regression
/// \param y The response vector
/// \return The coefficient vector solving min ||X b - y||
///
vec FreeIModPoly::OrdinaryLeastSquares(const mat &X, const vec &y)
{
    mat Q, R;
    qr(Q, R, X);
    // PERF: solve against the projected vector Q'y instead of inverting
    // against the full Q' matrix — mathematically identical
    // (R^-1 Q' y), but a single vector back-substitution.
    return solve(R, Q.t() * y);
}

///
/// \brief FreeIModPoly::Vandermonde Build a Vandermonde matrix for OLS
/// \param x A vector containing a signal
/// \param poly_order The polynomial order
/// \return A Vandermonde matrix of x for the polynomial of order poly_order
///
mat FreeIModPoly::Vandermonde(const vec &x, const int poly_order)
{
    mat X(x.n_rows, poly_order + 1);
    X.col(0) = ones(x.n_rows); // faster than pow(X, 0)
    X.col(1) = x;
    for (uword i = 2; i < X.n_cols; ++i)
        X.col(i) = pow(x, i);
    return X;
}

///
/// \brief FreeIModPoly::CalcErr Calculate the relative error criterion
/// \param dev Current deviation
/// \param prev_dev Previous iteration's deviation
/// \return |dev - prev_dev| / dev
///
double FreeIModPoly::CalcErr(const double &dev, const double &prev_dev)
{
    return std::abs((dev - prev_dev) / dev);
}
// Strip characters that are unsafe inside omnibox suggestion XML by
// replacing them with spaces. Titles are display-only, so dropping the
// characters (rather than entity-escaping) is acceptable here.
function escapeTitle(text) {
  return text
    .replace(/"/g, ' ')
    .replace(/'/g, " ")
    .replace(/</g, ' ')
    .replace(/>/g, ' ')
    .replace(/&/g, ' ');
}

// Rebuild the cached {content, description} bookmark list in local storage.
// Called on install and on every bookmark change.
function update_bookmarks() {
  chrome.bookmarks.search({}, (bookmarkItems) => {
    if (bookmarkItems) {
      var bookmarks = [];
      for (var i = 0; i < bookmarkItems.length; i++) {
        var item = bookmarkItems[i];
        // Folders have no url; skip them.
        if (item.url) {
          bookmarks.push({content: item.url, description: escapeTitle(item.title)});
        }
      }
      chrome.storage.local.set({"mrsagasu": bookmarks});
    }
  })
}

// Escape text for literal use inside omnibox suggestion description XML.
// BUG FIX: '&' must be escaped FIRST. The previous order escaped '<' to
// '&lt;' and then re-escaped its ampersand, producing '&amp;lt;'.
function escapeXML(text) {
  return text
    .replace(/&/g, '&amp;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, "&apos;")
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;');
}

// Escape regex metacharacters so user input can be embedded in a RegExp.
function escapeRegExp(string) {
  // $& is the whole matched substring.
  return string.replace(/[.*+?^=!:${}()|[\]\/\\]/g, '\\$&');
}

chrome.runtime.onInstalled.addListener(update_bookmarks);
chrome.bookmarks.onChanged.addListener(update_bookmarks);
chrome.bookmarks.onRemoved.addListener(update_bookmarks);
chrome.bookmarks.onCreated.addListener(update_bookmarks);

chrome.omnibox.setDefaultSuggestion({
  description: "Type a few character"
});

// Fuzzy-match typed text against cached bookmark titles and suggest up to
// seven results, shortest match span first.
chrome.omnibox.onInputChanged.addListener((text, suggest) => {
  // Robustness: nothing to match against an empty query.
  if (!text) {
    suggest([]);
    return;
  }
  chrome.storage.local.get("mrsagasu", (value) => {
    var bookmarks = value.mrsagasu;
    if (bookmarks) {
      // Build a lazy fuzzy pattern: each typed char in order, anything between.
      var fuz = text.split('').map(escapeRegExp).join('.*?');
      var re = new RegExp(fuz, "i");
      var sugs = [];
      bookmarks.forEach(value => {
        var mat = re.exec(value.description);
        if (mat) {
          var esd = value.description;
          var esc = escapeXML(value.content);
          // Highlight the matched span, dim the rest, append the URL.
          var desc = "<dim>" + esd.slice(0, mat.index) + "</dim><match>"
                   + esd.slice(mat.index, mat.index + mat[0].length)
                   + "</match><dim>" + esd.slice(mat.index + mat[0].length)
                   + "</dim> <url>" + esc + "</url>";
          sugs.push({
            content: value.content,
            description: desc,
            matchlen: mat[0].length  // fixed typo: was "mathchlen"
          });
        }
      });
      // Tighter (shorter) matches rank first.
      sugs.sort((a, b) => {
        return a.matchlen - b.matchlen;
      })
      var result = [];
      for (var i = 0; i < sugs.length && i < 7; i++) {
        result.push({content: sugs[i].content, description: sugs[i].description});
      }
      suggest(result);
    }
  });
});

// Open the chosen URL according to how the user committed the suggestion.
chrome.omnibox.onInputEntered.addListener((text, disposition) => {
  switch (disposition) {
    case "currentTab":
      chrome.tabs.update({url: text});
      break;
    case "newForegroundTab":
      chrome.tabs.create({url: text});
      break;
    case "newBackgroundTab":
      chrome.tabs.create({url: text, active: false});
      break;
  }
});
<?php
namespace backend\controllers;

use backend\models\CopyFlowForm;
use backend\models\FlowForm;
use backend\models\FlowPublishForm;
use backend\models\FlowRelationForm;
use backend\models\FlowSearch;
use backend\models\ProductSearch;
use common\models\AdministratorLog;
use common\models\Flow;
use Yii;
use yii\bootstrap\ActiveForm;
use yii\db\Query;
use yii\filters\AccessControl;
use yii\filters\ContentNegotiator;
use yii\filters\VerbFilter;
use yii\web\NotFoundHttpException;
use yii\web\Response;

/**
 * Back-end controller for product flows (Flow models): listing, creation,
 * copying, updating, publishing, soft deletion, status toggling and
 * product-relation management. AJAX endpoints return JSON; page actions
 * render views and report outcomes via session flash messages.
 */
class FlowController extends BaseController
{
    /**
     * @var string|array the configuration for creating the serializer that formats the response data.
     */
    public $serializer = 'yii\rest\Serializer';

    /**
     * @inheritdoc
     */
    public function behaviors()
    {
        return [
            // ajax-publish may only be reached via POST.
            'verbs' => [
                'class' => VerbFilter::className(),
                'actions' => [
                    'ajax-publish' => ['POST'],
                ],
            ],
            // The listed AJAX endpoints always respond with JSON.
            'contentNegotiator' => [
                'class' => ContentNegotiator::className(),
                'only' => [
                    'ajax-validation',
                    'ajax-info',
                    'ajax-status',
                    'ajax-list',
                    'ajax-delete',
                ],
                'formats' => [
                    'application/json' => Response::FORMAT_JSON,
                ],
            ],
            // RBAC: each action group is gated by a flow/* permission;
            // ajax-list is allowed without a specific role.
            'access' => [
                'class' => AccessControl::className(),
                'rules' => [
                    [
                        'actions' => ['ajax-list'],
                        'allow' => true,
                    ],
                    [
                        'actions' => ['list', 'info', 'products'],
                        'allow' => true,
                        'roles' => ['flow/list'],
                    ],
                    [
                        'actions' => ['ajax-create', 'ajax-copy'],
                        'allow' => true,
                        'roles' => ['flow/create'],
                    ],
                    [
                        'actions' => ['ajax-update', 'ajax-info', 'relation-flow'],
                        'allow' => true,
                        'roles' => ['flow/update'],
                    ],
                    [
                        'actions' => ['ajax-delete'],
                        'allow' => true,
                        'roles' => ['flow/delete'],
                    ],
                    [
                        'actions' => ['ajax-status'],
                        'allow' => true,
                        'roles' => ['flow/status'],
                    ],
                    [
                        'actions' => ['ajax-publish'],
                        'allow' => true,
                        'roles' => ['flow/publish'],
                    ],
                ],
            ],
        ];
    }

    /**
     * Renders the paged flow list, newest first.
     */
    public function actionList()
    {
        $searchModel = new FlowSearch();
        $dataProvider = $searchModel->search(Yii::$app->request->queryParams);
        /** @var Query $query */
        $query = $dataProvider->query;
        // Only the columns the list view needs.
        $query->select(['id', 'name', 'status', 'is_publish', 'is_delete'])->orderBy('created_at DESC');
        return $this->render('list', [
            'searchModel' => $searchModel,
            'dataProvider' => $dataProvider
        ]);
    }

    /**
     * JSON list of active flows (id + name), optionally filtered by a
     * name substring.
     * @param string|null $keyword substring to match against flow names
     */
    public function actionAjaxList($keyword = null)
    {
        $query = Flow::activeQuery()->select(['id', 'name']);
        if(!empty($keyword)) {
            $query->andWhere(['like', 'name', $keyword]);
        }
        $data = $query->all();
        return ['status' => 200, 'flows' => $this->serializeData($data)];
    }

    /**
     * Renders the products associated with a flow, newest first.
     * @param int $flow_id
     * @throws NotFoundHttpException if the flow does not exist
     */
    public function actionProducts($flow_id)
    {
        $model = $this->findModel($flow_id);
        $searchModel = new ProductSearch();
        $dataProvider = $searchModel->search(Yii::$app->request->queryParams);
        /** @var Query $query */
        $query = $dataProvider->query;
        $query->andWhere(['flow_id' => $model->id]);
        $query->orderBy('created_at DESC');
        return $this->render('products', [
            'flowModel' => $model,
            'searchModel' => $searchModel,
            'dataProvider' => $dataProvider
        ]);
    }

    /**
     * Renders the detail page of a single flow.
     * @param int $flow_id
     * @throws NotFoundHttpException if the flow does not exist
     */
    public function actionInfo($flow_id)
    {
        $model = $this->findModel($flow_id);
        return $this->render('info', [
            'model' => $model,
        ]);
    }

    /**
     * Create a product flow.
     * On success optionally redirects to the flow-node editor when the
     * "save-next" button was used and the user may list flows.
     */
    public function actionAjaxCreate()
    {
        $model = new FlowForm();
        if ($model->load(Yii::$app->request->post()) && $model->validate()) {
            // FlowForm::save() is expected to return the persisted Flow
            // (or a falsy value on failure).
            $model = $model->save();
            if($model){
                Yii::$app->session->setFlash('success', '流程保存成功!');
                if(Yii::$app->request->post('next') === 'save-next'){
                    if(Yii::$app->user->can('flow/list')){
                        return $this->redirect(['flow-node/list', 'flow_id' => $model->id]);
                    }
                    return $this->redirect(['list']);
                }
                return $this->redirect(['list']);
            }
        } else {
            // Surface the first validation error as a flash message.
            $errors = $model->getFirstErrors();
            Yii::$app->session->setFlash('error', reset($errors));
        }
        return $this->render('list', ['model' => $model]);
    }

    /**
     * Copy a product flow.
     */
    public function actionAjaxCopy()
    {
        $model = new CopyFlowForm();
        if ($model->load(Yii::$app->request->post()) && $model->validate()) {
            $model = $model->copy();
            if($model){
                Yii::$app->session->setFlash('success', '流程复制成功!');
                return $this->redirect(['list']);
            }
        } else {
            $errors = $model->getFirstErrors();
            Yii::$app->session->setFlash('error', reset($errors));
        }
        return $this->render('list');
    }

    // Update a product flow.
    public function actionAjaxUpdate($id)
    {
        $flow = $this->findModel($id);
        $model = new FlowForm();
        if ($model->load(Yii::$app->request->post())) {
            // FlowForm::update() applies the form data to the existing Flow.
            if($model->update($flow)){
                Yii::$app->session->setFlash('success', '更新成功!');
                if(Yii::$app->request->post('next') === 'save-next'){
                    if(Yii::$app->user->can('flow/list')){
                        return $this->redirect(['flow-node/list', 'flow_id' => $flow->id]);
                    }
                }
                return $this->redirect(['list']);
            } else {
                Yii::$app->session->setFlash('error', '更新失败!');
            }
        }
        if ($model->hasErrors()) {
            Yii::$app->session->setFlash('error', '更新失败, 您的表单填写有误, 请检查!');
        }
        return $this->redirect(['list']);
    }

    /**
     * JSON detail of a single flow.
     * @param int $id
     * @throws NotFoundHttpException if the flow does not exist
     */
    public function actionAjaxInfo($id)
    {
        $model = $this->findModel($id);
        return ['status' => 200, 'model' => $this->serializeData($model)];
    }

    /**
     * Toggles a flow's status from POSTed id/status; logs the change.
     * Returns {status:200} on success or {status:400, message} on a
     * validation failure.
     */
    public function actionAjaxStatus()
    {
        $id = Yii::$app->request->post('id');
        $status = Yii::$app->request->post('status');
        $model = $this->findModel($id);
        $model->status = $status;
        if($model->validate(['status'])) {
            // Status already validated above; skip full model validation.
            $model->save(false);
            // Record the change in the back-end operation log.
            AdministratorLog::logFlowAjaxStatus($model);
            return ['status' => 200];
        }
        $errors = $model->getFirstErrors();
        return ['status' => 400, 'message' => reset($errors)];
    }

    /**
     * Soft-deletes a flow (disables it and sets its delete flag); logs
     * the operation. Returns {status:200} or {status:400, message}.
     */
    public function actionAjaxDelete()
    {
        $id = Yii::$app->getRequest()->post('id');
        $model = $this->findModel($id);
        if($model->validate(['is_delete'])) {
            $model->status = Flow::STATUS_DISABLED;
            $model->is_delete = Flow::DELETE_ACTIVE;
            $model->save(false);
            // Record the deletion in the back-end operation log.
            AdministratorLog::logFlowAjaxDelete($model);
            return ['status' => 200];
        }
        $errors = $model->getFirstErrors();
        return ['status' => 400, 'message' => reset($errors)];
    }

    /**
     * Publishes a flow via FlowPublishForm; logs the operation and
     * redirects to the flow-node list either way (errors are flashed).
     * @param int $id
     * @throws NotFoundHttpException if the flow does not exist
     */
    public function actionAjaxPublish($id)
    {
        $model = $this->findModel($id);
        $formModel = new FlowPublishForm();
        $formModel->flow = $model;
        if($formModel->validate()) {
            if($formModel->publish()) {
                // Record the publish in the back-end operation log.
                AdministratorLog::logFlowAjaxPublish($model);
                return $this->redirect(['flow-node/list', 'flow_id' => $model->id]);
            }
        }
        $errors = $formModel->getFirstErrors();
        Yii::$app->session->setFlash('error', reset($errors));
        return $this->redirect(['flow-node/list', 'flow_id' => $model->id]);
    }

    /**
     * Removes a product-to-flow association via FlowRelationForm and
     * redirects back to the flow's product list.
     * @param int $flow_id
     * @throws NotFoundHttpException if the flow does not exist
     */
    public function actionRelationFlow($flow_id)
    {
        $flow = $this->findModel($flow_id);
        $flowRelationModel = new FlowRelationForm();
        if($flowRelationModel->load(Yii::$app->request->post()) && $flowRelationModel->remove()) {
            Yii::$app->session->setFlash('success', '取消关联成功!');
            return $this->redirect(['products', 'flow_id' => $flow->id]);
        } else {
            if ($flowRelationModel->hasErrors()) {
                $errors = $flowRelationModel->getFirstErrors();
                Yii::$app->session->setFlash('error', reset($errors));
            } else {
                Yii::$app->session->setFlash('error', '您的操作有误!');
            }
        }
        return $this->redirect(['products', 'flow_id' => $flow->id]);
    }

    /**
     * @param $id
     * @return Flow
     * @throws NotFoundHttpException
     */
    private function findModel($id)
    {
        $model = Flow::findOne($id);
        if (null == $model) {
            throw new NotFoundHttpException('找不到指定的流程!');
        }
        return $model;
    }

    /**
     * @param $data
     * @return mixed
     */
    protected function serializeData($data)
    {
        return Yii::createObject($this->serializer)->serialize($data);
    }

    /**
     * AJAX form validation endpoint for FlowForm; returns the standard
     * ActiveForm validation payload (empty array for non-AJAX requests).
     */
    public function actionAjaxValidation()
    {
        $model = new FlowForm();
        if (Yii::$app->request->isAjax && $model->load(Yii::$app->request->post())) {
            return ActiveForm::validate($model);
        }
        return [];
    }
}
#### C++ Include cctype

---
# The C Character Type Library

> This header declares a set of functions to classify and transform individual characters.

### isalpha( c )
true if alphabetic: a-z or A-Z
```cpp
isalpha('x') // true
isalpha('6') // false
isalpha('!') // false
```

### isdigit( c )
true if digit: 0-9.
```cpp
isdigit('x') // false
isdigit('6') // true
```

### isalnum( c )
Returns true if c is alphabetic or a numeric digit. Thus, returns true if either [[#isalpha c]] or [[#isdigit c]] would return true.

### isspace( c )
true if whitespace.
```cpp
isspace(' ') // true
isspace('\n') // true
isspace('x') // false
```

### toupper( c )
Uppercase version
```cpp
toupper('a') // A
toupper('A') // A
toupper('3') // 3
```

### tolower( c )
Lowercase version
```cpp
tolower('A') // a
tolower('a') // a
tolower('3') // 3
```

### isblank( c )
Returns true if character c is a blank character. Blank characters include spaces and tabs.
```cpp
isblank(myString[5]); // Returns true because that character is a space ' '.
isblank(myString[0]); // Returns false because 'H' is not blank.
```

### isxdigit( c )
Returns true if c is a hexadecimal digit: 0-9, a-f, A-F.
```cpp
isxdigit(myString[3]); // Returns true because '9' is a hexadecimal digit.
isxdigit(myString[1]); // Returns true because 'e' is a hexadecimal digit.
isxdigit(myString[6]); // Returns false because 'G' is not a hexadecimal digit.
```

### ispunct( c )
Returns true if c is a punctuation character. Punctuation characters include: ``!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~``
```cpp
ispunct(myString[4]); // Returns true because '!' is a punctuation character.
ispunct(myString[6]); // Returns false because 'G' is not a punctuation character.
```

### isprint( c )
Returns true if c is a printable character. Printable characters include alphanumeric, punctuation, and space characters.

### iscntrl( c )
Returns true if c is a control character — a non-printing character such as newline, tab, or backspace. In ASCII these are codes 0x00–0x1F plus 0x7F (DEL); no control character is also printable.