text
stringlengths
1
1.05M
#ifndef INCLUDED_NETWORK_FLAG_STATE_CHANGED_H #define INCLUDED_NETWORK_FLAG_STATE_CHANGED_H #include "network/message.h" #include "network/message_handler_sub_system.h" #include "network/message_sender_system.h" #include "core/flag_state_changed_event.h" #include "core/ctf_program_state.h" #include "platform/export.h" namespace network { class FlagStateChangedMessage : public Message { friend class ::boost::serialization::access; public: DEFINE_MESSAGE_BASE( FlagStateChangedMessage ) ::ctf::FlagStateChangedEvent::Type mType; Team::Type mTeam; int32_t mCarrierGUID; int32_t mFlagGUID; FlagStateChangedMessage() : mType( ::ctf::FlagStateChangedEvent::Dropped ) , mTeam( Team::None ) , mCarrierGUID( -1 ) , mFlagGUID( -1 ) { } template<class Archive> void serialize( Archive& ar, const unsigned int version ); }; template<class Archive> void FlagStateChangedMessage::serialize( Archive& ar, const unsigned int version ) { ar& boost::serialization::base_object<Message>( *this ); ar& mType; ar& mTeam; ar& mCarrierGUID; ar& mFlagGUID; } class FlagStateChangedMessageHandlerSubSystem : public MessageHandlerSubSystem { public: DEFINE_SUB_SYSTEM_BASE( FlagStateChangedMessageHandlerSubSystem ) FlagStateChangedMessageHandlerSubSystem(); virtual void Init(); virtual void Execute( Message const& message ); virtual void Update( double DeltaTime ); }; class FlagStateChangedMessageSenderSystem : public MessageSenderSystem { AutoReg mOnFlagStateChanged; void OnFlagStateChanged( ::ctf::FlagStateChangedEvent const& Evt ); public: DEFINE_SYSTEM_BASE( FlagStateChangedMessageSenderSystem ) FlagStateChangedMessageSenderSystem(); virtual void Init(); virtual void Update( double DeltaTime ); }; } // namespace network REAPING2_CLASS_EXPORT_KEY2( network__FlagStateChangedMessage, network::FlagStateChangedMessage, "flag_state_changed" ); #endif//INCLUDED_NETWORK_FLAG_STATE_CHANGED_H //command: "classgenerator.exe" -g "message" -c "flag_state_changed" -e "ctf-flagStateChanged" -m 
"ctf::FlagStateChangedEvent::Type-type Team::Type-team int32_t-carrirGUID int32_t-flagGUID"
<filename>workspaces/ui-v2/src/hooks/useGroupedEndpoints.ts import { useMemo } from 'react'; import groupBy from 'lodash.groupby'; import { IEndpoint } from '<src>/types'; import { findLongestCommonPath } from '<src>/utils'; export const useGroupedEndpoints = <T extends IEndpoint>(endpoints: T[]) => { return useMemo(() => { const commonStart = findLongestCommonPath( endpoints.map((endpoint) => endpoint.pathParameters.map((pathParameter) => pathParameter.name) ) ); const endpointsWithGroups = endpoints.map((endpoint) => ({ ...endpoint, // If there is only one endpoint, split['/'][1] returns undefined since // commonStart.length === endpoint.fullPath.length group: endpoint.fullPath.slice(commonStart.length).split('/')[1] || '', })); return groupBy(endpointsWithGroups, 'group'); }, [endpoints]); };
package com.github.nenomm.ks.ktable; import org.apache.kafka.streams.kstream.KTable; import org.springframework.cloud.stream.annotation.Input; public interface KTableCustomInput { String INPUT = "kTableCustomInput"; @Input(INPUT) KTable<?, ?> ingestSomethn(); }
package pt.isel.pdm.li51n.g4.tmdbisel.data.models.TMDB; import android.os.Parcel; import android.os.Parcelable; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; public class MovieCollection implements Parcelable{ /** * */ public static final Creator<MovieCollection> CREATOR = new Creator<MovieCollection>() { @Override public MovieCollection createFromParcel(Parcel in) { return new MovieCollection(in); } @Override public MovieCollection[] newArray(int size) { return new MovieCollection[size]; } }; @SerializedName("id") @Expose private Integer id; @SerializedName("name") @Expose private String name; @SerializedName("poster_path") @Expose private String posterPath; @SerializedName("backdrop_path") @Expose private String backdropPath; public MovieCollection() { } protected MovieCollection(Parcel in) { id = in.readByte() == 0x00 ? null : in.readInt(); name = in.readString(); posterPath = in.readString(); backdropPath = in.readString(); } public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getPosterPath() { return posterPath; } public void setPosterPath(String posterPath) { this.posterPath = posterPath; } public String getBackdropPath() { return backdropPath; } public void setBackdropPath(String backdropPath) { this.backdropPath = backdropPath; } /** * Describe the kinds of special objects contained in this Parcelable's * marshalled representation. * * @return a bitmask indicating the set of special object types marshalled * by the Parcelable. */ @Override public int describeContents() { return 0; } /** * Flatten this object in to a Parcel. * * @param dest The Parcel in which the object should be written. * @param flags Additional flags about how the object should be written. * May be 0 or {@link #PARCELABLE_WRITE_RETURN_VALUE}. 
*/ @Override public void writeToParcel(Parcel dest, int flags) { if (id == null) { dest.writeByte((byte) (0x00)); } else { dest.writeByte((byte) (0x01)); dest.writeInt(id); } dest.writeString(name); dest.writeString(posterPath); dest.writeString(backdropPath); } }
import * as path from "path"; import * as dotenv from "dotenv"; import {inject, singleton} from "../di"; import { ICascDirProvider, ICascDirProviderToken, IEnvProvider, IEnvProviderToken, } from "../interfaces"; @singleton(IEnvProviderToken) export class EnvProvider implements IEnvProvider { public constructor( @inject(ICascDirProviderToken) cascDirProvider: ICascDirProvider, ) { dotenv.config({ path: path.join(cascDirProvider.cascDir, ".env"), }); } public get env(): Readonly<NodeJS.ProcessEnv> { return process.env; } }
package com.example.android.githubrepolist.database; import android.content.Context; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; public class DatabaseOpenHelper extends SQLiteOpenHelper { /** * There will be a singleton instance of {@link DatabaseOpenHelper} */ private static DatabaseOpenHelper mInstance; /** * Name of database */ private static final String DATABASE_NAME = "github.db"; /** * Database version */ private static final int DATABASE_VERSION =1; /** * Concrete factory method for {@link DatabaseOpenHelper}, use by * {@link DatabaseAdapter} class to get the instance of this class. * * @param context * @return */ public static DatabaseOpenHelper getMyOpenHelper(Context context) { if (mInstance == null) { mInstance = new DatabaseOpenHelper(context); } return mInstance; } /** * {@link DatabaseOpenHelper} constructor that defines the database name including * database version. * * @param context */ public DatabaseOpenHelper(Context context) { super(context, DATABASE_NAME, null, DATABASE_VERSION); } @Override public void onCreate(SQLiteDatabase db) { DatabaseTableHelper.createTable(db); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { // TODO Auto-generated method stub if(newVersion>oldVersion){ //db.execSQL("DROP TABLE IF EXISTS push" ); DatabaseTableHelper.createTable(db); } } }
<gh_stars>1-10 import { OutputAsset, OutputChunk } from 'rollup'; type SimpleObject = Record<string, string | number>; type MakeAttributes = (file: OutputAsset | OutputChunk) => string; type Files = Array<OutputAsset | OutputChunk> | undefined; interface AssetsOptions { publicPath?: string attrs?: SimpleObject | null } interface JSAssetsOptions extends AssetsOptions { includeSafariFix?: true } function htmlAttributes(attributes: AssetsOptions['attrs']): string { if (!attributes) { return ''; } return Object.keys(attributes) .reduce((result, key) => `${result} ${key}="${attributes[key]}"`, ''); } export function generateTags(files: Files, tagName: string, makeAttrs: MakeAttributes): string { if (!files) { return ''; } return files .map((file) => `<${tagName} ${makeAttrs(file)}></${tagName}>`) .join(''); } export function safariFixScript() { // eslint-disable-next-line max-len return '<script>!function(){var e=document,t=e.createElement("script");if(!("noModule"in t)&&"onbeforeload"in t){var n=!1;e.addEventListener("beforeload",function(e){if(e.target===t)n=!0;else if(!e.target.hasAttribute("nomodule")||!n)return;e.preventDefault()},!0),t.type="module",t.src=".",e.head.appendChild(t),t.remove()}}();</script>'; } export function generateJs(files: Files, options: JSAssetsOptions = {}): string { const publicPath = options.publicPath || ''; const { type, ...defaultAttrs } = options.attrs || {} as SimpleObject; const scripts = generateTags(files, 'script', ({ fileName }) => { const typeAttr = fileName.startsWith('legacy/') ? 'nomodule defer' : 'type="module"'; return `${typeAttr} src="${publicPath}${fileName}"${htmlAttributes(defaultAttrs)}`; }); return (options.includeSafariFix ? 
safariFixScript() : '') + scripts; } export function generateCss(files: Files, { publicPath = '', attrs }: AssetsOptions = {}): string { return generateTags(files, 'link', ({ fileName }) => `href="${publicPath}${fileName}" rel="stylesheet"${htmlAttributes(attrs)}`); } type IndexHTMLOptions = { title?: string, publicPath?: string, files: { css?: OutputAsset[], js?: Array<OutputChunk | OutputAsset> }, attributes: { link?: SimpleObject | null, script?: SimpleObject | null, html?: SimpleObject | null } }; export const indexHTML = ({ attributes, files, publicPath, title, }: IndexHTMLOptions) => ` <!DOCTYPE html> <html${htmlAttributes(attributes.html)}> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <title>${title}</title> <meta name="viewport" content="width=device-width, initial-scale=1"> ${generateCss(files.css, { publicPath, attrs: attributes.link })} </head> <body> <div id="app"></div> ${generateJs(files.js, { publicPath, attrs: attributes.script })} </body> </html> `.trim();
/* Visit occurrence table. For every record in the patient registry, create a visit occurrence. Indatum empty for some dag_kiru rows. Use 01-01-1900 as default value. Unique visit identifier (visit_id) stored in the long tables. */ INSERT INTO visit_occurrence (visit_occurrence_id, person_id, visit_start_date, visit_end_date, visit_concept_id, visit_source_value, care_site_id, visit_type_concept_id) SELECT DISTINCT visit_id, lpnr, CASE WHEN indatuma IS NULL THEN to_date( '19000101', 'yyyymmdd') ELSE to_date( indatuma::varchar, 'yyyymmdd') END, CASE WHEN utdatuma IS NULL THEN to_date( '19000101', 'yyyymmdd') ELSE to_date( utdatuma::varchar, 'yyyymmdd') END, CASE visit_source_value WHEN 'sluten' THEN 9201 -- Inpatient visit WHEN 'oppen' THEN 9202 -- Outpatient visit WHEN 'dag kiru' THEN 9202 -- Outpatient visit Alternative: 45878057 ambulatory surgery ELSE 0 END as visit_concept_id, visit_source_value, care_site.care_site_id, 44818518 AS visit_type_concept_id -- 'Visit derived from EHR record' FROM ( SELECT lpnr, indatuma, utdatuma, sjukhus, visit_id, 'sluten' AS visit_source_value -- 'Outpatient' FROM etl_input.patient_sluten_long UNION ALL SELECT lpnr, indatuma, indatuma as utdatuma, sjukhus, visit_id, 'oppen' AS visit_source_value -- 'Inpatient' FROM etl_input.patient_oppen_long UNION ALL SELECT lpnr, indatuma, indatuma as utdatuma, sjukhus, visit_id, '<NAME>' AS visit_source_value -- 'Ambulant Surgery' FROM etl_input.patient_dag_kiru_long ) patient_reg -- It is possible that hospitals in patient registries -- are missing in care_site table. LEFT JOIN care_site care_site ON patient_reg.sjukhus = care_site.care_site_source_value ;
/** * Copyright (C) 2006-2021 Talend Inc. - www.talend.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.talend.sdk.component.studio; import static java.util.Optional.ofNullable; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Proxy; import org.eclipse.core.runtime.Platform; import org.eclipse.swt.SWT; import org.eclipse.swt.widgets.Composite; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceReference; import org.talend.core.GlobalServiceRegister; import org.talend.core.IService; import org.talend.core.model.components.IComponent; import org.talend.core.model.process.EComponentCategory; import org.talend.core.model.process.Element; import org.talend.core.model.process.INode; import org.talend.core.runtime.services.IGenericWizardService; import org.talend.sdk.component.studio.debounce.DebounceManager; import org.talend.sdk.component.studio.debounce.DebouncedAction; import org.talend.sdk.component.studio.metadata.TaCoKitCache; import org.talend.sdk.component.studio.service.AsciidoctorService; import org.talend.sdk.component.studio.service.ComponentService; import org.talend.sdk.component.studio.service.Configuration; import org.talend.sdk.component.studio.service.UiActionsThreadPool; import org.talend.sdk.component.studio.ui.composite.TaCoKitComposite; import org.talend.sdk.component.studio.ui.composite.problemmanager.ComponentViewProblemManager; import 
org.talend.sdk.component.studio.util.TaCoKitConst; import org.talend.sdk.component.studio.websocket.WebSocketClient; public final class Lookups { private Lookups() { throw new AssertionError(); } public static Runnable init() { try { final Field instance = GlobalServiceRegister.class.getDeclaredField("instance"); if (!instance.isAccessible()) { instance.setAccessible(true); } final Object originalInstance = instance.get(null); instance.set(null, EnrichedGlobalServiceRegister.clone(GlobalServiceRegister.class.cast(originalInstance))); return () -> { try { instance.set(null, originalInstance); } catch (final Exception e) { throw new IllegalStateException(e); } }; } catch (final Exception e) { throw new IllegalStateException(e); } } public static UiActionsThreadPool uiActionsThreadPool() { try { return lookup(UiActionsThreadPool.class); } catch (final Exception e) { // for tests return new UiActionsThreadPool(command -> new Thread(command).start()); } } public static DebounceManager debouncer() { try { return lookup(DebounceManager.class); } catch (final Exception e) { // for tests mainly return new DebounceManager() { @Override public DebouncedAction createAction() { return new DebouncedAction(this) { private Runnable task; @Override public synchronized void debounce(final Runnable task, final int timeoutMillis) { this.task = task; } @Override public void run() { ofNullable(task).ifPresent(Runnable::run); } }; } }; } } public static Configuration configuration() { try { return lookup(Configuration.class); } catch (final Exception e) { // for tests mainly return new Configuration(false, 1000); } } public static WebSocketClient client() { return lookup(WebSocketClient.class); } public static ComponentService service() { try { return lookup(ComponentService.class); } catch (final Exception e) { // for tests mainly return new ComponentService(s -> null); } } public static TaCoKitCache taCoKitCache() { return lookup(TaCoKitCache.class); } public static ProcessManager 
manager() { return lookup(ProcessManager.class); } private static <T> T lookup(final Class<T> type) { final BundleContext context = Platform.getBundle(TaCoKitConst.BUNDLE_ID).getBundleContext(); final ServiceReference<T> clientRef = context.getServiceReference(type); return context.getService(clientRef); } public static AsciidoctorService asciidoctor() { try { return lookup(AsciidoctorService.class); } catch (final Exception e) { // for tests mainly return new AsciidoctorService(); } } private static class EnrichedGlobalServiceRegister extends GlobalServiceRegister { private volatile IGenericWizardService wizardService; public static EnrichedGlobalServiceRegister clone(final GlobalServiceRegister instance) throws Exception { EnrichedGlobalServiceRegister enrichedRegister = new EnrichedGlobalServiceRegister(); Field[] fields = GlobalServiceRegister.class.getDeclaredFields(); for (Field field : fields) { field.setAccessible(true); field.set(enrichedRegister, field.get(instance)); } return enrichedRegister; } @Override public IService getService(final Class klass) { final IService service = super.getService(klass); if (klass == IGenericWizardService.class) { if (wizardService == null) { synchronized (this) { if (wizardService == null) { // final WizardRegistry customService = new WizardRegistry(); wizardService = IGenericWizardService.class .cast(Proxy.newProxyInstance(Thread.currentThread().getContextClassLoader(), service.getClass().getInterfaces(), (proxy, method, args) -> { try { switch (method.getName()) { case "creatDynamicComposite": if (args[1] != null && args[1] instanceof INode) { INode node = (INode) args[1]; // decide whether it is v0 or v1 IComponent component = node.getComponent(); if (component != null && "org.talend.sdk.component.studio.ComponentModel" .equals(component.getClass().getName())) { return creatComposite((Composite) args[0], (Element) args[1], (EComponentCategory) args[2], (boolean) args[3]); } else { // it is v0 component, so call 
GenericWizardService // original method return method.invoke(service, args); } } return null; default: return method.invoke(service, args); } } catch (final InvocationTargetException ite) { throw ite.getTargetException(); } })); } } } return wizardService; } return service; } private Composite creatComposite(final Composite parent, final Element element, final EComponentCategory category, final boolean isCompactView) { return new TaCoKitComposite(parent, SWT.H_SCROLL | SWT.V_SCROLL | SWT.NO_FOCUS, category, element, isCompactView, new ComponentViewProblemManager()); } } }
<reponame>raptorhere/neuropod # https://docs.bazel.build/versions/master/skylark/repository_rules.html def _impl(repository_ctx): # The `or` pattern below handles empty strings and unset env variables # Using a default value only handles unset env variables version = repository_ctx.os.environ.get("NEUROPOD_TORCH_VERSION") or "1.1.0" IS_MAC = repository_ctx.os.name.startswith("mac") IS_GPU = (repository_ctx.os.environ.get("NEUROPOD_IS_GPU") or None) != None CUDA_VERSION = repository_ctx.os.environ.get("NEUROPOD_CUDA_VERSION") or "10.0" # Get the torch cuda string (e.g. cpu, cu90, cu92, cu100) torch_cuda_string = "cu" + CUDA_VERSION.replace(".", "") if IS_GPU else "cpu" defines = ["TORCH_VERSION=" + version] # If this is a nightly build, we want to define a variable # to let our code know what nightly version this is # See https://github.com/pytorch/pytorch/issues/23094 if "dev" in version: version_base, version_date = version.split(".dev") defines.append("CAFFE2_NIGHTLY_VERSION=" + version_date) MAPPING = { # Linux CPU "1.1.0-linux-cpu": { "url": "https://download.pytorch.org/libtorch/cpu/libtorch-shared-with-deps-1.1.0.zip", "sha256": "c863a0073ff4c7b6feb958799c7dc3202b3449e86ff1cec9c85c7da9d1fe0218", }, "1.2.0-linux-cpu": { "url": "https://download.pytorch.org/libtorch/cpu/libtorch-shared-with-deps-1.2.0.zip", "sha256": "6b0cc8840e05e5e2742e5c59d75f8379f4eda8737aeb24b5ec653735315102b2", }, "1.3.0-linux-cpu": { "url": "https://download.pytorch.org/libtorch/cpu/libtorch-shared-with-deps-1.3.0%2Bcpu.zip", "sha256": "a1a4bfe2090c418150cf38b37e43b3238b9639806f0c3483097d073792c2e114", }, "1.4.0-linux-cpu": { "url": "https://download.pytorch.org/libtorch/cpu/libtorch-shared-with-deps-1.4.0%2Bcpu.zip", "sha256": "cf2d79574e08198419fd53d3b0edab3e12587649a22185431e3f5c8937177a47", }, "1.5.0-linux-cpu": { "url": "https://download.pytorch.org/libtorch/cpu/libtorch-shared-with-deps-1.5.0%2Bcpu.zip", "sha256": "db3545b0d2b144db4292c2f0bec236febec44aa658dd54f6b3532f2848c50c8a", 
}, # Linux GPU "1.1.0-linux-cu90": { "url": "https://download.pytorch.org/libtorch/cu90/libtorch-shared-with-deps-1.1.0.zip", "sha256": "57ff5faa79c9729f35a2a753717abcef8096cc5646a7b79ddcef2288be5281a9", }, "1.2.0-linux-cu100": { "url": "https://download.pytorch.org/libtorch/cu100/libtorch-shared-with-deps-1.2.0.zip", "sha256": "bd385169dd6137f532648398eeee8d6479be1f6b81314a4373800fcc72bb375d", }, "1.3.0-linux-cu100": { "url": "https://download.pytorch.org/libtorch/cu100/libtorch-shared-with-deps-1.3.0.zip", "sha256": "5943ed9d25f473f9baf4301fc6526f048f89061f38e8cf0cc01506c96ad58ed4", }, "1.4.0-linux-cu100": { "url": "https://download.pytorch.org/libtorch/cu100/libtorch-shared-with-deps-1.4.0%2Bcu100.zip", "sha256": "1557927c9929c8eb8caf8860d0ffdce39ae931af924f0fde859ad1dc0843575c", }, "1.5.0-linux-cu101": { "url": "https://download.pytorch.org/libtorch/cu101/libtorch-shared-with-deps-1.5.0%2Bcu101.zip", "sha256": "04c0fdb46ca1b74c39715d735a4906d08b976f1d57aef31a020eaf967a6a48b7", }, # Mac CPU "1.1.0-mac-cpu": { "url": "https://download.pytorch.org/libtorch/cpu/libtorch-macos-1.1.0.zip", "sha256": "2db31f6c7e69ea9142396d8ed0a7ad70dde2a9993cc8c23cc48c03ffeea13f0f", }, "1.2.0-mac-cpu": { "url": "https://download.pytorch.org/libtorch/cpu/libtorch-macos-1.2.0.zip", "sha256": "927cd63106d4055d4a415cf75b2ecffb430c27736b78f609350b57934883240f", }, "1.3.0-mac-cpu": { "url": "https://download.pytorch.org/libtorch/cpu/libtorch-macos-1.3.0.zip", "sha256": "c44050d28bf21676f68fa0f87caa27bc610cd9802c41b5c83e87295d22e048a4", }, "1.4.0-mac-cpu": { "url": "https://download.pytorch.org/libtorch/cpu/libtorch-macos-1.4.0.zip", "sha256": "84e9112b442ee1e3dc9e078d9066a855a2344ec566616cffbff1662e08cd8bf7", }, "1.5.0-mac-cpu": { "url": "https://download.pytorch.org/libtorch/cpu/libtorch-macos-1.5.0.zip", "sha256": "90bd7e5df2a73af1d80cdaa1403b6f5cc5ac9127be4bb5b7616bf32a868cf7d8", }, } download_mapping = MAPPING["{}-{}-{}".format( version, "mac" if IS_MAC else "linux", torch_cuda_string, 
)] download_url = download_mapping["url"] sha256 = download_mapping["sha256"] repository_ctx.download_and_extract(download_url, stripPrefix = "libtorch", sha256 = sha256) # Generate a build file based on the template repository_ctx.template( "BUILD.bazel", repository_ctx.path(Label(repository_ctx.attr.build_file_template)), substitutions = { "{TORCH_DEFINES}": "{}".format(defines), }, ) libtorch_repository = repository_rule( implementation = _impl, environ = [ "NEUROPOD_TORCH_VERSION", "NEUROPOD_IS_GPU", "NEUROPOD_CUDA_VERSION", ], attrs = {"build_file_template": attr.string(mandatory = True)}, )
macro_rules! assert_custom { ($body:expr, $expected:expr) => { if $body != $expected { panic!("Assertion failed: `{}` is not equal to {}", stringify!($body), stringify!($expected)); } }; } mod shared { pub mod api { pub const FORBIDDEN: &str = "FORBIDDEN"; } } fn main() { let body = "ALLOWED"; assert_custom!(body, shared::api::FORBIDDEN); }
package net.community.apps.tools.svn.wc.state; import java.util.Collection; import java.util.List; import net.community.chest.svnkit.core.wc.SVNLocalCopyData; import org.tmatesoft.svn.core.wc.SVNClientManager; /** * <P>Copyright 2009 as per GPLv2</P> * * <P>Base class helper for {@link StateChecker}-s implementations</P> * * @author <NAME>. * @since Aug 9, 2009 11:22:27 AM */ public abstract class AbstractStateChecker implements StateChecker { private final String _cmd; /* * @see net.community.apps.tools.svn.wc.WCFilesManagerPopupMenu.StateChecker#getActionCommand() */ @Override public final String getActionCommand () { return _cmd; } protected AbstractStateChecker (String cmd) throws IllegalArgumentException { if ((null == (_cmd=cmd)) || (cmd.length() <= 0)) throw new IllegalArgumentException("No action command provided"); } /* * @see net.community.apps.tools.svn.wc.state.StateChecker#actionPerformed(org.tmatesoft.svn.core.wc.SVNClientManager, java.util.List) */ @Override public Throwable actionPerformed ( final SVNClientManager mgr, final List<? extends SVNLocalCopyData> selValues) { if (null == mgr) throw new IllegalStateException("actionPerformed(" + getActionCommand() + ") no " + SVNClientManager.class.getSimpleName()); if ((null == selValues) || (selValues.size() <= 0)) return null; return new UnsupportedOperationException("actionPerformed(" + getActionCommand() + ") N/A"); } // returns null if more than one value selected and it is NOT versioned public static final SVNLocalCopyData checkVersionedLocalCopy ( final List<? extends SVNLocalCopyData> selValues) { final SVNLocalCopyData lclData= ((null == selValues) || (selValues.size() != 1)) ? null : selValues.get(0); if ((null == lclData) || (!lclData.isVersioned())) return null; return lclData; } // returns number of versioned instances public static final int countVersionedLocalCopy ( final Collection<? 
extends SVNLocalCopyData> selValues) { if ((null == selValues) || (selValues.size() <= 0)) return 0; int numVersioned=0; for (final SVNLocalCopyData lclData : selValues) { if ((null == lclData) || (!lclData.isVersioned())) continue; numVersioned++; } return numVersioned; } // returns 1st non-versioned value - null if all versioned or null/empty Collection public static final SVNLocalCopyData checkAllVersionedLocalCopy ( final Collection<? extends SVNLocalCopyData> selValues) { if ((null == selValues) || (selValues.size() <= 0)) return null; for (final SVNLocalCopyData lclData : selValues) { if (null == lclData) continue; if (!lclData.isVersioned()) return lclData; } return null; } // returns 1st versioned value - null if all un-versioned or null/empty Collection public static final SVNLocalCopyData checkNoVersionedLocalCopy ( final Collection<? extends SVNLocalCopyData> selValues) { if ((null == selValues) || (selValues.size() <= 0)) return null; for (final SVNLocalCopyData lclData : selValues) { if (null == lclData) continue; if (lclData.isVersioned()) return lclData; } return null; } }
<filename>inc/hwy/base.h // Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef HIGHWAY_HWY_BASE_H_ #define HIGHWAY_HWY_BASE_H_ // For SIMD module implementations and their callers, target-independent. #include <stddef.h> #include <stdint.h> #include <atomic> // Add to #if conditions to prevent IDE from graying out code. #if (defined __CDT_PARSER__) || (defined __INTELLISENSE__) || \ (defined Q_CREATOR_RUN) || (defined(__CLANGD__)) #define HWY_IDE 1 #else #define HWY_IDE 0 #endif //------------------------------------------------------------------------------ // Detect compiler using predefined macros #ifdef _MSC_VER #define HWY_COMPILER_MSVC _MSC_VER #else #define HWY_COMPILER_MSVC 0 #endif #ifdef __INTEL_COMPILER #define HWY_COMPILER_ICC __INTEL_COMPILER #else #define HWY_COMPILER_ICC 0 #endif #ifdef __GNUC__ #define HWY_COMPILER_GCC (__GNUC__ * 100 + __GNUC_MINOR__) #else #define HWY_COMPILER_GCC 0 #endif // Clang can masquerade as MSVC/GCC, in which case both are set. #ifdef __clang__ #define HWY_COMPILER_CLANG (__clang_major__ * 100 + __clang_minor__) #else #define HWY_COMPILER_CLANG 0 #endif // More than one may be nonzero, but we want at least one. 
#if !HWY_COMPILER_MSVC && !HWY_COMPILER_ICC && !HWY_COMPILER_GCC && \ !HWY_COMPILER_CLANG #error "Unsupported compiler" #endif //------------------------------------------------------------------------------ // Compiler-specific definitions #define HWY_STR_IMPL(macro) #macro #define HWY_STR(macro) HWY_STR_IMPL(macro) #if HWY_COMPILER_MSVC #include <intrin.h> #define HWY_RESTRICT __restrict #define HWY_INLINE __forceinline #define HWY_NOINLINE __declspec(noinline) #define HWY_FLATTEN #define HWY_NORETURN __declspec(noreturn) #define HWY_LIKELY(expr) (expr) #define HWY_UNLIKELY(expr) (expr) #define HWY_PRAGMA(tokens) __pragma(tokens) #define HWY_DIAGNOSTICS(tokens) HWY_PRAGMA(warning(tokens)) #define HWY_DIAGNOSTICS_OFF(msc, gcc) HWY_DIAGNOSTICS(msc) #define HWY_MAYBE_UNUSED #define HWY_HAS_ASSUME_ALIGNED 0 #if (_MSC_VER >= 1700) #define HWY_MUST_USE_RESULT _Check_return_ #else #define HWY_MUST_USE_RESULT #endif #else #define HWY_RESTRICT __restrict__ #define HWY_INLINE inline __attribute__((always_inline)) #define HWY_NOINLINE __attribute__((noinline)) #define HWY_FLATTEN __attribute__((flatten)) #define HWY_NORETURN __attribute__((noreturn)) #define HWY_LIKELY(expr) __builtin_expect(!!(expr), 1) #define HWY_UNLIKELY(expr) __builtin_expect(!!(expr), 0) #define HWY_PRAGMA(tokens) _Pragma(#tokens) #define HWY_DIAGNOSTICS(tokens) HWY_PRAGMA(GCC diagnostic tokens) #define HWY_DIAGNOSTICS_OFF(msc, gcc) HWY_DIAGNOSTICS(gcc) // Encountered "attribute list cannot appear here" when using the C++17 // [[maybe_unused]], so only use the old style attribute for now. 
#define HWY_MAYBE_UNUSED __attribute__((unused)) #define HWY_MUST_USE_RESULT __attribute__((warn_unused_result)) #endif // !HWY_COMPILER_MSVC //------------------------------------------------------------------------------ // Builtin/attributes #ifdef __has_builtin #define HWY_HAS_BUILTIN(name) __has_builtin(name) #else #define HWY_HAS_BUILTIN(name) 0 #endif #ifdef __has_attribute #define HWY_HAS_ATTRIBUTE(name) __has_attribute(name) #else #define HWY_HAS_ATTRIBUTE(name) 0 #endif // Enables error-checking of format strings. #if HWY_HAS_ATTRIBUTE(__format__) #define HWY_FORMAT(idx_fmt, idx_arg) \ __attribute__((__format__(__printf__, idx_fmt, idx_arg))) #else #define HWY_FORMAT(idx_fmt, idx_arg) #endif // Returns a void* pointer which the compiler then assumes is N-byte aligned. // Example: float* HWY_RESTRICT aligned = (float*)HWY_ASSUME_ALIGNED(in, 32); // // The assignment semantics are required by GCC/Clang. ICC provides an in-place // __assume_aligned, whereas MSVC's __assume appears unsuitable. #if HWY_HAS_BUILTIN(__builtin_assume_aligned) #define HWY_ASSUME_ALIGNED(ptr, align) __builtin_assume_aligned((ptr), (align)) #else #define HWY_ASSUME_ALIGNED(ptr, align) (ptr) /* not supported */ #endif // Clang and GCC require attributes on each function into which SIMD intrinsics // are inlined. Support both per-function annotation (HWY_ATTR) for lambdas and // automatic annotation via pragmas. 
#if HWY_COMPILER_CLANG #define HWY_PUSH_ATTRIBUTES(targets_str) \ HWY_PRAGMA(clang attribute push(__attribute__((target(targets_str))), \ apply_to = function)) #define HWY_POP_ATTRIBUTES HWY_PRAGMA(clang attribute pop) #elif HWY_COMPILER_GCC #define HWY_PUSH_ATTRIBUTES(targets_str) \ HWY_PRAGMA(GCC push_options) HWY_PRAGMA(GCC target targets_str) #define HWY_POP_ATTRIBUTES HWY_PRAGMA(GCC pop_options) #else #define HWY_PUSH_ATTRIBUTES(targets_str) #define HWY_POP_ATTRIBUTES #endif //------------------------------------------------------------------------------ // Detect architecture using predefined macros #if defined(__i386__) || defined(_M_IX86) #define HWY_ARCH_X86_32 1 #else #define HWY_ARCH_X86_32 0 #endif #if defined(__x86_64__) || defined(_M_X64) #define HWY_ARCH_X86_64 1 #else #define HWY_ARCH_X86_64 0 #endif #if HWY_ARCH_X86_32 || HWY_ARCH_X86_64 #define HWY_ARCH_X86 1 #else #define HWY_ARCH_X86 0 #endif #if defined(__powerpc64__) || defined(_M_PPC) #define HWY_ARCH_PPC 1 #else #define HWY_ARCH_PPC 0 #endif #if defined(__arm__) || defined(_M_ARM) || defined(__aarch64__) #define HWY_ARCH_ARM 1 #else #define HWY_ARCH_ARM 0 #endif // There isn't yet a standard __wasm or __wasm__. #ifdef __EMSCRIPTEN__ #define HWY_ARCH_WASM 1 #else #define HWY_ARCH_WASM 0 #endif #if HWY_ARCH_X86 + HWY_ARCH_PPC + HWY_ARCH_ARM + HWY_ARCH_WASM != 1 #error "Must detect exactly one platform" #endif //------------------------------------------------------------------------------ // Macros #define HWY_API static HWY_INLINE HWY_FLATTEN HWY_MAYBE_UNUSED #define HWY_CONCAT_IMPL(a, b) a##b #define HWY_CONCAT(a, b) HWY_CONCAT_IMPL(a, b) #define HWY_MIN(a, b) ((a) < (b) ? (a) : (b)) #define HWY_MAX(a, b) ((a) < (b) ? (b) : (a)) // Alternative for asm volatile("" : : : "memory"), which has no effect. #define HWY_FENCE std::atomic_thread_fence(std::memory_order_acq_rel) // 4 instances of a given literal value, useful as input to LoadDup128. 
#define HWY_REP4(literal) literal, literal, literal, literal

#define HWY_ABORT(format, ...) \
  ::hwy::Abort(__FILE__, __LINE__, format, ##__VA_ARGS__)

// Always enabled.
#define HWY_ASSERT(condition)             \
  do {                                    \
    if (!(condition)) {                   \
      HWY_ABORT("Assert %s", #condition); \
    }                                     \
  } while (0)

// Only for "debug" builds
#if !defined(NDEBUG) || defined(ADDRESS_SANITIZER) || \
    defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER)
#define HWY_DASSERT(condition) HWY_ASSERT(condition)
#else
// Swallows the condition entirely; empty do/while keeps statement semantics.
#define HWY_DASSERT(condition) \
  do {                         \
  } while (0)
#endif

//------------------------------------------------------------------------------

namespace hwy {

// See also HWY_ALIGNMENT - aligned_allocator aligns to the larger of that and
// the vector size, whose upper bound is specified here.
#if HWY_ARCH_X86
static constexpr size_t kMaxVectorSize = 64;  // AVX-512
#define HWY_ALIGN_MAX alignas(64)
#else
static constexpr size_t kMaxVectorSize = 16;
#define HWY_ALIGN_MAX alignas(16)
#endif

// Prints to stderr and terminates. Format checking: file/line occupy
// parameter slots 1-2, so the format string is slot 3, varargs start at 4.
HWY_NORETURN void HWY_FORMAT(3, 4)
    Abort(const char* file, int line, const char* format, ...);

// True for floating-point T: the fraction in 1.25 survives conversion to T
// only if T is non-integral.
template <typename T>
constexpr bool IsFloat() {
  return T(1.25) != T(1);
}

// True for signed T (unsigned T wraps -1 to the maximum value, which is > 0).
template <typename T>
constexpr bool IsSigned() {
  return T(0) > T(-1);
}

// Largest/smallest representable integer values.
template <typename T>
constexpr T LimitsMax() {
  // Signed: 2^(bits-1) - 1 built in 64-bit arithmetic, then narrowed.
  return IsSigned<T>() ? T((1ULL << (sizeof(T) * 8 - 1)) - 1)
                       : static_cast<T>(~0ull);
}
template <typename T>
constexpr T LimitsMin() {
  // Two's complement minimum is -(max) - 1; written to avoid overflow.
  return IsSigned<T>() ? T(-1) - LimitsMax<T>() : T(0);
}

// Manual control of overload resolution (SFINAE): the false specialization
// has no `type` member, so the overload drops out of consideration.
template <bool Condition, class T>
struct EnableIfT {};
template <class T>
struct EnableIfT<true, T> {
  using type = T;
};

template <bool Condition, class T = void>
using EnableIf = typename EnableIfT<Condition, T>::type;

// Ceiling division. NOTE: a + b - 1 can overflow T1 for values near its
// maximum.
template <typename T1, typename T2>
constexpr inline T1 DivCeil(T1 a, T2 b) {
  return (a + b - 1) / b;
}

// Works for any `align`; if a power of two, compiler emits ADD+AND.
constexpr inline size_t RoundUpTo(size_t what, size_t align) { return DivCeil(what, align) * align; } // Undefined results for x == 0. HWY_API size_t Num0BitsBelowLS1Bit_Nonzero32(const uint32_t x) { #ifdef _MSC_VER unsigned long index; _BitScanForward(&index, x); return index; #else return static_cast<size_t>(__builtin_ctz(x)); #endif } HWY_API size_t PopCount(uint64_t x) { #if HWY_COMPILER_CLANG || HWY_COMPILER_GCC return static_cast<size_t>(__builtin_popcountll(x)); #elif HWY_COMPILER_MSVC && HWY_ARCH_X86_64 return _mm_popcnt_u64(x); #elif HWY_COMPILER_MSVC return _mm_popcnt_u32(uint32_t(x)) + _mm_popcnt_u32(uint32_t(x >> 32)); #else x -= ((x >> 1) & 0x55555555U); x = (((x >> 2) & 0x33333333U) + (x & 0x33333333U)); x = (((x >> 4) + x) & 0x0F0F0F0FU); x += (x >> 8); x += (x >> 16); x += (x >> 32); x = x & 0x0000007FU; return (unsigned int)x; #endif } // The source/destination must not overlap/alias. template <size_t kBytes, typename From, typename To> HWY_API void CopyBytes(const From* from, To* to) { #if HWY_COMPILER_MSVC const uint8_t* HWY_RESTRICT from_bytes = reinterpret_cast<const uint8_t*>(from); uint8_t* HWY_RESTRICT to_bytes = reinterpret_cast<uint8_t*>(to); for (size_t i = 0; i < kBytes; ++i) { to_bytes[i] = from_bytes[i]; } #else // Avoids horrible codegen on Clang (series of PINSRB) __builtin_memcpy(to, from, kBytes); #endif } } // namespace hwy #endif // HIGHWAY_HWY_BASE_H_
package org.moskito.control.ui.resource.accumulators;

import org.moskito.control.ui.bean.ChartBean;
import org.moskito.control.ui.resource.ControlReplyObject;

import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.ArrayList;
import java.util.List;

/**
 * This is a container bean for accumulator chart beans.
 * Serialized by JAXB; since {@link XmlElement} carries no explicit name, the
 * {@code charts} field name is part of the wire format and must not be renamed.
 *
 * @author strel
 */
@XmlRootElement
public class AccumulatorChartsListResponse extends ControlReplyObject {

    /**
     * Accumulator chart beans list.
     */
    @XmlElement()
    private List<ChartBean> charts;

    /**
     * Initializes the response with an empty, mutable chart list so the
     * serialized form always contains the element.
     */
    public AccumulatorChartsListResponse() {
        this.charts = new ArrayList<>();
    }

    /**
     * @return the chart beans carried by this response
     */
    public List<ChartBean> getCharts() {
        return charts;
    }

    /**
     * @param charts chart beans to expose; replaces the current list reference
     */
    public void setCharts(List<ChartBean> charts) {
        this.charts = charts;
    }
}
import { Injectable, Input } from '@angular/core';

@Injectable({
  providedIn: 'root'
})
export class ArrayService {

  // The numbers to operate on; callers assign this directly.
  // FIX(review): the @Input() decorator was removed — it is a binding
  // decorator for components/directives and has no effect on an injectable
  // service (Angular never binds inputs on services).
  list: number[];

  /**
   * Returns the smallest element of `list`.
   *
   * Preserves original behavior: returns `undefined` (despite the `number`
   * return type) when `list` is empty, and throws if `list` was never
   * assigned — NOTE(review): confirm callers always populate `list` first.
   */
  smallestElement(): number {
    let min = this.list[0];
    for (let i = 1; i < this.list.length; i++) {
      if (this.list[i] < min) {
        min = this.list[i];
      }
    }
    return min;
  }
}
#!/usr/bin/env bash
# Launches the packaged worker node.
# NOTE(review): uses relative paths — assumes the script is run from the
# repository root; confirm callers honor that.
set -euo pipefail  # abort instead of launching a binary that chmod failed on

# The pack step does not always preserve the executable bit; restore it.
chmod u+x ./workernode/target/pack/bin/worker-node

# exec replaces the shell so signals go straight to the worker process.
exec ./workernode/target/pack/bin/worker-node worker.conf
<filename>src/android/UltraUnionpay.java package com.ott.cordova.plugin; import android.content.Intent; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.CallbackContext; import org.apache.cordova.LOG; import org.apache.cordova.PluginResult; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import com.unionpay.UPPayAssistEx; /** * This class echoes a string called from JavaScript. */ public class UltraUnionpay extends CordovaPlugin { private CallbackContext currentCallbackContext; @Override public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException { if (action.equals("pay")) { String jsonMsg = args.getString(0); JSONObject payInfo = new JSONObject(jsonMsg); this.pay(payInfo, callbackContext); return true; } return false; } private void pay(JSONObject payInfo, CallbackContext callbackContext) throws JSONException { this.currentCallbackContext = callbackContext; String tn = payInfo.getString("tn"); String mode = payInfo.getString("mode"); LOG.d("支付数据:", "\ntn" + tn + "\nmode" + mode); cordova.setActivityResultCallback(this); UPPayAssistEx.startPay(cordova.getActivity(), null, null, tn, mode); } public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (data == null) { return; } String payResult; try { payResult = data.getExtras().getString("pay_result"); }catch (NullPointerException e){ payResult = ""; } JSONObject pluginResultInfo = new JSONObject(); try { pluginResultInfo.put("code", payResult.toLowerCase()); } catch (JSONException e) { e.printStackTrace(); } if (payResult.equalsIgnoreCase("success")) { try { pluginResultInfo.put("msg", "支付成功"); if(data.hasExtra("result_data")){ pluginResultInfo.put("successExtraData", data.getExtras().getString("result_data")); } } catch (JSONException e) { e.printStackTrace(); } PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, 
pluginResultInfo); this.currentCallbackContext.sendPluginResult(pluginResult); } else if (payResult.equalsIgnoreCase("fail")) { try { pluginResultInfo.put("msg", "支付失败"); } catch (JSONException e) { e.printStackTrace(); } PluginResult pluginResult = new PluginResult(PluginResult.Status.ERROR, pluginResultInfo); this.currentCallbackContext.sendPluginResult(pluginResult); } else if (payResult.equalsIgnoreCase("cancel")) { try { pluginResultInfo.put("msg", "支付取消"); } catch (JSONException e) { e.printStackTrace(); } PluginResult pluginResult = new PluginResult(PluginResult.Status.ERROR, pluginResultInfo); this.currentCallbackContext.sendPluginResult(pluginResult); } } }
# -*- coding: utf-8 -*-
"""
Azure Resource Manager (ARM) Compute Proximity Placement Group State Module

.. versionadded:: 4.0.0

:maintainer: <<EMAIL>>

:configuration: This module requires Azure Resource Manager credentials to be passed via acct. Note that the
    authentication parameters are case sensitive.

    Required provider parameters:

    if using username and password:
      * ``subscription_id``
      * ``username``
      * ``password``

    if using a service principal:
      * ``subscription_id``
      * ``tenant``
      * ``client_id``
      * ``secret``

    Optional provider parameters:

    **cloud_environment**: Used to point the cloud driver to different API endpoints, such as Azure GovCloud.
    Possible values:
      * ``AZURE_PUBLIC_CLOUD`` (default)
      * ``AZURE_CHINA_CLOUD``
      * ``AZURE_US_GOV_CLOUD``
      * ``AZURE_GERMAN_CLOUD``

    Example acct setup for Azure Resource Manager authentication:

    .. code-block:: yaml

        azurerm:
            default:
                subscription_id: 3287abc8-f98a-c678-3bde-326766fd3617
                tenant: ABCDEFAB-1234-ABCD-1234-ABCDEFABCDEF
                client_id: ABCDEFAB-1234-ABCD-1234-ABCDEFABCDEF
                secret: XXXXXXXXXXXXXXXXXXXXXXXX
                cloud_environment: AZURE_PUBLIC_CLOUD
            user_pass_auth:
                subscription_id: 3287abc8-f98a-c678-3bde-326766fd3617
                username: fletch
                password: <PASSWORD>

    The authentication parameters can also be passed as a dictionary of keyword arguments to the ``connection_auth``
    parameter of each state, but this is not preferred and could be deprecated in the future.

"""
# Python libs
from __future__ import absolute_import
from dict_tools import differ
import logging

log = logging.getLogger(__name__)

# Transparent requisites consumed by the idem runtime: "present" requires the
# resource group state to have run first.
TREQ = {
    "present": {"require": ["states.azurerm.resource.group.present",]},
}


async def present(
    hub,
    ctx,
    name,
    resource_group,
    group_type="standard",
    tags=None,
    connection_auth=None,
    **kwargs,
):
    """
    .. versionadded:: 4.0.0

    Ensures the specified proximity placement group exists.

    :param name: The name of the proximity placement group.

    :param resource_group: The name of the resource group.

    :param group_type: The type of the proximity placement group. Possible values include: "standard", "ultra".
        Defaults to "standard".

    :param tags: A dictionary of strings can be passed as tag metadata to the proximity placement group object.

    :param connection_auth: A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure proximity placement group exists:
            azurerm.compute.proximity_placement_group.present:
                - name: test_ppg
                - resource_group: test_group
                - group_type: test_type
                - tags:
                    contact_name: <NAME>

    """
    # Standard idem state return shape; "comment" strings are part of the
    # state's observable contract.
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    action = "create"

    # Fall back to credentials resolved via acct when no explicit dict given.
    if not isinstance(connection_auth, dict):
        if ctx["acct"]:
            connection_auth = ctx["acct"]
        else:
            ret[
                "comment"
            ] = "Connection information must be specified via acct or connection_auth dictionary!"
            return ret

    # Look up the existing group; a dict containing "error" means "absent".
    ppg = await hub.exec.azurerm.compute.proximity_placement_group.get(
        ctx, name, resource_group, azurerm_log_level="info", **connection_auth
    )

    if "error" not in ppg:
        # Group exists -> update path. Only tags are diffed; other properties
        # are not compared here.
        action = "update"
        tag_changes = differ.deep_diff(ppg.get("tags", {}), tags or {})
        if tag_changes:
            ret["changes"]["tags"] = tag_changes

        if not ret["changes"]:
            ret["result"] = True
            ret["comment"] = "Proximity placement group {0} is already present.".format(
                name
            )
            return ret

        if ctx["test"]:
            ret["result"] = None
            ret["comment"] = "Proximity placement group {0} would be updated.".format(
                name
            )
            return ret

    # Only reached in test mode on the create path (update-path test mode
    # returned above).
    if ctx["test"]:
        ret["comment"] = "Proximity placement group {0} would be created.".format(name)
        ret["result"] = None
        return ret

    # Merge credentials into the passthrough kwargs for the exec call.
    ppg_kwargs = kwargs.copy()
    ppg_kwargs.update(connection_auth)

    ppg = await hub.exec.azurerm.compute.proximity_placement_group.create_or_update(
        ctx=ctx,
        name=name,
        resource_group=resource_group,
        group_type=group_type,
        tags=tags,
        **ppg_kwargs,
    )

    # On create, report the full new object; on update only the tag diff
    # recorded above is reported.
    if action == "create":
        ret["changes"] = {"old": {}, "new": ppg}

    if "error" not in ppg:
        ret["result"] = True
        # action is "create" or "update", so this reads "created"/"updated".
        ret["comment"] = f"Proximity placement group {name} has been {action}d."
        return ret

    ret["comment"] = "Failed to {0} proximity placement group {1}! ({2})".format(
        action, name, ppg.get("error")
    )
    # Clear changes on failure so no phantom diff is reported.
    if not ret["result"]:
        ret["changes"] = {}
    return ret


async def absent(hub, ctx, name, resource_group, connection_auth=None, **kwargs):
    """
    .. versionadded:: 4.0.0

    Ensures the specified proximity placement group does not exist.

    :param name: The name of the proximity placement group.

    :param resource_group: The name of the resource group.

    :param connection_auth: A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure proximity placement group absent:
            azurerm.compute.proximity_placement_group.absent:
                - name: test_ppg
                - resource_group: test_group

    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    # Same credential fallback as in present().
    if not isinstance(connection_auth, dict):
        if ctx["acct"]:
            connection_auth = ctx["acct"]
        else:
            ret[
                "comment"
            ] = "Connection information must be specified via acct or connection_auth dictionary!"
            return ret

    ppg = await hub.exec.azurerm.compute.proximity_placement_group.get(
        ctx, name, resource_group, azurerm_log_level="info", **connection_auth
    )

    # "error" in the lookup means the group is already gone -> success.
    if "error" in ppg:
        ret["result"] = True
        ret["comment"] = "Proximity placement group {0} was not found.".format(name)
        return ret

    if ctx["test"]:
        ret["comment"] = "Proximity placement group {0} would be deleted.".format(name)
        ret["result"] = None
        ret["changes"] = {
            "old": ppg,
            "new": {},
        }
        return ret

    deleted = await hub.exec.azurerm.compute.proximity_placement_group.delete(
        ctx, name, resource_group, **connection_auth
    )

    if deleted:
        ret["result"] = True
        ret["comment"] = "Proximity placement group {0} has been deleted.".format(name)
        ret["changes"] = {"old": ppg, "new": {}}
        return ret

    ret["comment"] = "Failed to delete proximity placement group {0}!".format(name)
    return ret
# wget -qO- https://raw.github.com/progrium/dokku/v0.2.3/bootstrap.sh | sudo DOKKU_TAG=v0.2.3 bash

VERSION=$(lsb_release -sr)
OLD_VERSION="12.04"

# NOTE(review): $OPTION1 is never defined in this script — presumably a dokku
# tag/branch exported by the caller; if unset the bootstrap URL is malformed.
# Confirm the calling environment.
if [ "$VERSION" == "$OLD_VERSION" ]; then
  # required for 12.04
  sudo apt-get install -y python-software-properties
else
  # required workaround for bug on 14.04
  wget -qO- https://raw.github.com/progrium/dokku/$OPTION1/bootstrap.sh | sudo DOKKU_TAG=v0.2.3 bash
fi

# NOTE(review): on non-12.04 systems the bootstrap pipeline runs TWICE (once
# in the else branch above, once here). The comment above calls it a 14.04
# workaround — verify the double run is intentional.
wget -qO- https://raw.github.com/progrium/dokku/$OPTION1/bootstrap.sh | sudo DOKKU_TAG=v0.2.3 bash
#!/usr/bin/env bash
# Copyright (c) 2015-present, Facebook, Inc.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

# Runs the e2e test suites inside a dockerized node environment, optionally
# applying the uncommitted working-tree changes of the current branch.

function print_help {
  echo "Usage: ${0} [OPTIONS]"
  echo ""
  echo "OPTIONS:"
  echo "  --node-version <version>  the node version to use while testing [6]"
  echo "  --git-branch <branch>     the git branch to checkout for testing [the current one]"
  # FIX: 'installs' was missing its opening quote, and 'monorepos' (supported
  # by the case statement below) was missing from the list.
  echo "  --test-suite <suite>      which test suite to use ('simple', 'installs', 'kitchensink', 'monorepos', 'all') ['all']"
  echo "  --interactive             gain a bash shell after the test run"
  echo "  --help                    print this message and exit"
  echo ""
}

cd $(dirname $0)

# Defaults, overridable via the flags parsed below.
node_version=6
current_git_branch=`git rev-parse --abbrev-ref HEAD`
git_branch=${current_git_branch}
test_suite=all
interactive=false

while [ "$1" != "" ]; do
  case $1 in
    "--node-version")
      shift
      node_version=$1
      ;;
    "--git-branch")
      shift
      git_branch=$1
      ;;
    "--test-suite")
      shift
      test_suite=$1
      ;;
    "--interactive")
      interactive=true
      ;;
    "--help")
      print_help
      exit 0
      ;;
  esac
  shift
done

# Map the selected suite onto the script(s) to run; unknown values fall
# through to running everything.
test_command="./tasks/e2e-simple.sh && ./tasks/e2e-kitchensink.sh && ./tasks/e2e-installs.sh && ./tasks/e2e-monorepos.sh"
case ${test_suite} in
  "all")
    ;;
  "simple")
    test_command="./tasks/e2e-simple.sh"
    ;;
  "kitchensink")
    test_command="./tasks/e2e-kitchensink.sh"
    ;;
  "installs")
    test_command="./tasks/e2e-installs.sh"
    ;;
  "monorepos")
    test_command="./tasks/e2e-monorepos.sh"
    ;;
  *)
    ;;
esac

# Script run inside the container to replay the local uncommitted changes
# onto the freshly cloned repo (stash -> patch -> apply).
read -r -d '' apply_changes <<- CMD
cd /var/jzkit-cli
git config --global user.name "Create React App"
git config --global user.email "cra@email.com"
git stash save -u
git stash show -p > patch
git diff 4b825dc642cb6eb9a060e54bf8d69288fbee4904 stash^3 >> patch
git stash pop
cd -
mv /var/jzkit-cli/patch .
git apply patch
rm patch
CMD

# Local changes only make sense when testing the currently checked-out branch.
if [ ${git_branch} != ${current_git_branch} ]; then
  apply_changes=''
fi

read -r -d '' command <<- CMD
echo "prefix=~/.npm" > ~/.npmrc
mkdir -p ~/.npm
export PATH=\$PATH:~/.npm/bin
set -x
git clone /var/jzkit-cli jzkit-cli --branch ${git_branch}
cd jzkit-cli
${apply_changes}
node --version
npm --version
set +x
${test_command} && echo -e "\n\e[1;32m✔ Job passed\e[0m" || echo -e "\n\e[1;31m✘ Job failed\e[0m"
$([[ ${interactive} == 'true' ]] && echo 'bash')
CMD

docker run \
  --env CI=true \
  --env NPM_CONFIG_QUIET=true \
  --tty \
  --user node \
  --volume ${PWD}/..:/var/jzkit-cli \
  --workdir /home/node \
  $([[ ${interactive} == 'true' ]] && echo '--interactive') \
  node:${node_version} \
  bash -c "${command}"
import json
from os import walk, makedirs
from os.path import exists, join, basename
from shutil import copyfile
from collections import defaultdict
from argparse import ArgumentParser

NUM_CLASSES = 12
# BUGFIX: this was the string 'webm', so `file_extension in VIDEO_FILE_EXTENSIONS`
# performed SUBSTRING matching and accepted e.g. 'web', 'eb' or 'b'. A tuple
# gives exact-match membership (and allows adding extensions later).
VIDEO_FILE_EXTENSIONS = ('webm',)
XML_FRAGMENT_TEMPLATE = '<annotations>\n{}</annotations>'


def load_annot(input_dir):
    """Walks `input_dir`, pairing each video file with its XML annotation.

    :param input_dir: root directory to scan recursively.
    :return: dict keyed by base file name; each record has 'annot', 'label',
        'user_id', 'video_name' and 'video_path'. Entries missing either the
        video or the annotation are dropped.
    """
    out_data = dict()
    for root, dirs, files in walk(input_dir):
        if len(files) == 0:
            continue

        local_data = defaultdict(dict)
        for f in files:
            file_name_parts = f.split('.')
            file_name = '.'.join(file_name_parts[:-1])
            file_extension = file_name_parts[-1]

            record = local_data[file_name]
            if file_extension == 'xml':
                # FIX: bare `except:` also swallowed SystemExit and
                # KeyboardInterrupt; Exception covers parse/IO failures only.
                try:
                    annot_str = repair_annot(join(root, f))
                    annot = parse_annot(annot_str)
                except Exception:
                    annot = None

                record['annot'] = annot
                if record['annot'] is None:
                    print(f' * invalid: {basename(root)}/{f}')
            elif file_extension in VIDEO_FILE_EXTENSIONS:
                # Expected naming: a-b-c-<user_id>-<label>.<ext>
                name_components = file_name.split('-')
                assert len(name_components) == 5, f'Incorrect naming: {file_name}'

                record['label'] = name_components[4]
                record['user_id'] = name_components[3]
                record['video_name'] = f
                record['video_path'] = join(root, f)

        # Keep only records that have both the video and an annotation entry.
        filtered_data = {k: v for k, v in local_data.items() if 'video_path' in v and 'annot' in v}
        out_data.update(filtered_data)

    return out_data


def repair_annot(file_path):
    """Extracts the first person track from a CVAT dump and rewraps it in a
    minimal <annotations> fragment so it can be parsed in isolation."""
    content = ''
    enable_collecting = False
    with open(file_path, encoding='unicode_escape') as input_stream:
        for line in input_stream:
            if '<track id=\"0\" label=\"person\" source=\"manual\">' in line:
                enable_collecting = True
            elif '</track>' in line:
                content += line
                break

            if enable_collecting:
                content += line

    return XML_FRAGMENT_TEMPLATE.format(content)


def parse_annot(xml_fragment):
    """Parses a repaired annotation fragment into clip boundaries.

    :param xml_fragment: XML string produced by repair_annot().
    :return: dict with video_start/video_end/clip_start/clip_end, or None if
        no frame was marked as a valid sign action.
    """
    # Lazy import so the module stays importable without lxml installed.
    from lxml import etree

    root = etree.XML(xml_fragment.encode('utf-8'))

    tracks = []
    for element in root:
        if element.tag != 'track':
            continue

        all_frame_ids, valid_frame_ids = [], []
        for bbox in element:
            frame_id = int(bbox.attrib['frame'])
            all_frame_ids.append(frame_id)

            # Each bbox must carry exactly one sign_action attribute.
            actions = []
            for action in bbox:
                if action.tag == 'attribute' and action.attrib['name'] == 'sign_action':
                    actions.append(action.text)
            assert len(actions) == 1,\
                f'Expected single action per frame but got {len(actions)} actions'
            action = actions[0]

            valid_frame = action == 'yes'
            if valid_frame:
                valid_frame_ids.append(frame_id)

        if len(valid_frame_ids) > 0:
            # Ends are exclusive, hence the +1.
            tracks.append(dict(
                video_start=min(all_frame_ids),
                video_end=max(all_frame_ids) + 1,
                clip_start=min(valid_frame_ids),
                clip_end=max(valid_frame_ids) + 1,
            ))

    if len(tracks) == 0:
        return None
    else:
        assert len(tracks) == 1, f'Expected single track per video but got {len(tracks)} tracks'
        return tracks[0]


def dump_annot(annot, out_path):
    """Writes the annotation dict to `out_path` as JSON."""
    with open(out_path, 'w') as output_stream:
        json.dump(annot, output_stream)


def copy_videos(annot, out_dir):
    """Copies every referenced video into `out_dir`, skipping existing files."""
    # Lazy import so the module stays importable without tqdm installed.
    from tqdm import tqdm

    for record in tqdm(annot, desc='Copying videos', leave=False):
        input_file_path = record['video_path']
        output_file_path = join(out_dir, record['video_name'])
        if not exists(output_file_path):
            copyfile(input_file_path, output_file_path)


def main():
    """CLI entry point: collect annotations, dump a JSON index, copy videos."""
    parser = ArgumentParser()
    parser.add_argument('--input_dir', '-i', type=str, required=True)
    parser.add_argument('--output_dir', '-o', type=str, required=True)
    args = parser.parse_args()

    assert exists(args.input_dir)
    if not exists(args.output_dir):
        makedirs(args.output_dir)

    data = load_annot(args.input_dir)
    user_ids = set([record['user_id'] for record in data.values()])
    print(f'Loaded {len(data)} records ({len(user_ids)} unique users).')

    out_annot_path = join(args.output_dir, 'videos_info.json')
    dump_annot(data, out_annot_path)
    print(f'Annotation has been dumped to {out_annot_path}')

    out_videos_dir = join(args.output_dir, 'videos')
    if not exists(out_videos_dir):
        makedirs(out_videos_dir)
    copy_videos(data.values(), out_videos_dir)
    print(f'Videos have been copied to {out_videos_dir}')


if __name__ == '__main__':
    main()
package documentdb

// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
//
// NOTE(review): AutoRest-generated enum definitions — do not hand-edit logic
// here; regenerate from the API specification instead.

// APIType enumerates the values for api type.
type APIType string

const (
	APITypeCassandra APIType = "Cassandra"
	APITypeGremlin   APIType = "Gremlin"
	APITypeGremlinV2 APIType = "GremlinV2"
	APITypeMongoDB   APIType = "MongoDB"
	APITypeSQL       APIType = "Sql"
	APITypeTable     APIType = "Table"
)

// PossibleAPITypeValues returns an array of possible values for the APIType const type.
func PossibleAPITypeValues() []APIType {
	return []APIType{APITypeCassandra, APITypeGremlin, APITypeGremlinV2, APITypeMongoDB, APITypeSQL, APITypeTable}
}

// AuthenticationMethod enumerates the values for authentication method.
type AuthenticationMethod string

const (
	AuthenticationMethodCassandra AuthenticationMethod = "Cassandra"
	AuthenticationMethodNone      AuthenticationMethod = "None"
)

// PossibleAuthenticationMethodValues returns an array of possible values for the AuthenticationMethod const type.
func PossibleAuthenticationMethodValues() []AuthenticationMethod {
	return []AuthenticationMethod{AuthenticationMethodCassandra, AuthenticationMethodNone}
}

// BackupPolicyType enumerates the values for backup policy type.
type BackupPolicyType string

const (
	BackupPolicyTypeContinuous BackupPolicyType = "Continuous"
	BackupPolicyTypePeriodic   BackupPolicyType = "Periodic"
)

// PossibleBackupPolicyTypeValues returns an array of possible values for the BackupPolicyType const type.
func PossibleBackupPolicyTypeValues() []BackupPolicyType {
	return []BackupPolicyType{BackupPolicyTypeContinuous, BackupPolicyTypePeriodic}
}

// BackupStorageRedundancy enumerates the values for backup storage redundancy.
type BackupStorageRedundancy string

const (
	BackupStorageRedundancyGeo   BackupStorageRedundancy = "Geo"
	BackupStorageRedundancyLocal BackupStorageRedundancy = "Local"
	BackupStorageRedundancyZone  BackupStorageRedundancy = "Zone"
)

// PossibleBackupStorageRedundancyValues returns an array of possible values for the BackupStorageRedundancy const type.
func PossibleBackupStorageRedundancyValues() []BackupStorageRedundancy {
	return []BackupStorageRedundancy{BackupStorageRedundancyGeo, BackupStorageRedundancyLocal, BackupStorageRedundancyZone}
}

// CompositePathSortOrder enumerates the values for composite path sort order.
type CompositePathSortOrder string

const (
	CompositePathSortOrderAscending  CompositePathSortOrder = "ascending"
	CompositePathSortOrderDescending CompositePathSortOrder = "descending"
)

// PossibleCompositePathSortOrderValues returns an array of possible values for the CompositePathSortOrder const type.
func PossibleCompositePathSortOrderValues() []CompositePathSortOrder {
	return []CompositePathSortOrder{CompositePathSortOrderAscending, CompositePathSortOrderDescending}
}

// ConflictResolutionMode enumerates the values for conflict resolution mode.
type ConflictResolutionMode string

const (
	ConflictResolutionModeCustom         ConflictResolutionMode = "Custom"
	ConflictResolutionModeLastWriterWins ConflictResolutionMode = "LastWriterWins"
)

// PossibleConflictResolutionModeValues returns an array of possible values for the ConflictResolutionMode const type.
func PossibleConflictResolutionModeValues() []ConflictResolutionMode {
	return []ConflictResolutionMode{ConflictResolutionModeCustom, ConflictResolutionModeLastWriterWins}
}

// ConnectorOffer enumerates the values for connector offer.
type ConnectorOffer string

const (
	ConnectorOfferSmall ConnectorOffer = "Small"
)

// PossibleConnectorOfferValues returns an array of possible values for the ConnectorOffer const type.
func PossibleConnectorOfferValues() []ConnectorOffer {
	return []ConnectorOffer{ConnectorOfferSmall}
}

// CreatedByType enumerates the values for created by type.
type CreatedByType string

const (
	CreatedByTypeApplication     CreatedByType = "Application"
	CreatedByTypeKey             CreatedByType = "Key"
	CreatedByTypeManagedIdentity CreatedByType = "ManagedIdentity"
	CreatedByTypeUser            CreatedByType = "User"
)

// PossibleCreatedByTypeValues returns an array of possible values for the CreatedByType const type.
func PossibleCreatedByTypeValues() []CreatedByType {
	return []CreatedByType{CreatedByTypeApplication, CreatedByTypeKey, CreatedByTypeManagedIdentity, CreatedByTypeUser}
}

// CreateMode enumerates the values for create mode.
type CreateMode string

const (
	CreateModeDefault CreateMode = "Default"
	CreateModeRestore CreateMode = "Restore"
)

// PossibleCreateModeValues returns an array of possible values for the CreateMode const type.
func PossibleCreateModeValues() []CreateMode {
	return []CreateMode{CreateModeDefault, CreateModeRestore}
}

// CreateModeBasicDatabaseAccountCreateUpdateProperties enumerates the values for create mode basic database
// account create update properties.
type CreateModeBasicDatabaseAccountCreateUpdateProperties string

const (
	CreateModeBasicDatabaseAccountCreateUpdatePropertiesCreateModeDatabaseAccountCreateUpdateProperties CreateModeBasicDatabaseAccountCreateUpdateProperties = "DatabaseAccountCreateUpdateProperties"
	CreateModeBasicDatabaseAccountCreateUpdatePropertiesCreateModeDefault                               CreateModeBasicDatabaseAccountCreateUpdateProperties = "Default"
	CreateModeBasicDatabaseAccountCreateUpdatePropertiesCreateModeRestore                               CreateModeBasicDatabaseAccountCreateUpdateProperties = "Restore"
)

// PossibleCreateModeBasicDatabaseAccountCreateUpdatePropertiesValues returns an array of possible values for the CreateModeBasicDatabaseAccountCreateUpdateProperties const type.
func PossibleCreateModeBasicDatabaseAccountCreateUpdatePropertiesValues() []CreateModeBasicDatabaseAccountCreateUpdateProperties {
	return []CreateModeBasicDatabaseAccountCreateUpdateProperties{CreateModeBasicDatabaseAccountCreateUpdatePropertiesCreateModeDatabaseAccountCreateUpdateProperties, CreateModeBasicDatabaseAccountCreateUpdatePropertiesCreateModeDefault, CreateModeBasicDatabaseAccountCreateUpdatePropertiesCreateModeRestore}
}

// DatabaseAccountKind enumerates the values for database account kind.
type DatabaseAccountKind string

const (
	DatabaseAccountKindGlobalDocumentDB DatabaseAccountKind = "GlobalDocumentDB"
	DatabaseAccountKindMongoDB          DatabaseAccountKind = "MongoDB"
	DatabaseAccountKindParse            DatabaseAccountKind = "Parse"
)

// PossibleDatabaseAccountKindValues returns an array of possible values for the DatabaseAccountKind const type.
func PossibleDatabaseAccountKindValues() []DatabaseAccountKind {
	return []DatabaseAccountKind{DatabaseAccountKindGlobalDocumentDB, DatabaseAccountKindMongoDB, DatabaseAccountKindParse}
}

// DatabaseAccountOfferType enumerates the values for database account offer type.
type DatabaseAccountOfferType string

const (
	DatabaseAccountOfferTypeStandard DatabaseAccountOfferType = "Standard"
)

// PossibleDatabaseAccountOfferTypeValues returns an array of possible values for the DatabaseAccountOfferType const type.
func PossibleDatabaseAccountOfferTypeValues() []DatabaseAccountOfferType {
	return []DatabaseAccountOfferType{DatabaseAccountOfferTypeStandard}
}

// DataType enumerates the values for data type.
type DataType string

const (
	DataTypeLineString   DataType = "LineString"
	DataTypeMultiPolygon DataType = "MultiPolygon"
	DataTypeNumber       DataType = "Number"
	DataTypePoint        DataType = "Point"
	DataTypePolygon      DataType = "Polygon"
	DataTypeString       DataType = "String"
)

// PossibleDataTypeValues returns an array of possible values for the DataType const type.
func PossibleDataTypeValues() []DataType {
	return []DataType{DataTypeLineString, DataTypeMultiPolygon, DataTypeNumber, DataTypePoint, DataTypePolygon, DataTypeString}
}

// DefaultConsistencyLevel enumerates the values for default consistency level.
type DefaultConsistencyLevel string

const (
	DefaultConsistencyLevelBoundedStaleness DefaultConsistencyLevel = "BoundedStaleness"
	DefaultConsistencyLevelConsistentPrefix DefaultConsistencyLevel = "ConsistentPrefix"
	DefaultConsistencyLevelEventual         DefaultConsistencyLevel = "Eventual"
	DefaultConsistencyLevelSession          DefaultConsistencyLevel = "Session"
	DefaultConsistencyLevelStrong           DefaultConsistencyLevel = "Strong"
)

// PossibleDefaultConsistencyLevelValues returns an array of possible values for the DefaultConsistencyLevel const type.
func PossibleDefaultConsistencyLevelValues() []DefaultConsistencyLevel {
	return []DefaultConsistencyLevel{DefaultConsistencyLevelBoundedStaleness, DefaultConsistencyLevelConsistentPrefix, DefaultConsistencyLevelEventual, DefaultConsistencyLevelSession, DefaultConsistencyLevelStrong}
}

// IndexingMode enumerates the values for indexing mode.
type IndexingMode string

const (
	IndexingModeConsistent IndexingMode = "consistent"
	IndexingModeLazy       IndexingMode = "lazy"
	IndexingModeNone       IndexingMode = "none"
)

// PossibleIndexingModeValues returns an array of possible values for the IndexingMode const type.
func PossibleIndexingModeValues() []IndexingMode {
	return []IndexingMode{IndexingModeConsistent, IndexingModeLazy, IndexingModeNone}
}

// IndexKind enumerates the values for index kind.
type IndexKind string

const (
	IndexKindHash    IndexKind = "Hash"
	IndexKindRange   IndexKind = "Range"
	IndexKindSpatial IndexKind = "Spatial"
)

// PossibleIndexKindValues returns an array of possible values for the IndexKind const type.
func PossibleIndexKindValues() []IndexKind {
	return []IndexKind{IndexKindHash, IndexKindRange, IndexKindSpatial}
}

// KeyKind enumerates the values for key kind.
type KeyKind string

const (
	KeyKindPrimary           KeyKind = "primary"
	KeyKindPrimaryReadonly   KeyKind = "primaryReadonly"
	KeyKindSecondary         KeyKind = "secondary"
	KeyKindSecondaryReadonly KeyKind = "secondaryReadonly"
)

// PossibleKeyKindValues returns an array of possible values for the KeyKind const type.
func PossibleKeyKindValues() []KeyKind {
	return []KeyKind{KeyKindPrimary, KeyKindPrimaryReadonly, KeyKindSecondary, KeyKindSecondaryReadonly}
}

// ManagedCassandraProvisioningState enumerates the values for managed cassandra provisioning state.
type ManagedCassandraProvisioningState string

const (
	ManagedCassandraProvisioningStateCanceled  ManagedCassandraProvisioningState = "Canceled"
	ManagedCassandraProvisioningStateCreating  ManagedCassandraProvisioningState = "Creating"
	ManagedCassandraProvisioningStateDeleting  ManagedCassandraProvisioningState = "Deleting"
	ManagedCassandraProvisioningStateFailed    ManagedCassandraProvisioningState = "Failed"
	ManagedCassandraProvisioningStateSucceeded ManagedCassandraProvisioningState = "Succeeded"
	ManagedCassandraProvisioningStateUpdating  ManagedCassandraProvisioningState = "Updating"
)

// PossibleManagedCassandraProvisioningStateValues returns an array of possible values for the ManagedCassandraProvisioningState const type.
func PossibleManagedCassandraProvisioningStateValues() []ManagedCassandraProvisioningState {
	return []ManagedCassandraProvisioningState{ManagedCassandraProvisioningStateCanceled, ManagedCassandraProvisioningStateCreating, ManagedCassandraProvisioningStateDeleting, ManagedCassandraProvisioningStateFailed, ManagedCassandraProvisioningStateSucceeded, ManagedCassandraProvisioningStateUpdating}
}

// NetworkACLBypass enumerates the values for network acl bypass.
type NetworkACLBypass string

const (
	NetworkACLBypassAzureServices NetworkACLBypass = "AzureServices"
	NetworkACLBypassNone          NetworkACLBypass = "None"
)

// PossibleNetworkACLBypassValues returns an array of possible values for the NetworkACLBypass const type.
func PossibleNetworkACLBypassValues() []NetworkACLBypass {
	return []NetworkACLBypass{NetworkACLBypassAzureServices, NetworkACLBypassNone}
}

// NodeState enumerates the values for node state.
type NodeState string

const (
	NodeStateJoining NodeState = "Joining"
	NodeStateLeaving NodeState = "Leaving"
	NodeStateMoving  NodeState = "Moving"
	NodeStateNormal  NodeState = "Normal"
	NodeStateStopped NodeState = "Stopped"
)

// PossibleNodeStateValues returns an array of possible values for the NodeState const type.
func PossibleNodeStateValues() []NodeState {
	return []NodeState{NodeStateJoining, NodeStateLeaving, NodeStateMoving, NodeStateNormal, NodeStateStopped}
}

// NodeStatus enumerates the values for node status.
type NodeStatus string

const (
	NodeStatusDown NodeStatus = "Down"
	NodeStatusUp   NodeStatus = "Up"
)

// PossibleNodeStatusValues returns an array of possible values for the NodeStatus const type.
func PossibleNodeStatusValues() []NodeStatus {
	return []NodeStatus{NodeStatusDown, NodeStatusUp}
}

// OperationType enumerates the values for operation type.
type OperationType string

const (
	OperationTypeCreate          OperationType = "Create"
	OperationTypeDelete          OperationType = "Delete"
	OperationTypeReplace         OperationType = "Replace"
	OperationTypeSystemOperation OperationType = "SystemOperation"
)

// PossibleOperationTypeValues returns an array of possible values for the OperationType const type.
func PossibleOperationTypeValues() []OperationType {
	return []OperationType{OperationTypeCreate, OperationTypeDelete, OperationTypeReplace, OperationTypeSystemOperation}
}

// PartitionKind enumerates the values for partition kind.
type PartitionKind string

const (
	PartitionKindHash      PartitionKind = "Hash"
	PartitionKindMultiHash PartitionKind = "MultiHash"
	PartitionKindRange     PartitionKind = "Range"
)

// PossiblePartitionKindValues returns an array of possible values for the PartitionKind const type.
func PossiblePartitionKindValues() []PartitionKind {
	return []PartitionKind{PartitionKindHash, PartitionKindMultiHash, PartitionKindRange}
}

// PrimaryAggregationType enumerates the values for primary aggregation type.
type PrimaryAggregationType string

const (
	PrimaryAggregationTypeAverage PrimaryAggregationType = "Average"
	PrimaryAggregationTypeLast    PrimaryAggregationType = "Last"
	PrimaryAggregationTypeMaximum PrimaryAggregationType = "Maximum"
	PrimaryAggregationTypeMinimum PrimaryAggregationType = "Minimum"
	PrimaryAggregationTypeNone    PrimaryAggregationType = "None"
	PrimaryAggregationTypeTotal   PrimaryAggregationType = "Total"
)

// PossiblePrimaryAggregationTypeValues returns an array of possible values for the PrimaryAggregationType const type.
func PossiblePrimaryAggregationTypeValues() []PrimaryAggregationType {
	return []PrimaryAggregationType{PrimaryAggregationTypeAverage, PrimaryAggregationTypeLast, PrimaryAggregationTypeMaximum, PrimaryAggregationTypeMinimum, PrimaryAggregationTypeNone, PrimaryAggregationTypeTotal}
}

// PublicNetworkAccess enumerates the values for public network access.
type PublicNetworkAccess string

const (
	PublicNetworkAccessDisabled PublicNetworkAccess = "Disabled"
	PublicNetworkAccessEnabled  PublicNetworkAccess = "Enabled"
)

// PossiblePublicNetworkAccessValues returns an array of possible values for the PublicNetworkAccess const type.
func PossiblePublicNetworkAccessValues() []PublicNetworkAccess {
	return []PublicNetworkAccess{PublicNetworkAccessDisabled, PublicNetworkAccessEnabled}
}

// ResourceIdentityType enumerates the values for resource identity type.
type ResourceIdentityType string

const (
	ResourceIdentityTypeNone                       ResourceIdentityType = "None"
	ResourceIdentityTypeSystemAssigned             ResourceIdentityType = "SystemAssigned"
	ResourceIdentityTypeSystemAssignedUserAssigned ResourceIdentityType = "SystemAssigned,UserAssigned"
	ResourceIdentityTypeUserAssigned               ResourceIdentityType = "UserAssigned"
)

// PossibleResourceIdentityTypeValues returns an array of possible values for the ResourceIdentityType const type.
func PossibleResourceIdentityTypeValues() []ResourceIdentityType {
	return []ResourceIdentityType{ResourceIdentityTypeNone, ResourceIdentityTypeSystemAssigned, ResourceIdentityTypeSystemAssignedUserAssigned, ResourceIdentityTypeUserAssigned}
}

// RestoreMode enumerates the values for restore mode.
type RestoreMode string

const (
	RestoreModePointInTime RestoreMode = "PointInTime"
)

// PossibleRestoreModeValues returns an array of possible values for the RestoreMode const type.
func PossibleRestoreModeValues() []RestoreMode {
	return []RestoreMode{RestoreModePointInTime}
}

// RoleDefinitionType enumerates the values for role definition type.
type RoleDefinitionType string

const (
	RoleDefinitionTypeBuiltInRole RoleDefinitionType = "BuiltInRole"
	// RoleDefinitionTypeCustomRole ...
RoleDefinitionTypeCustomRole RoleDefinitionType = "CustomRole" ) // PossibleRoleDefinitionTypeValues returns an array of possible values for the RoleDefinitionType const type. func PossibleRoleDefinitionTypeValues() []RoleDefinitionType { return []RoleDefinitionType{RoleDefinitionTypeBuiltInRole, RoleDefinitionTypeCustomRole} } // ServerVersion enumerates the values for server version. type ServerVersion string const ( // ServerVersionFourFullStopZero ... ServerVersionFourFullStopZero ServerVersion = "4.0" // ServerVersionThreeFullStopSix ... ServerVersionThreeFullStopSix ServerVersion = "3.6" // ServerVersionThreeFullStopTwo ... ServerVersionThreeFullStopTwo ServerVersion = "3.2" ) // PossibleServerVersionValues returns an array of possible values for the ServerVersion const type. func PossibleServerVersionValues() []ServerVersion { return []ServerVersion{ServerVersionFourFullStopZero, ServerVersionThreeFullStopSix, ServerVersionThreeFullStopTwo} } // ServiceSize enumerates the values for service size. type ServiceSize string const ( // ServiceSizeCosmosD16s ... ServiceSizeCosmosD16s ServiceSize = "Cosmos.D16s" // ServiceSizeCosmosD4s ... ServiceSizeCosmosD4s ServiceSize = "Cosmos.D4s" // ServiceSizeCosmosD8s ... ServiceSizeCosmosD8s ServiceSize = "Cosmos.D8s" ) // PossibleServiceSizeValues returns an array of possible values for the ServiceSize const type. func PossibleServiceSizeValues() []ServiceSize { return []ServiceSize{ServiceSizeCosmosD16s, ServiceSizeCosmosD4s, ServiceSizeCosmosD8s} } // ServiceStatus enumerates the values for service status. type ServiceStatus string const ( // ServiceStatusCreating ... ServiceStatusCreating ServiceStatus = "Creating" // ServiceStatusDeleting ... ServiceStatusDeleting ServiceStatus = "Deleting" // ServiceStatusError ... ServiceStatusError ServiceStatus = "Error" // ServiceStatusRunning ... ServiceStatusRunning ServiceStatus = "Running" // ServiceStatusStopped ... 
ServiceStatusStopped ServiceStatus = "Stopped" // ServiceStatusUpdating ... ServiceStatusUpdating ServiceStatus = "Updating" ) // PossibleServiceStatusValues returns an array of possible values for the ServiceStatus const type. func PossibleServiceStatusValues() []ServiceStatus { return []ServiceStatus{ServiceStatusCreating, ServiceStatusDeleting, ServiceStatusError, ServiceStatusRunning, ServiceStatusStopped, ServiceStatusUpdating} } // ServiceType enumerates the values for service type. type ServiceType string const ( // ServiceTypeDataTransfer ... ServiceTypeDataTransfer ServiceType = "DataTransfer" // ServiceTypeSQLDedicatedGateway ... ServiceTypeSQLDedicatedGateway ServiceType = "SqlDedicatedGateway" ) // PossibleServiceTypeValues returns an array of possible values for the ServiceType const type. func PossibleServiceTypeValues() []ServiceType { return []ServiceType{ServiceTypeDataTransfer, ServiceTypeSQLDedicatedGateway} } // ServiceTypeBasicServiceResourceProperties enumerates the values for service type basic service resource // properties. type ServiceTypeBasicServiceResourceProperties string const ( // ServiceTypeBasicServiceResourcePropertiesServiceTypeDataTransferServiceResourceProperties ... ServiceTypeBasicServiceResourcePropertiesServiceTypeDataTransferServiceResourceProperties ServiceTypeBasicServiceResourceProperties = "DataTransferServiceResourceProperties" // ServiceTypeBasicServiceResourcePropertiesServiceTypeServiceResourceProperties ... ServiceTypeBasicServiceResourcePropertiesServiceTypeServiceResourceProperties ServiceTypeBasicServiceResourceProperties = "ServiceResourceProperties" // ServiceTypeBasicServiceResourcePropertiesServiceTypeSQLDedicatedGatewayServiceResourceProperties ... 
ServiceTypeBasicServiceResourcePropertiesServiceTypeSQLDedicatedGatewayServiceResourceProperties ServiceTypeBasicServiceResourceProperties = "SqlDedicatedGatewayServiceResourceProperties" ) // PossibleServiceTypeBasicServiceResourcePropertiesValues returns an array of possible values for the ServiceTypeBasicServiceResourceProperties const type. func PossibleServiceTypeBasicServiceResourcePropertiesValues() []ServiceTypeBasicServiceResourceProperties { return []ServiceTypeBasicServiceResourceProperties{ServiceTypeBasicServiceResourcePropertiesServiceTypeDataTransferServiceResourceProperties, ServiceTypeBasicServiceResourcePropertiesServiceTypeServiceResourceProperties, ServiceTypeBasicServiceResourcePropertiesServiceTypeSQLDedicatedGatewayServiceResourceProperties} } // SpatialType enumerates the values for spatial type. type SpatialType string const ( // SpatialTypeLineString ... SpatialTypeLineString SpatialType = "LineString" // SpatialTypeMultiPolygon ... SpatialTypeMultiPolygon SpatialType = "MultiPolygon" // SpatialTypePoint ... SpatialTypePoint SpatialType = "Point" // SpatialTypePolygon ... SpatialTypePolygon SpatialType = "Polygon" ) // PossibleSpatialTypeValues returns an array of possible values for the SpatialType const type. func PossibleSpatialTypeValues() []SpatialType { return []SpatialType{SpatialTypeLineString, SpatialTypeMultiPolygon, SpatialTypePoint, SpatialTypePolygon} } // TriggerOperation enumerates the values for trigger operation. type TriggerOperation string const ( // TriggerOperationAll ... TriggerOperationAll TriggerOperation = "All" // TriggerOperationCreate ... TriggerOperationCreate TriggerOperation = "Create" // TriggerOperationDelete ... TriggerOperationDelete TriggerOperation = "Delete" // TriggerOperationReplace ... TriggerOperationReplace TriggerOperation = "Replace" // TriggerOperationUpdate ... 
TriggerOperationUpdate TriggerOperation = "Update" ) // PossibleTriggerOperationValues returns an array of possible values for the TriggerOperation const type. func PossibleTriggerOperationValues() []TriggerOperation { return []TriggerOperation{TriggerOperationAll, TriggerOperationCreate, TriggerOperationDelete, TriggerOperationReplace, TriggerOperationUpdate} } // TriggerType enumerates the values for trigger type. type TriggerType string const ( // TriggerTypePost ... TriggerTypePost TriggerType = "Post" // TriggerTypePre ... TriggerTypePre TriggerType = "Pre" ) // PossibleTriggerTypeValues returns an array of possible values for the TriggerType const type. func PossibleTriggerTypeValues() []TriggerType { return []TriggerType{TriggerTypePost, TriggerTypePre} } // Type enumerates the values for type. type Type string const ( // TypeTypeBackupPolicy ... TypeTypeBackupPolicy Type = "BackupPolicy" // TypeTypeContinuous ... TypeTypeContinuous Type = "Continuous" // TypeTypePeriodic ... TypeTypePeriodic Type = "Periodic" ) // PossibleTypeValues returns an array of possible values for the Type const type. func PossibleTypeValues() []Type { return []Type{TypeTypeBackupPolicy, TypeTypeContinuous, TypeTypePeriodic} } // UnitType enumerates the values for unit type. type UnitType string const ( // UnitTypeBytes ... UnitTypeBytes UnitType = "Bytes" // UnitTypeBytesPerSecond ... UnitTypeBytesPerSecond UnitType = "BytesPerSecond" // UnitTypeCount ... UnitTypeCount UnitType = "Count" // UnitTypeCountPerSecond ... UnitTypeCountPerSecond UnitType = "CountPerSecond" // UnitTypeMilliseconds ... UnitTypeMilliseconds UnitType = "Milliseconds" // UnitTypePercent ... UnitTypePercent UnitType = "Percent" // UnitTypeSeconds ... UnitTypeSeconds UnitType = "Seconds" ) // PossibleUnitTypeValues returns an array of possible values for the UnitType const type. 
func PossibleUnitTypeValues() []UnitType { return []UnitType{UnitTypeBytes, UnitTypeBytesPerSecond, UnitTypeCount, UnitTypeCountPerSecond, UnitTypeMilliseconds, UnitTypePercent, UnitTypeSeconds} }
package org.arquillian.cube.openshift.ftest;

import java.net.URISyntaxException;
import java.net.URL;
import java.util.List;

import org.arquillian.cube.kubernetes.impl.utils.CommandExecutor;
import org.arquillian.cube.openshift.impl.requirement.RequiresOpenshift;
import org.arquillian.cube.requirement.ArquillianConditionalRunner;
import org.junit.AfterClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import static org.assertj.core.api.Assertions.assertThat;

// tag::client_cli_execution[]
/**
 * Integration test driving the {@code kubectl} and {@code oc} CLIs through
 * {@link CommandExecutor}: lists namespaces, deploys the hello-world
 * resources, and tears them down after the class finishes.
 * Requires a reachable OpenShift cluster ({@code @RequiresOpenshift}).
 */
@Category(RequiresOpenshift.class)
@RequiresOpenshift
@RunWith(ArquillianConditionalRunner.class)
public class HelloWorldIT {

    private static CommandExecutor commandExecutor = new CommandExecutor();

    @Test
    public void should_be_able_get_namespace_using_kubectl() {
        // when
        final List<String> output = commandExecutor.execCommand("kubectl get ns -o jsonpath='{..name}'");
        final String firstLine = output.get(0);
        // jsonpath output is wrapped in single quotes; strip them, then split on spaces.
        final String[] namespaces = firstLine.substring(1, firstLine.length() - 1).split(" ");

        // then
        assertThat(namespaces).contains("default");
    }

    @Test
    public void should_be_able_deploy_resources_using_oc() {
        // given
        String commandToExecute = "oc create -f " + getResource("openshift.json");

        // when
        final List<String> resources = commandExecutor.execCommand(commandToExecute);

        // then
        assertThat(resources).contains("service \"hello-world\" created", "deployment \"hello-world\" created");
    }

    /** Removes the resources created by the deploy test so runs stay repeatable. */
    @AfterClass
    public static void deleteDeployment() {
        String commandToExecute = "oc delete -f " + getResource("openshift.json");
        final List<String> strings = commandExecutor.execCommand(commandToExecute);
        assertThat(strings).contains("service \"hello-world\" deleted", "deployment \"hello-world\" deleted");
    }

    /**
     * Resolves a classpath resource to a filesystem path usable by the CLI.
     *
     * @throws IllegalStateException if the resource is not on the classpath
     */
    private static String getResource(String resourceName) {
        final URL resource = Thread.currentThread().getContextClassLoader().getResource(resourceName);
        if (resource == null) {
            throw new IllegalStateException("Expected " + resourceName + " to be on the classpath");
        }
        try {
            return resource.toURI().getPath();
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
}
// end::client_cli_execution[]
/**
 * Wires the submit button so a click switches it from the success style to
 * the danger style and relabels it "Submitted".
 */
function changeButtonAppearance() {
    const submitBtn = document.getElementById('submitBtn');
    // BUGFIX: getElementById returns null when #submitBtn is not in the DOM
    // (e.g. script loaded before the markup); calling addEventListener on
    // null would throw and abort the rest of the script.
    if (!submitBtn) {
        return;
    }
    submitBtn.addEventListener('click', function() {
        submitBtn.classList.remove('btn-success');
        submitBtn.classList.add('btn-danger');
        submitBtn.textContent = 'Submitted';
    });
}

changeButtonAppearance();
/// <summary>
/// Enables or disables button9 according to the package recorded for the
/// given local mod slot: enabled unless the slot's package.txt contains "0"
/// (after trimming), is missing, or cannot be read.
/// </summary>
/// <param name="slot">Name of the local mod slot directory.</param>
private void checkLocalSlot3(string slot)
{
    try
    {
        string filePath = @".modmanager/mods/local/" + slot + "/package.txt";
        if (System.IO.File.Exists(filePath))
        {
            string slotPackage = System.IO.File.ReadAllText(filePath);
            // "0" marks an empty/unassigned slot, so the action is unavailable.
            button9.Enabled = slotPackage.Trim() != "0";
        }
        else
        {
            // No package file: treat the slot as unusable rather than failing.
            button9.Enabled = false;
        }
    }
    catch (Exception)
    {
        // Deliberate best-effort: any I/O or permission error falls back to a
        // disabled button instead of surfacing an exception to the UI.
        button9.Enabled = false;
    }
}
# >>> conda initialize >>> # !! Contents within this block are managed by 'conda init' !! __conda_setup="$('/opt/conda/bin/conda' 'shell.bash' 'hook' 2> /dev/null)" if [ $? -eq 0 ]; then eval "$__conda_setup" else if [ -f "/opt/conda/etc/profile.d/conda.sh" ]; then . "/opt/conda/etc/profile.d/conda.sh" else export PATH="/opt/conda/bin:$PATH" fi fi unset __conda_setup # <<< conda initialize <<<
package org.xtwy.netty.constants; public class CommonConstant { public final static String ATTRIBUTE_KEY="Attribute_key"; }
<reponame>Modelata/fire import 'reflect-metadata'; /** * Sets userCollectionPath attribute of the targetted AuthDAO * * @param path user collection path */ export function UsersCollectionPath(path: string): any { // eslint-disable-next-line @typescript-eslint/ban-types return (target: Object) => { Reflect.defineMetadata('usersCollectionPath', path, target); }; }
package wordcount;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

import org.apache.hadoop.io.Writable;

/**
 * Hadoop {@link Writable} carrying a group of words together with one int
 * value, serialized as (count, each word as UTF, value).
 */
public class BroadcastValue implements Writable {

    /**
     * The keys which are "encoded" in this packet
     */
    List<String> words;

    /**
     * Encoded value
     */
    int value;

    /** No-arg constructor required by Hadoop's Writable deserialization. */
    public BroadcastValue() {}

    /**
     * Copy Constructor — deep-copies the word list so the copies are independent.
     */
    public BroadcastValue(BroadcastValue other) {
        this.words = new LinkedList<>(other.words);
        this.value = other.value;
    }

    /** Replaces both the word list and the value in one call. */
    public void set(List<String> words, int value) {
        this.words = words;
        this.value = value;
    }

    /** Returns only the encoded value (the words are not exposed here). */
    public int get() {
        return value;
    }

    /** Writes word count, each word, then the value — mirrored by readFields. */
    @Override
    public void write(DataOutput out) throws IOException {
        int numKeys = words.size();
        out.writeInt(numKeys);
        for (String key : words) {
            out.writeUTF(key);
        }
        out.writeInt(value);
    }

    /** Reads back exactly what {@link #write(DataOutput)} produced. */
    @Override
    public void readFields(DataInput in) throws IOException {
        int numKeys = in.readInt();
        words = new LinkedList<>();
        for (int index = 0; index < numKeys; index++) {
            words.add(in.readUTF());
        }
        value = in.readInt();
    }

    @Override
    public String toString() {
        return "Broadcast: " + Arrays.toString(words.toArray());
    }
}
import sys
import random
import math


class Node:
    """One node of a singly linked list."""

    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next


class slist:
    """ singly linked list class keeping head (_first) and tail (_last) pointers """

    def __init__(self):
        self._first = None
        self._last = None

    def build_slist_from_list(self, a):
        """
        building a singly linked list from a given list
        :param a:
        :return:
        """
        for i in a:
            self._append(i)

    def prepend(self, item):
        """Insert item at the head of the list."""
        node = Node(val=item)
        if self._first is None:
            # case 1: empty list -- the new node is both head and tail.
            self._last = node
        else:
            # case 2: at least one node -- link in front of the current head.
            node.next = self._first
        self._first = node

    def _append(self, item):
        """
        appending list
        :param item:
        :return:
        """
        node = Node(val=item)
        if self._first is None:
            # case 1: empty list.
            self._first = node
        else:
            # case 2: non-empty list -- link after the current tail.
            self._last.next = node
        self._last = node

    def find(self, item):
        """
        find item from list.
        :param item:
        :return: True if item occurs in the list, else False
        """
        tmp = self._first
        while tmp is not None:
            if tmp.val == item:
                return True
            tmp = tmp.next
        return False

    def delete(self, item):
        """
        delete the first occurrence of item from the list.

        BUGFIX: the previous implementation kept scanning after a match,
        re-linked nodes incorrectly, and always reset _last to None when
        deleting from the middle. It now unlinks exactly one node, keeps
        _first/_last consistent, and is a no-op when item is absent or the
        list is empty.
        :param item:
        :return:
        """
        prev = None
        cur = self._first
        while cur is not None:
            if cur.val == item:
                if prev is None:
                    self._first = cur.next
                else:
                    prev.next = cur.next
                if cur is self._last:
                    # Deleted the tail (prev is None when it was the only node).
                    self._last = prev
                return
            prev = cur
            cur = cur.next

    def delete_last(self):
        """Remove the tail node; no-op on an empty list (previously crashed)."""
        if self._first is None:
            return
        ptr1 = self._first
        prvs = self._first
        while ptr1.next is not None:
            prvs = ptr1
            ptr1 = ptr1.next
        if prvs is not ptr1:
            prvs.next = None
            self._last = prvs
        else:
            # Single-node list becomes empty.
            self._first = None
            self._last = None

    def reverse(self):
        """Reverse the list in place (previously an unimplemented stub)."""
        prev = None
        cur = self._first
        # The old head becomes the new tail.
        self._last = self._first
        while cur is not None:
            nxt = cur.next
            cur.next = prev
            prev = cur
            cur = nxt
        self._first = prev

    def __len__(self):
        """
        return length of singly linked list
        :return:
        """
        l_len = 0
        tmp = self._first
        while tmp is not None:
            l_len += 1
            tmp = tmp.next
        return l_len

    def __str__(self):
        """
        print singly linked list as str, e.g. "1->2->None"
        :return:
        """
        parts = []
        tmp = self._first
        while tmp is not None:
            parts.append(str(tmp.val))
            tmp = tmp.next
        parts.append("None")
        return "->".join(parts)


if __name__ == '__main__':
    s = slist()
    a = [x for x in range(0, 10)]
    s.build_slist_from_list(a)
    print(s)
    s.delete_last()
    print(s)
#!/bin/sh
# Force-kill every process whose command line contains "/bms"
# (excluding the grep doing the search itself).
pids=`ps -ef | grep /bms | grep -v 'grep' | awk '{print $2}'`
# BUGFIX: guard against an empty pid list -- the original ran `kill -9`
# with no arguments, which prints a usage error when nothing matched.
# $pids is deliberately unquoted so each pid becomes its own argument.
if [ -n "$pids" ]; then
    kill -9 $pids
fi
exit
#!/bin/bash

# Collect the names of all required-but-unset environment variables so they
# can be reported together in a single error message.
missing_vars=( )
[ -z "$PUBLIC_URL" ] && missing_vars+=( "PUBLIC_URL" )
[ -z "$GOOGLE_ANALYTICS_ID" ] && missing_vars+=( "GOOGLE_ANALYTICS_ID" )
[ -z "$AMPLITUDE_KEY" ] && missing_vars+=( "AMPLITUDE_KEY" )

# Abort before rendering any templates if configuration is incomplete.
if [ ${#missing_vars[@]} -gt 0 ]; then
  echo "Missing environment variables: ${missing_vars[*]}"
  exit 1
fi

# Create config files using templates and env vars
# (j2 renders Jinja2 templates; `-f env` feeds it the process environment).
j2 -f env -o /config/nginx/site-confs/meet.conf /config/templates/meet.conf.j2
j2 -f env -o /config/config.js /config/templates/config.js.j2

# Start nginx
# exec replaces this shell so nginx receives signals directly (container-friendly).
exec nginx -c /config/nginx/nginx.conf
// (Removed stray dataset artifact line that preceded this script.)
// Toggles the region referenced by the clicked element's aria-controls
// attribute, keeping that region's aria-expanded attribute in sync.
$(document).ready(function() {
    $('#search_box').click(function(e) {
        var $region = $('#' + $(this).attr('aria-controls'));
        $region.slideToggle(100, function() {
            // NOTE(review): aria-expanded is read back on the region itself
            // after the animation completes and flipped based on its prior
            // value; focus is only moved when expanding — confirm this is
            // the intended accessibility behaviour.
            if ($region.attr('aria-expanded') == 'false') {
                $region.attr('aria-expanded', 'true');
                $region.focus();
            } else {
                $region.attr('aria-expanded', 'false');
            }
        });
        // Prevent the click from bubbling or triggering default behaviour.
        e.stopPropagation();
        return false;
    });
});
import datetime


class Guest:
    """Pairs a user object with the timestamp their guest session was last used."""

    def __init__(self, user, last_used):
        self.user = user
        self.last_used = last_used

    @classmethod
    def create_guest(cls, user):
        """Build a Guest for ``user`` stamped with the current local time."""
        now = datetime.datetime.now()
        return cls(user, now)


# Example usage
class User:
    """Minimal stand-in user holding only a username."""

    def __init__(self, username):
        self.username = username


user_obj = User("JohnDoe")
guest_obj = Guest.create_guest(user_obj)
print(guest_obj.user.username)  # Output: JohnDoe
print(guest_obj.last_used)      # Output: Current timestamp
#!/bin/sh # CYBERWATCH SAS - 2017 # # Security fix for CESA-2014:0866 # # Security announcement date: 2014-07-09 18:25:19 UTC # Script generation date: 2017-01-01 21:11:06 UTC # # Operating System: CentOS 6 # Architecture: x86_64 # # Vulnerable packages fix on version: # - libsmbclient.i686:3.6.9-169.el6_5 # - libsmbclient-devel.i686:3.6.9-169.el6_5 # - samba-common.i686:3.6.9-169.el6_5 # - samba-winbind-clients.i686:3.6.9-169.el6_5 # - samba-winbind-devel.i686:3.6.9-169.el6_5 # - libsmbclient.x86_64:3.6.9-169.el6_5 # - libsmbclient-devel.x86_64:3.6.9-169.el6_5 # - samba.x86_64:3.6.9-169.el6_5 # - samba-client.x86_64:3.6.9-169.el6_5 # - samba-common.x86_64:3.6.9-169.el6_5 # - samba-doc.x86_64:3.6.9-169.el6_5 # - samba-domainjoin-gui.x86_64:3.6.9-169.el6_5 # - samba-swat.x86_64:3.6.9-169.el6_5 # - samba-winbind.x86_64:3.6.9-169.el6_5 # - samba-winbind-clients.x86_64:3.6.9-169.el6_5 # - samba-winbind-devel.x86_64:3.6.9-169.el6_5 # - samba-winbind-krb5-locator.x86_64:3.6.9-169.el6_5 # # Last versions recommanded by security team: # - libsmbclient.i686:3.6.23-30.el6_7 # - libsmbclient-devel.i686:3.6.23-30.el6_7 # - samba-common.i686:3.6.23-30.el6_7 # - samba-winbind-clients.i686:3.6.23-30.el6_7 # - samba-winbind-devel.i686:3.6.23-30.el6_7 # - libsmbclient.x86_64:3.6.23-30.el6_7 # - libsmbclient-devel.x86_64:3.6.23-30.el6_7 # - samba.x86_64:3.6.23-30.el6_7 # - samba-client.x86_64:3.6.23-30.el6_7 # - samba-common.x86_64:3.6.23-30.el6_7 # - samba-doc.x86_64:3.6.23-30.el6_7 # - samba-domainjoin-gui.x86_64:3.6.23-30.el6_7 # - samba-swat.x86_64:3.6.23-30.el6_7 # - samba-winbind.x86_64:3.6.23-30.el6_7 # - samba-winbind-clients.x86_64:3.6.23-30.el6_7 # - samba-winbind-devel.x86_64:3.6.23-30.el6_7 # - samba-winbind-krb5-locator.x86_64:3.6.23-30.el6_7 # # CVE List: # - CVE-2014-0244 # - CVE-2014-3493 # # More details: # - https://www.cyberwatch.fr/vulnerabilites # # Licence: Released under The MIT License (MIT), See LICENSE FILE sudo yum install libsmbclient.i686-3.6.23 -y sudo 
yum install libsmbclient-devel.i686-3.6.23 -y sudo yum install samba-common.i686-3.6.23 -y sudo yum install samba-winbind-clients.i686-3.6.23 -y sudo yum install samba-winbind-devel.i686-3.6.23 -y sudo yum install libsmbclient.x86_64-3.6.23 -y sudo yum install libsmbclient-devel.x86_64-3.6.23 -y sudo yum install samba.x86_64-3.6.23 -y sudo yum install samba-client.x86_64-3.6.23 -y sudo yum install samba-common.x86_64-3.6.23 -y sudo yum install samba-doc.x86_64-3.6.23 -y sudo yum install samba-domainjoin-gui.x86_64-3.6.23 -y sudo yum install samba-swat.x86_64-3.6.23 -y sudo yum install samba-winbind.x86_64-3.6.23 -y sudo yum install samba-winbind-clients.x86_64-3.6.23 -y sudo yum install samba-winbind-devel.x86_64-3.6.23 -y sudo yum install samba-winbind-krb5-locator.x86_64-3.6.23 -y
#!/usr/bin/env bash
# Personal Ubuntu workstation bootstrap: clock fix for dual boot, a batch of
# apt packages, and the KeePassRPC plugin for the Kee browser extension.

# -----------private--------------------------
#sudo add-apt-repository -y ppa:danielrichter2007/grub-customizer
#sudo add-apt-repository -y ppa:jtaylor/keepass
#sudo add-apt-repository -y ppa:openshot.developers/ppa

# Set time to use local-time so Ubuntu and windows can co-exists
# https://www.howtogeek.com/323390/how-to-fix-windows-and-linux-showing-different-times-when-dual-booting/
timedatectl set-local-rtc 1 --adjust-system-clock

sudo apt-get update
sudo apt-get install -y mono-complete \
    keepass2 \
    grub-customizer \
    xournal \
    geany \
    git \
    git-flow \
    meld \
    bash-completion \
    bash-builtins \
    mate-utils \
    curl \
    jq \
    terminator \
    putty \
    gtkterm \
    openshot-qt \
    pinta \
    gnome-tweaks \
    htop \

# NOTE(review): the trailing backslash after "htop" continues the apt-get
# line into the text below; it happens to be harmless because the next word
# starts a comment, but confirm no package name was meant to follow.
# https://forum.kee.pm/t/installing-kee-with-keepassrpc-for-keepass-password-safe-instructions/23
# Query GitHub for the latest KeePassRPC release and install the .plgx plugin.
curl -s https://api.github.com/repos/kee-org/keepassrpc/releases/latest | jq -r ".assets[] | select(.name | test(\"KeePassRPC.plgx\")) | .browser_download_url" | xargs sudo curl -s -L -o "/usr/lib/keepass2/Plugins/KeePassRPC.plgx"
#wget https://github.com/kee-org/keepassrpc/releases/download/v1.9.0/KeePassRPC.plgx
#sudo mv KeePassRPC.plgx /usr/lib/keepass2/Plugins/

#wget https://downloads.slack-edge.com/linux_releases/slack-desktop-4.0.2-amd64.deb
#sudo apt-get install -y ./slack-desktop-*.deb
#rm slack-desktop-*.deb

# run this guide
#https://docs.google.com/document/d/1eESvpygRr0zT-MD2NPkzh30kqdcwkLM8BtZNxdL4gXc/edit#
#run bootstrap
#copy ssh-keys
#run bor cli

# bash-tools claes
#git clone git@github.com:claesjaeger/.bash_tools.git
// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

package frc.robot.autonomous.routines.test;

import java.util.List;

import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Rotation2d;
import edu.wpi.first.math.trajectory.Trajectory;
import edu.wpi.first.wpilibj2.command.InstantCommand;
import edu.wpi.first.wpilibj2.command.SequentialCommandGroup;
import frc.robot.autonomous.Trajectories;
import frc.robot.autonomous.common.IntakePath;
import frc.robot.subsystems.DrivetrainSubsystem;
import frc.robot.subsystems.intake.IntakePistonsSubsystem;
import frc.robot.subsystems.intake.IntakeSubsystem;

/**
 * Test autonomous routine: drive to ball 2 while intaking, then return to the
 * fender. (Removed a stray dataset artifact line that preceded the license
 * header; fixed the copy-pasted debug label on PATH_TO_FENDER.)
 */
public class Fender extends SequentialCommandGroup{
  /** Creates a new BlueFive. */
  public Fender(
    DrivetrainSubsystem drivetrain,
    IntakeSubsystem intake,
    IntakePistonsSubsystem pistons
  ) {
    addCommands(
      new InstantCommand(() -> drivetrain.resetOdometry(PATH_TO_BALL_2.getInitialPose()), drivetrain), //Reset Position
      new IntakePath(PATH_TO_BALL_2, drivetrain, intake, pistons),
      drivetrain.new TrajectoryFollowerCommand(PATH_TO_FENDER)
    );
  }

  // Short forward path to the second ball (instance field initializers run
  // before the constructor body, so this is ready when addCommands executes).
  private Trajectory PATH_TO_BALL_2 = Trajectories.generateTrajectory(3,2,List.of(
    new Pose2d(7.821, 1.922, Rotation2d.fromDegrees(-89.018)),
    new Pose2d(7.784, 0.7, Rotation2d.fromDegrees(-89.018))
    ),
    false,
    "Blue Five TWO PATH_TO_BALL_2"
  );

  // Reversed path back toward the fender.
  private Trajectory PATH_TO_FENDER = Trajectories.generateTrajectory(4,4,List.of(
    new Pose2d(7.784, 0.9, Rotation2d.fromDegrees(-89.018)),
    new Pose2d(8.2, 3, Rotation2d.fromDegrees(230))
    ),
    true,
    "Blue Five TWO PATH_TO_FENDER"
  );
}
/**
 * Plain data holder describing a user account.
 */
class Usuario {
    constructor(nombre, edad, admin) {
        this.nombre = nombre;
        this.edad = edad;
        this.admin = admin;
    }
}

/**
 * Authenticates a user and returns their role string.
 *
 * BUGFIX: the original communicated its result by assigning to an outer
 * variable `rol` that was never declared inside the function — a
 * ReferenceError in strict mode and a hidden global otherwise. The role is
 * now returned and the caller assigns it.
 *
 * @param {number} rut user identifier
 * @param {string} pass password
 * @param {string} api_key caller's API key
 * @returns {string} "admin" | "minor" | "user" | "invalid credentials" | "unauthorized"
 */
function authenticateUser(rut, pass, api_key) {
    if (api_key !== "valid_api_key") {
        return "unauthorized";
    }
    // Simulating user data retrieval from a database
    const usuario = new Usuario("John Doe", 25, false); // Example user data
    if (rut !== 123456 || pass !== "password123") {
        return "invalid credentials";
    }
    if (usuario.admin) {
        return "admin";
    }
    if (usuario.edad < 18) {
        return "minor";
    }
    return "user";
}

// Example usage
let rut = 123456;
let pass = "password123";
let api_key = "valid_api_key";
let rol = authenticateUser(rut, pass, api_key);
console.log(rol); // Output: "user" (based on the example user data)
#!/bin/bash
# Latency smoke test for the /search endpoint: times unfiltered,
# judge-filtered, and judge+room-filtered queries, first over a fixed
# one-day window and then over windows growing by one week at a time.

startDate=`date -d "2018-06-01 10:00"`
weeks=12
judgeId=584a3ac1-690e-41b5-913f-b51cca824a45
roomId=74cc0d1e-0e1e-4f6e-9ae6-7fd9d223fb81

echo "1 DAY"
# -o /dev/null -s discards the body; -w prints total time, status, and URL.
curl -o /dev/null -s -w '%{time_total} :%{http_code} %{url_effective}\n' "http://localhost:8091/search?from=2018-06-25%2010:00&to=2018-06-25%2012:00&duration=600"
curl -o /dev/null -s -w '%{time_total} :%{http_code} %{url_effective}\n' "http://localhost:8091/search?from=2018-06-25%2010:00&to=2018-06-25%2012:00&duration=600&judgeId=$judgeId"
curl -o /dev/null -s -w '%{time_total} :%{http_code} %{url_effective}\n' "http://localhost:8091/search?from=2018-06-25%2010:00&to=2018-06-25%2012:00&duration=600&judgeId=$judgeId&roomId=$roomId"

echo "start $(date -d "$startDate" +%Y-%m-%d)"
echo "weeks $weeks"

# Widen the search window one week per iteration and time each query variant.
for ((i_week=1; i_week<=$weeks; i_week++)); do
  echo "$i_week WEEK"
  endDate=`date -d "$startDate +$i_week weeks"`
  curl -o /dev/null -s -w '%{time_total} :%{http_code} %{url_effective}\n' "http://localhost:8091/search?from=$(date -d "$startDate" +%Y-%m-%d)%2010:00&to=$(date -d "$endDate" +%Y-%m-%d)%2012:00&duration=600"
  curl -o /dev/null -s -w '%{time_total} :%{http_code} %{url_effective}\n' "http://localhost:8091/search?from=$(date -d "$startDate" +%Y-%m-%d)%2010:00&to=$(date -d "$endDate" +%Y-%m-%d)%2012:00&duration=600&judgeId=$judgeId"
  curl -o /dev/null -s -w '%{time_total} :%{http_code} %{url_effective}\n' "http://localhost:8091/search?from=$(date -d "$startDate" +%Y-%m-%d)%2010:00&to=$(date -d "$endDate" +%Y-%m-%d)%2012:00&duration=600&judgeId=$judgeId&roomId=$roomId"
  echo $(date -d "$endDate" +%Y-%m-%d)
done
<reponame>pskopek/elytron-subsystem /* * JBoss, Home of Professional Open Source. * Copyright 2017 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wildfly.extension.elytron; import static javax.xml.stream.XMLStreamConstants.END_ELEMENT; import static org.jboss.as.controller.PersistentResourceXMLDescription.builder; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ADD; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR; import static org.jboss.as.controller.parsing.ParseUtils.isNoNamespaceAttribute; import static org.jboss.as.controller.parsing.ParseUtils.missingRequired; import static org.jboss.as.controller.parsing.ParseUtils.requireNoAttributes; import static org.jboss.as.controller.parsing.ParseUtils.requireNoContent; import static org.jboss.as.controller.parsing.ParseUtils.requireSingleAttribute; import static org.jboss.as.controller.parsing.ParseUtils.unexpectedAttribute; import static org.jboss.as.controller.parsing.ParseUtils.unexpectedElement; import static org.wildfly.extension.elytron.ElytronDescriptionConstants.AGGREGATE_SECURITY_EVENT_LISTENER; import static org.wildfly.extension.elytron.ElytronDescriptionConstants.AUDIT_LOGGING; import static org.wildfly.extension.elytron.ElytronDescriptionConstants.FILE_AUDIT_LOG; import static 
org.wildfly.extension.elytron.ElytronDescriptionConstants.NAME;
import static org.wildfly.extension.elytron.ElytronDescriptionConstants.SECURITY_EVENT_LISTENER;
import static org.wildfly.extension.elytron.ElytronDescriptionConstants.SECURITY_EVENT_LISTENERS;
import static org.wildfly.extension.elytron.ElytronDescriptionConstants.SYSLOG_AUDIT_LOG;
import static org.wildfly.extension.elytron.ElytronSubsystemParser.verifyNamespace;

import java.util.List;

import javax.xml.stream.XMLStreamException;

import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.PathElement;
import org.jboss.as.controller.PersistentResourceXMLDescription;
import org.jboss.dmr.ModelNode;
import org.jboss.staxmapper.XMLExtendedStreamReader;
import org.jboss.staxmapper.XMLExtendedStreamWriter;

/**
 * XML Handling for the audit logging resources.
 *
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 */
class AuditLoggingParser {

    // Attribute-only XML description for the file audit log resource.
    private final PersistentResourceXMLDescription fileAuditLogParser = builder(PathElement.pathElement(FILE_AUDIT_LOG), null)
        .setUseElementsForGroups(false)
        .addAttributes(AuditResourceDefinitions.PATH, FileAttributeDefinitions.RELATIVE_TO, AuditResourceDefinitions.SYNCHRONIZED, AuditResourceDefinitions.FORMAT)
        .build();

    // Attribute-only XML description for the syslog audit log resource.
    private final PersistentResourceXMLDescription syslogAuditLogParser = builder(PathElement.pathElement(SYSLOG_AUDIT_LOG), null)
        .setUseElementsForGroups(false)
        .addAttributes(AuditResourceDefinitions.SERVER_ADDRESS, AuditResourceDefinitions.PORT, AuditResourceDefinitions.TRANSPORT, AuditResourceDefinitions.FORMAT, AuditResourceDefinitions.HOST_NAME)
        .build();

    /**
     * Reads the audit-logging element, dispatching each child element to its
     * parser and appending the resulting management operations.
     */
    void readAuditLogging(ModelNode parentAddressNode, XMLExtendedStreamReader reader, List<ModelNode> operations) throws XMLStreamException {
        requireNoAttributes(reader);
        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            verifyNamespace(reader);
            String localName = reader.getLocalName();
            PathAddress parentAddress = PathAddress.pathAddress(parentAddressNode);
            switch (localName) {
                case AGGREGATE_SECURITY_EVENT_LISTENER:
                    readAggregateSecurityEventListener(parentAddress.toModelNode(), reader, operations);
                    break;
                case FILE_AUDIT_LOG:
                    fileAuditLogParser.parse(reader, parentAddress, operations);
                    break;
                case SYSLOG_AUDIT_LOG:
                    syslogAuditLogParser.parse(reader, parentAddress, operations);
                    break;
                default:
                    throw unexpectedElement(reader);
            }
        }
    }

    /**
     * Parses an aggregate-security-event-listener element: a required name
     * attribute plus one security-event-listener child per referenced listener.
     * Builds a single ADD operation carrying the reference list.
     */
    private void readAggregateSecurityEventListener(ModelNode parentAddress, XMLExtendedStreamReader reader, List<ModelNode> operations) throws XMLStreamException {
        ModelNode addEventListener = new ModelNode();
        addEventListener.get(OP).set(ADD);
        String name = null;

        final int count = reader.getAttributeCount();
        for (int i = 0; i < count; i++) {
            final String value = reader.getAttributeValue(i);
            if (!isNoNamespaceAttribute(reader, i)) {
                throw unexpectedAttribute(reader, i);
            } else {
                String attribute = reader.getAttributeLocalName(i);
                switch (attribute) {
                    case NAME:
                        name = value;
                        break;
                    default:
                        throw unexpectedAttribute(reader, i);
                }
            }
        }
        if (name == null) {
            throw missingRequired(reader, NAME);
        }
        addEventListener.get(OP_ADDR).set(parentAddress).add(AGGREGATE_SECURITY_EVENT_LISTENER, name);
        // The operation is added first; the reference list is filled in below
        // as the child elements are consumed.
        operations.add(addEventListener);

        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            verifyNamespace(reader);
            String localName = reader.getLocalName();
            if (SECURITY_EVENT_LISTENER.equals(localName) == false) {
                throw unexpectedElement(reader);
            }
            requireSingleAttribute(reader, NAME);
            String listenerName = reader.getAttributeValue(0);
            AuditResourceDefinitions.REFERENCES.parseAndAddParameterElement(listenerName, addEventListener, reader);
            requireNoContent(reader);
        }
    }

    /**
     * Writes each aggregate listener as an element with a name attribute and
     * one security-event-listener child per reference.
     */
    private void writeAggregateSecurityEventListener(ModelNode subsystem, XMLExtendedStreamWriter writer) throws XMLStreamException {
        if (subsystem.hasDefined(AGGREGATE_SECURITY_EVENT_LISTENER)) {
            ModelNode aggregateSecurityEventListener = subsystem.require(AGGREGATE_SECURITY_EVENT_LISTENER);
            for (String name : aggregateSecurityEventListener.keys()) {
                ModelNode aggregateListener = aggregateSecurityEventListener.require(name);
                writer.writeStartElement(AGGREGATE_SECURITY_EVENT_LISTENER);
                writer.writeAttribute(NAME, name);
                List<ModelNode> listenerReferences = aggregateListener.get(SECURITY_EVENT_LISTENERS).asList();
                for (ModelNode currentReference : listenerReferences) {
                    writer.writeStartElement(SECURITY_EVENT_LISTENER);
                    writer.writeAttribute(NAME, currentReference.asString());
                    writer.writeEndElement();
                }
                writer.writeEndElement();
            }
        }
    }

    /**
     * Persists the audit-logging element; skipped entirely when no audit
     * logging resources are defined so empty config stays empty.
     */
    void writeAuditLogging(ModelNode subsystem, XMLExtendedStreamWriter writer) throws XMLStreamException {
        if (shouldWrite(subsystem) == false) {
            return;
        }
        writer.writeStartElement(AUDIT_LOGGING);
        writeAggregateSecurityEventListener(subsystem, writer);
        fileAuditLogParser.persist(writer, subsystem);
        syslogAuditLogParser.persist(writer, subsystem);
        writer.writeEndElement();
    }

    // True when at least one audit logging resource type is defined.
    private boolean shouldWrite(ModelNode subsystem) {
        return subsystem.hasDefined(AGGREGATE_SECURITY_EVENT_LISTENER) || subsystem.hasDefined(FILE_AUDIT_LOG) || subsystem.hasDefined(SYSLOG_AUDIT_LOG);
    }
}
// Copyright 2016 <NAME> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package response import "github.com/ajityagaty/go-kairosdb/builder" type GroupResult struct { Name string `json:"name,omitempty"` } type Results struct { Name string `json:"name,omitempty"` DataPoints []builder.DataPoint `json:"values,omitempty"` Tags map[string][]string `json:"tags,omitempty"` Group []GroupResult `json:"group_by,omitempty"` } type Queries struct { SampleSize int64 `json:"sample_size,omitempty"` ResultsArr []Results `json:"results,omitempty"` } type QueryResponse struct { *Response QueriesArr []Queries `json:"queries",omitempty` } func NewQueryResponse(code int) *QueryResponse { qr := &QueryResponse{ Response: &Response{}, } qr.SetStatusCode(code) return qr }
<reponame>lutzer/mazi-board<filename>src/node/database/database.js 'use strict'; /* * @Author: <NAME>, Design Research Lab, Universität der Künste Berlin * @Date: 2016-05-04 12:55:03 * @Last Modified by: lutzer * @Last Modified time: 2016-05-04 17:41:19 */ var mongoose = require('mongoose'); var Utils = r_require('/utils/utils'); module.exports = { db : false, connect : function(callback) { if (mongoose.connection.readyState == 0) { //not yet connected this.db = mongoose.connect(Config.database, function(err) { if (callback) callback(err); }); } else { if (callback) callback(); } }, disconnect: function() { if (this.db) this.db.disconnect(); } } // Connect to database
package main

import "fmt"

// diff compares the schemas of two SQL dumps and prints the ALTER/DROP
// statements needed to migrate from `original` to `new`. Matching is by
// table name and field name; `Done` flags on tables/fields track which
// entries have already been accounted for, so the three passes below must
// run in this order.
func diff(original, new string) {
	o, n := loadSQL(original, new)
	do := parseSQL(o, "original")
	dn := parseSQL(n, "new")

	// Start with the original, look for changed fields and keys
	for h, t := range do.Tables {
		// Go through the fields
		for i, f := range t.Fields {
			// Find in the new db
			foundTable := false
			for j, tn := range dn.Tables {
				if tn.Name == t.Name {
					// Find the field
					// fmt.Println("FOund ", t.Name)
					foundTable = true
					for k, fn := range tn.Fields {
						if fn.Name == f.Name {
							// Mark both sides handled; record a change only
							// when the field definitions actually differ.
							do.Tables[h].Done = true
							do.Tables[h].Fields[i].Done = true
							dn.Tables[j].Done = true
							dn.Tables[j].Fields[k].Done = true
							if fn.Details != f.Details {
								c := change{
									OriginalField: f,
									NewField:      fn,
								}
								do.Tables[h].Changes = append(do.Tables[h].Changes, c)
							}
						}
					}
				}
			}
			// NOTE(review): foundTable only reflects whether the *table* name
			// matched, so it computes the same value on every field
			// iteration; the table is marked for DROP when it is absent from
			// the new schema.
			if !foundTable {
				do.Tables[h].Drop = true
			}
		}
		// We have gone through the fields once.. now we go through again
		// looking for fields that were not in the new db
		if !do.Tables[h].Drop {
			for i, f := range t.Fields {
				if !f.Done {
					// Field exists only in the original: change with no NewField.
					c := change{
						OriginalField: f,
					}
					do.Tables[h].Changes = append(do.Tables[h].Changes, c)
					do.Tables[h].Fields[i].Done = true
				}
			}
		}
		// Now check the new db for fields that have not been taken care of
		for j, tn := range dn.Tables {
			if tn.Name == t.Name {
				// Find the field
				for k, fn := range tn.Fields {
					if !fn.Done {
						// Field exists only in the new schema: change with no
						// OriginalField.
						c := change{
							NewField: fn,
						}
						do.Tables[h].Changes = append(do.Tables[h].Changes, c)
						dn.Tables[j].Fields[k].Done = true
					}
				}
			}
		}
	}

	// Show changes
	if true {
		for _, t := range do.Tables {
			if t.Drop {
				fmt.Println("\n-- :::::::::: Changes for ", t.Name)
				fmt.Println("-- .....................................................")
				fmt.Println("-- Change : DROP ", t.Name)
				fmt.Printf("DROP TABLE %s;\n", t.Name)
			} else {
				if len(t.Changes) > 0 {
					fmt.Println("\n-- :::::::::: Changes for ", t.Name)
					for _, c := range t.Changes {
						fmt.Println("-- .....................................................")
						// An empty OriginalField name means the column only
						// exists in the new schema.
						if len(c.OriginalField.Name) > 0 {
							fmt.Println("-- Column : ", c.OriginalField.Name)
						} else {
							fmt.Println("-- Column : ", c.NewField.Name)
						}
						fmt.Println("-- Original : ", c.OriginalField.Details)
						fmt.Println("-- New : ", c.NewField.Details)
						fmt.Println(c.SQL(t.Name))
					}
				}
			}
		}
	}
}
#!/bin/bash
# Update script: clones vim from GitHub at master and builds it with a large
# feature set (python2/3, perl, lua(jit), ruby, X, cscope) into LOCAL_PREFIX.

# Init
source "$(dirname $(readlink -f $0))/lib_update.bash"

# clone() comes from lib_update.bash; extra script arguments are forwarded.
clone "vim" --github "vim/vim" --ver "master" $@

# Build
# --enable-fail-if-missing makes configure abort when a requested interpreter
# is unavailable instead of silently disabling it.
./configure --with-features=huge --enable-multibyte --enable-rubyinterp \
    --enable-largefile --enable-pythoninterp --with-x \
    --with-python-config-dir="${LOCAL_PREFIX}/lib/python2.7/config" \
    --enable-python3interp --with-python3-config-dir="${LOCAL_PREFIX}/lib/python3.5/config" \
    --enable-perlinterp --enable-luainterp --with-luajit --enable-fail-if-missing \
    --enable-cscope --prefix="${LOCAL_PREFIX}"

# NOTE(review): the runtime dir is pinned to .../vim/vim74 while the checkout
# tracks master — confirm this still matches the built version's runtime path.
make VIMRUNTIMEDIR="${LOCAL_PREFIX}/share/vim/vim74"
make install
#!/bin/sh # function3.sh . ./common.lib echo $STD_MSG rename .html .html-bak
#!/bin/bash

# This script generates release zips into _output/releases. It requires the openshift/origin-release
# image to be built prior to executing this command via hack/build-base-images.sh.

# NOTE: only committed code is built.
set -o errexit
set -o nounset
set -o pipefail

OS_ROOT=$(dirname "${BASH_SOURCE}")/..
source "${OS_ROOT}/hack/common.sh"

# Go to the top of the tree.
cd "${OS_ROOT}"

context="${OS_ROOT}/_output/buildenv-context"

# Clean existing output.
rm -rf "${OS_LOCAL_RELEASEPATH}"
rm -rf "${OS_LOCAL_BINPATH}"
rm -rf "${context}"
mkdir -p "${context}"
mkdir -p "${OS_OUTPUT}"

# Generate version definitions.
os::build::get_version_vars
os::build::save_version_vars "${context}/os-version-defs"

echo "++ Building release ${OS_GIT_VERSION}"

# Create the input archive: committed HEAD plus the generated version file.
git archive --format=tar -o "${context}/archive.tar" HEAD
tar -rf "${context}/archive.tar" -C "${context}" os-version-defs
gzip -f "${context}/archive.tar"

# Perform the build and release in Docker.
# The archive is streamed in on stdin; --cidfile records the container id so
# the built artifacts can be copied back out below.
cat "${context}/archive.tar.gz" | docker run -i --cidfile="${context}/cid" openshift/origin-release
docker cp $(cat ${context}/cid):/go/src/github.com/openshift/origin/_output/local/releases "${OS_OUTPUT}"
echo "${OS_GIT_COMMIT}" > "${OS_LOCAL_RELEASEPATH}/.commit"

# Copy the linux release archives release back to the local _output/local/go/bin directory.
os::build::detect_local_release_tars "linux"

mkdir -p "${OS_LOCAL_BINPATH}"
# tar's m flag avoids clock-skew warnings by not restoring mtimes.
tar mxzf "${OS_PRIMARY_RELEASE_TAR}" -C "${OS_LOCAL_BINPATH}"
tar mxzf "${OS_IMAGE_RELEASE_TAR}" -C "${OS_LOCAL_BINPATH}"

os::build::make_openshift_binary_symlinks
def filter_list(numbers):
    """Split an iterable of integers into evens and odds.

    Renamed the parameter from ``list`` (which shadowed the builtin) to
    ``numbers``; positional call sites are unaffected.

    Args:
        numbers: iterable of integers (may be empty).

    Returns:
        Tuple ``(even_list, odd_list)`` preserving the original order.
    """
    even_list = []
    odd_list = []
    for number in numbers:
        if number % 2 == 0:
            even_list.append(number)
        else:
            odd_list.append(number)
    return even_list, odd_list

even, odd = filter_list([12, 4, 5, 8, 10, 22])
print("Even numbers list :", even)
print("Odd numbers list :", odd)
// Happy-number check: repeatedly replace n by the sum of the squares of its
// decimal digits; n is happy iff this process reaches 1. `set` records sums
// already seen so a repeating cycle is detected as "not happy".
function findHappyNumber(n, set) {
  let sum = 0;
  for (const digit of n.toString()) {
    sum += digit * digit; // string digits coerce to numbers under *
  }
  if (sum === 1) return true;
  if (set.has(sum)) return false; // cycle detected — never reaches 1
  set.add(sum);
  return findHappyNumber(sum, set);
}

// Entry point: seeds the recursion with an empty seen-set.
function solution(n) {
  return findHappyNumber(n, new Set());
}

console.log(solution(50));
<filename>src/com/idylwood/utils/OptimizationUtils.java
/*
 * ====================================================
 * Copyright (C) 2013 by Idylwood Technologies, LLC. All rights reserved.
 *
 * Developed at Idylwood Technologies, LLC.
 * Permission to use, copy, modify, and distribute this
 * software is freely granted, provided that this notice
 * is preserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * The License should have been distributed to you with the source tree.
 * If not, it can be found at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Authors: <NAME>, <NAME>
 * Date: 2013
 * ====================================================
 */

package com.idylwood.utils;

import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;
import org.apache.commons.math3.linear.SingularValueDecomposition;

// Holder class for new stuff which hasn't had the dependencies gotten rid of yet.
public final class OptimizationUtils
{
	// Not instantiable: static utility holder.
	private OptimizationUtils() {}
	/**
	 * Solves for Markowitz optimal portfolio by means of lagrange multiplier.
	 * Returns null if it does not exist (the matrix is singular)
	 * Otherwise it attempts to find the lowest variance portfolio for
	 * the given <code>portfolio_return</code>
	 * @param covariance Precalculated covariance matrix
	 * @param returns Precalculated vector of returns
	 * @param portfolio_return Return to optimize risk for
	 * @author <NAME>
	 * @return weight vector of length covariance.length, or null when the
	 *         lagrange system is singular
	 */
	static final double[] MarkowitzSolve(final double[][] covariance, final double[] returns, final double portfolio_return)
	{
		if (covariance.length!=covariance[0].length)
			throw new IllegalArgumentException("Covariance needs to be square matrix");
		if (returns.length!=covariance.length)
			throw new IllegalArgumentException("Returns must be same length as covariance");
		/*
		for (int i = 0; i < covariance.length; i++)
			MathUtils.printArray(covariance[i]);
		System.out.println();
		MathUtils.printArray(returns);
		System.out.println();
		*/
		// NOTE(review): named timePoints but it equals the covariance
		// dimension, i.e. the number of assets — confirm and consider renaming.
		final int timePoints = covariance.length;
		// Lagrange system is (n+2)x(n+2): the top-left n x n block is
		// 2*covariance, the two extra rows/columns carry the two constraints
		// (target return, weights sum to one).
		final double[][] lagrangeMatrix = new double[timePoints+2][timePoints+2];
		//b as in Ax = b
		final double[] b = new double[timePoints+2];
		for(int i = 0; i < timePoints; i++)
		{
			for(int j = 0; j < timePoints; j++)
			{
				lagrangeMatrix[i][j] = 2*covariance[i][j];
				b[i] = 0; // this is like riskTolerance*returns[i]; but since
				// returns[i]*weights[i] = portfolio_return it will go away in the derivative
			}
		}
		// Constraint rows/columns: returns vector and all-ones.
		for(int j = 0; j<timePoints; j++)
		{
			lagrangeMatrix[timePoints][j] = returns[j];
			lagrangeMatrix[timePoints+1][j] = 1;
			lagrangeMatrix[j][timePoints] = returns[j];
			lagrangeMatrix[j][timePoints+1] = 1;
		}
		b[timePoints] = portfolio_return; //**** what is the constraint on total expected return?
		b[timePoints + 1] = 1;
		/*
		// Print out lagrangeMatrix augmented with b vector
		for(int i=0; i<timePoints+2; i++)
		{
			for(int j=0; j<timePoints+2;j++)
			{
				System.out.print(lagrangeMatrix[i][j] + " ");
			}
			System.out.println(b[i]);
		}
		*/
		// TODO use Gaussian elimination solver, may be faster
		// TODO maybe refactor to use idylblas
		RealMatrix lagrangeReal = MatrixUtils.createRealMatrix(lagrangeMatrix);
		RealVector bReal = MatrixUtils.createRealVector(b);
		SingularValueDecomposition svd = new SingularValueDecomposition(lagrangeReal);
		if (!svd.getSolver().isNonSingular())
			return null;
		RealVector solution = svd.getSolver().solve(bReal);
		final double weights[] = new double[timePoints];
		// last two elements of solution are just lagrange multipliers
		for (int i = 0; i < weights.length; i++)
			weights[i] = solution.getEntry(i);
		// put these in some test class
		// Sanity checks: weights sum to one and hit the requested return.
		if (!MathUtils.fuzzyEquals(1,MathUtils.sum(weights)))
			throw new RuntimeException();
		if (!MathUtils.fuzzyEquals(portfolio_return,MathUtils.linearCombination(returns, weights)))
			throw new RuntimeException();
		//The following calculates the risk(variance) for the weights found
		// final double risk = MathUtils.linearCombination(MathUtils.matrixMultiply(covariance, weights),weights);
		return weights;
	}

	/*
	private static final class MarkowitzFunction implements org.apache.commons.math3.analysis.MultivariateFunction
	{
		final double[][] S; final double[]R; final double q;
		MarkowitzFunction(final double[][] S, final double[]R, final double q)
		{
			this.S = S; this.R = R; this.q = q;
		}
		// this is gonna be so effing slow! haha
		// see http://en.wikipedia.org/wiki/Modern_portfolio_theory
		@Override public final double value(double[] w)
			// w weights, S cov matrix, R expected returns, q risk tolerance
		{
			final double foo = MathUtils.linearCombination(w,MathUtils.matrixMultiply(S,w));
			final double bar = MathUtils.linearCombination(R,w) * q;
			return foo - bar;
		}
	}
	// TODO make this work
	static final double[] cvxSolve(final double[][] S, final double []R, final double q)
	{
		ObjectiveFunction f = new ObjectiveFunction(new MarkowitzFunction(S,R,q));
		final double[] startPoint = new double[S.length];
		java.util.Arrays.fill(startPoint,1.0/S.length);
		final double[] ret = new NonLinearConjugateGradientOptimizer(
				NonLinearConjugateGradientOptimizer.Formula.FLETCHER_REEVES,
				new SimpleValueChecker(.1,.1)
			)
			.optimize(f, GoalType.MINIMIZE, new InitialGuess(startPoint) )
			.getPoint();
		return ret;
	}
	// returns double[] x such that T(c)*x is maximized, A*x <= b, and x>=0
	// TODO debug!
	static final double[] lpSolve(final double[][] A, final double[] b, final double[] c)
	{
		// TODO implement Feynman's simplex solver!
		// all apache stuff
		LinearObjectiveFunction f = new LinearObjectiveFunction(c, 0);
		List<LinearConstraint> constraints = new ArrayList<LinearConstraint>(A.length);
		for (int i = 0; i < A.length; ++i)
			constraints.add(new LinearConstraint(A[i],Relationship.GEQ,b[i]));
		final double ret[] = new SimplexSolver()
			.optimize(f,
					new LinearConstraintSet(constraints),
					GoalType.MINIMIZE,
					new NonNegativeConstraint(false),
					new MaxIter(100)
				)
			.getPoint();
		return ret;
	}
	*/
}
(function(Models, Types) {
    "use strict";

    // SMB1 FIND_CLOSE2 request model: closes a search handle previously
    // opened by a FIND_FIRST2. The message carries only the 2-byte search ID
    // in its parameter block and has an empty data block.

    // Constructor
    var FindClose2Request = function() {
        this.types_ = new Types();
        this.searchId_ = null;
    };

    // Public functions

    // Sets the 16-bit search handle to close.
    FindClose2Request.prototype.setSearchId = function(searchId) {
        this.searchId_ = searchId;
    };

    // Serializes the SMB parameter block: just the search ID, little layout
    // handled by Types.setFixed2BytesValue.
    FindClose2Request.prototype.createSmbParametersArrayBuffer = function() {
        var buffer = new ArrayBuffer(2);
        var array = new Uint8Array(buffer);
        // Search ID
        this.types_.setFixed2BytesValue(this.searchId_, array, 0);
        return buffer;
    };

    // FIND_CLOSE2 carries no data block.
    FindClose2Request.prototype.createSmbDataArrayBuffer = function() {
        return new ArrayBuffer(0);
    };

    // Export
    Models.FindClose2Request = FindClose2Request;

})(SmbClient.Smb1.Models, SmbClient.Types);
<filename>src/sentry/static/sentry/app/components/alert.tsx
import {css} from '@emotion/core';
import PropTypes from 'prop-types';
import React from 'react';
import classNames from 'classnames';
import styled from '@emotion/styled';

import InlineSvg from 'app/components/inlineSvg';
import space from 'app/styles/space';

// exporting it down with alertStyles caused error 'Props' is not defined no-undef
export type Props = {
  type?: 'muted' | 'info' | 'warning' | 'success' | 'error' | 'beta';
  iconSize?: string;
  icon?: string | React.ReactNode;
  system?: boolean;
};

// Alert accepts all div props except the ones Props redefines.
type AlertProps = Omit<React.HTMLProps<HTMLDivElement>, keyof Props> & Props;

// Shape of one entry in theme.alert[type].
type AlertThemeProps = {
  backgroundLight: string;
  border: string;
  iconColor: string;
};

const DEFAULT_TYPE = 'info';

// Per-type colors for a normal (boxed) alert.
const getAlertColorStyles = ({
  backgroundLight,
  border,
  iconColor,
}: AlertThemeProps) => css`
  background: ${backgroundLight};
  border: 1px solid ${border};
  svg {
    color: ${iconColor};
  }
`;

// System alerts drop the box look: no side borders, no radius, only a
// bottom border. Applied after getAlertColorStyles so it wins.
const getSystemAlertColorStyles = ({
  backgroundLight,
  border,
  iconColor,
}: AlertThemeProps) => css`
  background: ${backgroundLight};
  border: 0;
  border-radius: 0;
  border-bottom: 1px solid ${border};
  svg {
    color: ${iconColor};
  }
`;

// Exported so other components can reuse the same alert look.
const alertStyles = ({theme, type = DEFAULT_TYPE, system}: Props & {theme: any}) => css`
  display: flex;
  margin: 0 0 ${space(3)};
  padding: ${space(1.5)} ${space(2)};
  font-size: 15px;
  box-shadow: ${theme.dropShadowLight};
  border-radius: ${theme.borderRadius};
  background: ${theme.gray100};
  border: 1px solid ${theme.borderDark};

  a:not([role='button']) {
    color: ${theme.textColor};
    border-bottom: 1px dotted ${theme.textColor};
  }

  ${getAlertColorStyles(theme.alert[type])};
  ${system && getSystemAlertColorStyles(theme.alert[type])};
`;

const IconWrapper = styled('span')`
  display: flex;
  margin-right: ${space(1)};

  /* Give the wrapper an explicit height so icons are line height with the
   * (common) line height. */
  height: 22px;
  align-items: center;
`;

const StyledTextBlock = styled('span')`
  line-height: 1.5;
  flex-grow: 1;
  position: relative;
  margin: auto;
`;

// The icon prop may be an icon name (string, rendered via InlineSvg) or an
// already-built React node. `system` is consumed here for styling and not
// forwarded to the underlying div.
const Alert = styled(
  ({
    type,
    icon,
    iconSize,
    children,
    className,
    system: _system, // don't forward to `div`
    ...props
  }: AlertProps) => (
    <div className={classNames(type ? `ref-${type}` : '', className)} {...props}>
      {icon && (
        <IconWrapper>
          {typeof icon === 'string' ? <InlineSvg src={icon} size={iconSize!} /> : icon}
        </IconWrapper>
      )}
      <StyledTextBlock>{children}</StyledTextBlock>
    </div>
  )
)<AlertProps>`
  ${alertStyles}
`;

Alert.propTypes = {
  type: PropTypes.oneOf(['muted', 'info', 'warning', 'success', 'error', 'beta']),
  iconSize: PropTypes.string,
  icon: PropTypes.oneOfType([PropTypes.string, PropTypes.node]),
  system: PropTypes.bool,
};

Alert.defaultProps = {
  type: DEFAULT_TYPE,
  iconSize: '20px',
};

export {alertStyles};

export default Alert;
# Kill every process whose full command line (-f) matches "movies".
pkill -f movies
# NOTE: Python 2 syntax (raw_input, print statement) — this will not run
# under Python 3 without conversion.
def getans(n):
    # Read n lines from stdin; for each line print "YES" if that exact line
    # was seen earlier in this run, otherwise "NO", then remember it.
    S=set()
    for i in range(n):
        k=raw_input()
        if k in S:
            print "YES"
        else:
            print "NO"
            S.add(k)

# First stdin line is the count of lines that follow.
getans(int(raw_input()))
class Person:
    """Simple record of a person's name and age."""

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def getInformation(self):
        """Return a human-readable "Name: ..., Age: ..." summary."""
        return "Name: {}, Age: {}".format(self.name, self.age)


person = Person("John", 19)
print(person.getInformation())
<reponame>FunMusicalIdeas/zdaubyaos // tslint:disable no-reaching-imports export { applyIntensityScalar } from './applyIntensityScalar' export { applyPitchIndexShift } from './applyPitchIndexShift' export { computeNote } from '../features'
/*******************************************************************************
 * Copyright 2019 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 * Contributors:
 * <NAME> - Initial contribution and API
 ******************************************************************************/
package com.sa.security;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.springframework.security.core.AuthenticationException;
import org.springframework.security.web.AuthenticationEntryPoint;
import org.springframework.stereotype.Component;

/**
 * The <code>RestAuthenticationEntryPoint</code> is for send response when secure resource is accessed with out
 * authentication.
 *
 * @author <NAME>
 * @version 1.0
 * @since 1.0
 */
@Component
public class RestAuthenticationEntryPoint implements AuthenticationEntryPoint {

    @Override
    public void commence(HttpServletRequest req, HttpServletResponse resp, AuthenticationException ex)
            throws IOException, ServletException {
        // REST-style entry point: reply 401 directly rather than redirecting
        // to a login page as the default entry points do.
        resp.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized");
    }
}
<reponame>sthagen/aquasecurity-trivy<filename>pkg/scanner/local/mock_applier.go
// Code generated by mockery v1.0.0. DO NOT EDIT.

package local

import (
	types "github.com/aquasecurity/trivy/pkg/fanal/types"

	mock "github.com/stretchr/testify/mock"
)

// MockApplier is an autogenerated mock type for the Applier type
type MockApplier struct {
	mock.Mock
}

// ApplierApplyLayersArgs captures the expected arguments of one ApplyLayers
// call; the *Anything flags substitute mock.Anything for that argument.
type ApplierApplyLayersArgs struct {
	ArtifactID         string
	ArtifactIDAnything bool
	BlobIDs            []string
	BlobIDsAnything    bool
}

// ApplierApplyLayersReturns captures the values the mocked call should return.
type ApplierApplyLayersReturns struct {
	Detail types.ArtifactDetail
	Err    error
}

// ApplierApplyLayersExpectation pairs expected args with canned returns.
type ApplierApplyLayersExpectation struct {
	Args    ApplierApplyLayersArgs
	Returns ApplierApplyLayersReturns
}

// ApplyApplyLayersExpectation registers one expectation on the mock.
func (_m *MockApplier) ApplyApplyLayersExpectation(e ApplierApplyLayersExpectation) {
	var args []interface{}
	if e.Args.ArtifactIDAnything {
		args = append(args, mock.Anything)
	} else {
		args = append(args, e.Args.ArtifactID)
	}
	if e.Args.BlobIDsAnything {
		args = append(args, mock.Anything)
	} else {
		args = append(args, e.Args.BlobIDs)
	}
	_m.On("ApplyLayers", args...).Return(e.Returns.Detail, e.Returns.Err)
}

// ApplyApplyLayersExpectations registers a batch of expectations.
func (_m *MockApplier) ApplyApplyLayersExpectations(expectations []ApplierApplyLayersExpectation) {
	for _, e := range expectations {
		_m.ApplyApplyLayersExpectation(e)
	}
}

// ApplyLayers provides a mock function with given fields: artifactID, blobIDs
func (_m *MockApplier) ApplyLayers(artifactID string, blobIDs []string) (types.ArtifactDetail, error) {
	ret := _m.Called(artifactID, blobIDs)

	var r0 types.ArtifactDetail
	if rf, ok := ret.Get(0).(func(string, []string) types.ArtifactDetail); ok {
		r0 = rf(artifactID, blobIDs)
	} else {
		r0 = ret.Get(0).(types.ArtifactDetail)
	}

	var r1 error
	if rf, ok := ret.Get(1).(func(string, []string) error); ok {
		r1 = rf(artifactID, blobIDs)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}
<filename>src/screens/Home/components/Section/index.js import React, { PureComponent } from 'react'; import PropTypes from 'prop-types'; import { View } from 'react-native'; import { TextInput, Text } from 'react-native-paper'; import Picker from './components/Picker'; import styles from './styles'; class Section extends PureComponent { validateInput = (newText) => { const { handleValueChange } = this.props; /* * Verificamos se o valor digitado não é numéricamente negativo, ou seja, * se o texto inclui "-". A prop handleValueChange é executada com parâmetro * '' em caso positivo, e em caso negativo, é executada com parâmetro newText. */ if (Number.isNaN(newText)) return handleValueChange(''); return handleValueChange(newText); } // Renderização condicional render() { const { title, type, value, handlePickerChange, text, result, } = this.props; return ( <> <Text style={styles.title}> {title} </Text> <View style={styles.pickerContainer}> <Picker name={type === 'input' ? 'primary' : 'secondary'} selectedValue={value} onValueChange={handlePickerChange} /> { // Dependendo do valor da prop "type, temos um TextInput ou um Text. type === 'input' ? ( <TextInput style={styles.measure} dense mode="flat" keyboardType="numeric" value={text} onChangeText={this.validateInput} /> ) : (<Text style={styles.measure}>{result}</Text>) } </View> </> ); } } Section.propTypes = { title: PropTypes.string.isRequired, type: PropTypes.string.isRequired, value: PropTypes.oneOfType([PropTypes.string, PropTypes.number]).isRequired, handlePickerChange: PropTypes.func.isRequired, handleValueChange: PropTypes.func, text: PropTypes.string, result: PropTypes.string, }; export default Section;
//
// JWPerformanceTestViewController.h
// IconFont
//
// Created by <NAME> on 10/10/14.
// Copyright (c) 2014 Taodiandian. All rights reserved.
//

#import <UIKit/UIKit.h>

// Plain UIViewController subclass; declares no additional API here — the
// behavior lives entirely in the implementation file.
@interface JWPerformanceTestViewController : UIViewController

@end
const _ = require('lodash');
const chai = require('chai');
const { expect } = chai;
chai.use(require('chai-as-promised'));
const errors = require('balena-errors');
const { getRequest } = require('balena-request');
const mockServer = require('mockttp').getLocal();

const request = getRequest({});
const register = require('../build/register').getRegisterDevice({ request });

// The mocked /device/register endpoint only accepts this provisioning key.
const PROVISIONING_KEY = 'abcd';

describe('Device Register:', function () {
	describe('.generateUniqueKey()', function () {
		it('should return a string that has a length of 32 (16 bytes)', function () {
			const uniqueKey = register.generateUniqueKey();
			expect(uniqueKey).to.be.a('string').that.has.lengthOf(32);
		});

		it('should generate different unique key each time', function () {
			const uniqueKeys = _.times(3, register.generateUUID);
			expect(uniqueKeys[0]).to.not.equal(uniqueKeys[1]);
			expect(uniqueKeys[0]).to.not.equal(uniqueKeys[2]);
			expect(uniqueKeys[1]).to.not.equal(uniqueKeys[2]);
		});
	});

	describe('.register()', function () {
		// Mock server behavior: rejects requests lacking the Bearer
		// provisioning key; then switches on the `user` field in the body —
		// user 1 simulates a 401 failure, user 2 a successful 201 creation.
		before(() =>
			mockServer.start().then(() =>
				mockServer.post('/device/register').thenCallback(function (req) {
					if (req.headers.authorization !== `Bearer ${PROVISIONING_KEY}`) {
						throw new Error(
							`No or incorrect authorization header: ${req.headers.authorization}`,
						);
					}

					const { user } = JSON.parse(req.body.text);
					switch (user) {
						case 1:
							return {
								status: 401,
								body: 'Unauthorized',
							};
						case 2:
							return {
								status: 201,
								json: {
									id: 999,
								},
							};
						default:
							throw new Error(`Unrecognised user for mocking '${user}'`);
					}
				}),
			),
		);

		after(() => mockServer.stop());

		describe('given the post operation is unsuccessful', function () {
			it('should return an error to the callback', function () {
				return register
					.register({
						userId: 1,
						applicationId: 10350,
						uuid: register.generateUniqueKey(),
						deviceType: 'raspberry-pi',
						deviceApiKey: register.generateUniqueKey(),
						provisioningApiKey: PROVISIONING_KEY,
						apiEndpoint: mockServer.url,
					})
					.then(() => {
						// Reaching .then() means the expected failure didn't happen.
						throw new Error('Succeeded');
					})
					.catch(function (error) {
						expect(error).to.be.instanceof(errors.BalenaRequestError);
						expect(error).to.have.a.property(
							'message',
							'Request error: Unauthorized',
						);
					});
			});

			it('should return a rejected promise', function () {
				const promise = register.register({
					userId: 1,
					applicationId: 10350,
					uuid: register.generateUniqueKey(),
					deviceType: 'raspberry-pi',
					deviceApiKey: register.generateUniqueKey(),
					provisioningApiKey: PROVISIONING_KEY,
					apiEndpoint: mockServer.url,
				});

				return expect(promise).to.eventually.be.rejectedWith(
					Error,
					'Unauthorized',
				);
			});
		});

		describe('given the post operation is successful', function () {
			it('should return the resulting device info', function () {
				return register
					.register({
						userId: 2,
						applicationId: 10350,
						uuid: register.generateUniqueKey(),
						deviceType: 'raspberry-pi',
						deviceApiKey: register.generateUniqueKey(),
						provisioningApiKey: PROVISIONING_KEY,
						apiEndpoint: mockServer.url,
					})
					.then(function (deviceInfo) {
						expect(deviceInfo).to.deep.equal({
							id: 999,
						});
					});
			});

			it('should return a promise that resolves to the device info', function () {
				const promise = register.register({
					userId: 2,
					applicationId: 10350,
					uuid: register.generateUniqueKey(),
					deviceType: 'raspberry-pi',
					deviceApiKey: register.generateUniqueKey(),
					provisioningApiKey: PROVISIONING_KEY,
					apiEndpoint: mockServer.url,
				});

				return expect(promise).to.eventually.deep.equal({ id: 999 });
			});
		});
	});
});
#!/opt/tigersh-deps-0.1/bin/bash
# based on templates/build-from-source.sh v6

# Install ncurses / ncursesw on OS X Tiger / PowerPC.

# Note: this file builds both the ncurses and ncursesw packages.
# Which package is built is chosen from the script's own filename ($0).
if test -n "$(echo $0 | grep 'ncursesw')" ; then
    package=ncursesw
else
    package=ncurses
fi
version=6.3
upstream=https://ftp.gnu.org/gnu/ncurses/ncurses-$version.tar.gz

set -e -o pipefail
PATH="/opt/tigersh-deps-0.1/bin:$PATH"
TIGERSH_MIRROR=${TIGERSH_MIRROR:-https://leopard.sh}

# A ".ppc64.sh" filename suffix selects a 64-bit build.
if test -n "$(echo -n $0 | grep '\.ppc64\.sh$')" ; then
    ppc64=".ppc64"
fi

pkgspec=$package-$version$ppc64

echo -n -e "\033]0;tiger.sh $pkgspec ($(tiger.sh --cpu))\007"

# Prefer a prebuilt binary package; fall back to building from source.
if tiger.sh --install-binpkg $pkgspec ; then
    exit 0
fi

echo -e "${COLOR_CYAN}Building${COLOR_NONE} $pkgspec from source." >&2
set -x

if ! test -e /usr/bin/gcc ; then
    tiger.sh xcode-2.5
fi

echo -n -e "\033]0;tiger.sh $pkgspec ($(tiger.sh --cpu))\007"

tiger.sh --unpack-dist ncurses-$version
cd /tmp/ncurses-$version

CFLAGS=$(tiger.sh -mcpu -O)
CXXFLAGS=$(tiger.sh -mcpu -O)
if test -n "$ppc64" ; then
    CFLAGS="-m64 $CFLAGS"
    CXXFLAGS="-m64 $CXXFLAGS"
fi

# Note: ncurses needs the directory for .pc files to already exist:
mkdir -p /opt/$pkgspec/lib/pkgconfig

# Wide-character support is the only configure-time difference between the
# ncurses and ncursesw packages.
if test "$package" = "ncursesw" ; then
    enable_widec="--enable-widec"
fi

/usr/bin/time ./configure -C --prefix=/opt/$pkgspec \
    --with-manpage-format=normal \
    --with-shared \
    --without-debug \
    $enable_widec \
    CFLAGS="$CFLAGS" \
    CXXFLAGS="$CXXFLAGS"
    # --enable-pc-files \
    # --with-pkg-config-libdir=/opt/$pkgspec/lib/pkgconfig \

/usr/bin/time make $(tiger.sh -j) V=1

# Note: no 'make check' available.

make install

tiger.sh --linker-check $pkgspec
tiger.sh --arch-check $pkgspec $ppc64

# Preserve the configure cache alongside the installed package.
if test -e config.cache ; then
    mkdir -p /opt/$pkgspec/share/tiger.sh/$pkgspec
    gzip -9 config.cache
    mv config.cache.gz /opt/$pkgspec/share/tiger.sh/$pkgspec/
fi
#!/usr/bin/env bash
# Copyright 2019 The Go Cloud Development Kit Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Starts a local Vault instance via Docker.

# https://coderwall.com/p/fkfaqq/safer-bash-scripts-with-set-euxo-pipefail
set -euo pipefail

echo "Starting Vault Server..."
# Remove any leftover container from a previous run; ignore "not found".
docker rm -f dev-vault &> /dev/null || :
# Dev-mode server on port 8200 with a hardcoded root token ("faketoken") —
# for local testing only, never for real secrets.
docker run --cap-add=IPC_LOCK -d --name=dev-vault -e 'VAULT_DEV_ROOT_TOKEN_ID=faketoken' -p 8200:8200 vault:1.6.0 &> /dev/null
echo "...done. Run \"docker rm -f dev-vault\" to clean up the container."
echo
const express = require('express');
const router = express.Router();
const userModel = require('../models/userModel');

// Route to Handle User Registration
router.post("/register", (req, res) => {
    let { name, email, password } = req.body;
    // NOTE(review): the password is persisted exactly as received; it should
    // be hashed (e.g. bcrypt) before storage — confirm the model's behavior.
    userModel.create({ name, email, password })
        .then(user => {
            res.json({ status: user.email + ' Registered!' });
        })
        .catch(err => {
            // Fix: reply with an error status so clients can detect failure
            // (previously a bare 200 with an error string).
            res.status(500).send('error: ' + err);
        });
});

// Route to Handle User Login
router.post("/login", (req, res) => {
    let { email, password } = req.body;
    userModel.findOne({ email }, (err, user) => {
        if (err) {
            // Fix: previously only logged the error, leaving the request
            // hanging with no response at all.
            console.log(err);
            res.status(500).send('Internal Server Error');
        } else if (!user) {
            res.status(400).send('User Not Found!');
        } else if (user.password != password) {
            // NOTE(review): plain-text password comparison — replace with a
            // constant-time hash comparison once passwords are hashed.
            res.status(401).send('Wrong Password!');
        } else {
            res.json(user);
        }
    });
});

// Route to Retrieve User Details
router.get("/user/:id", (req, res) => {
    let { id } = req.params;
    userModel.findOne({ _id: id }, (err, user) => {
        if (err) {
            // Fix: respond on error instead of hanging the request.
            console.log(err);
            res.status(500).send('Internal Server Error');
        } else {
            res.json(user);
        }
    });
});

// Route to Delete a User
router.delete("/user/:id", (req, res) => {
    let { id } = req.params;
    userModel.findByIdAndDelete(id, (err, user) => {
        if (err) {
            // Fix: respond on error instead of hanging the request.
            console.log(err);
            res.status(500).send('Internal Server Error');
        } else if (!user) {
            // Fix: findByIdAndDelete yields null for an unknown id; the old
            // code then crashed on `user.name` (TypeError).
            res.status(404).send('User Not Found!');
        } else {
            res.json({ status: user.name + ' Deleted!' });
        }
    });
});

module.exports = router;
// MongoDB data-access helpers for the TFB benchmark: fortunes and worlds.
const MongoClient = require("mongodb").MongoClient;
const mongoUrl = "mongodb://tfb-database:27017";
const dbName = "hello_world";

// Shared client, created lazily on first use.
let client;

// Returns the named collection, connecting on first call.
// NOTE(review): two concurrent first calls can both see `client` unset and
// connect twice — presumably harmless for this benchmark, but confirm.
async function getCollection(name) {
  if (!client) {
    client = await MongoClient.connect(
      mongoUrl,
      { useNewUrlParser: true }
    );
  }
  const db = client.db(dbName);
  return db.collection(name);
}

// All fortune documents, without their _id field.
async function allFortunes() {
  const collection = await getCollection("fortune");
  const fortunes = await collection.find({}, { projection: { _id: 0 } });
  return fortunes.toArray();
}

// A single world document by its numeric `id` field (not _id).
async function getWorld(id) {
  const collection = await getCollection("world");
  return collection.findOne({ id }, { projection: { _id: 0 } });
}

// Persists updated world documents in one unordered bulk write.
async function saveWorlds(worlds) {
  const collection = await getCollection("world");
  const bulk = collection.initializeUnorderedBulkOp();
  worlds.forEach(world => {
    bulk.find({ id: world.id }).updateOne(world);
  });
  return bulk.execute();
}

module.exports = { getWorld, saveWorlds, allFortunes };
package DAO;

import models.Ranger;
import models.Sighting;
import models.SightingEndangeredSpecies;

import java.util.List;

/**
 * Data-access operations for {@link Ranger} records and the sightings
 * associated with a ranger.
 */
public interface RangerDAO {

    /** Returns every stored ranger. */
    List<Ranger> getAllRangers();

    /** Persists a new ranger. */
    void addRanger(Ranger ranger);

    /** Returns the ranger with the given id. */
    Ranger getRangerById(int id);

    /** Returns all sightings recorded by the given ranger. */
    List<Sighting> getSightingsByRangerId(int id);

    /** Returns the endangered-species sightings recorded by the given ranger. */
    List<SightingEndangeredSpecies> getEndangeredSightingsByRangerId(int id);
}
#pragma once #include <vector> #include <string> #include <Windows.h> #include "string_utils.h" std::vector<std::string> getFilesInDirectory(std::string folder) { using std::vector; using std::string; vector<string> names; string search_path = folder + "/*.*"; WIN32_FIND_DATA fd; HANDLE hFind = FindFirstFile(search_path.c_str(), &fd); if (hFind != INVALID_HANDLE_VALUE) { do { // read all (real) files in current folder // , delete '!' read other 2 default folder . and .. if (!(fd.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY)) { names.push_back(fd.cFileName); } } while (FindNextFile(hFind, &fd)); FindClose(hFind); } return names; } bool doesDirectoryExist(std::string folderPath) { DWORD dwAttrib = GetFileAttributes(folderPath.c_str()); return (dwAttrib != INVALID_FILE_ATTRIBUTES && (dwAttrib & FILE_ATTRIBUTE_DIRECTORY)); } bool deleteFile(std::string filepath) { if (!DeleteFile(filepath.c_str())) { DWORD err = GetLastError(); if (err == ERROR_FILE_NOT_FOUND) { return false; } if (err == ERROR_FILE_READ_ONLY) { return false; } } return true; } bool deleteDirectory(std::string dirPath) { return false; } bool makeDirectory(std::string directoryPath) { return static_cast<bool>(CreateDirectory(directoryPath.c_str(), 0)); } std::string makeFullPath(std::string path) { char buf[1024]; TCHAR** lppPart = { NULL }; GetFullPathName(path.c_str(), 1024, buf, lppPart); return buf; } bool makeDirectoryRecursive(std::string directoryPath) { std::vector<std::string> pathSplit; splitString(directoryPath, pathSplit, "\\"); std::string curPath = pathSplit[0] + "\\"; for (uint32_t i = 1; i < pathSplit.size(); ++i) { curPath += pathSplit[i] + "\\"; if (!doesDirectoryExist(curPath)) { makeDirectory(curPath); } } return true; } bool doesFilExist(std::string folderPath) { DWORD dwAttrib = GetFileAttributes(folderPath.c_str()); return (dwAttrib != INVALID_FILE_ATTRIBUTES && !(dwAttrib & FILE_ATTRIBUTE_DIRECTORY)); }
# frozen_string_literal: true module Neo4j module Driver class AuthTokens class << self def basic(username, password) Internal::Validator.require_non_nil_credentials!(username, password) Bolt::Auth.basic(username, password, nil) end def none Bolt::Auth.none end end end end end
#!/bin/sh
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# Signs every unsigned Windows executable in ./unsigned with osslsigncode,
# extracts the detached signatures, and tars them into signature-win.tar.gz.

# Allow overriding the osslsigncode binary via the environment.
if [ -z "$OSSLSIGNCODE" ]; then
    OSSLSIGNCODE=osslsigncode
fi

if [ ! -n "$1" ]; then
    echo "usage: $0 <osslcodesign args>"
    echo "example: $0 -key codesign.key"
    exit 1
fi

OUT=signature-win.tar.gz
SRCDIR=unsigned
WORKDIR=./.tmp
OUTDIR="${WORKDIR}/out"
OUTSUBDIR="${OUTDIR}/win"
TIMESERVER=http://timestamp.comodoca.com
CERTFILE="win-codesign.cert"

mkdir -p "${OUTSUBDIR}"
# Sign each *-unsigned.exe, then keep only the extracted signature (.pem);
# the signed intermediate binary is deleted once extraction succeeds.
basename -a `ls -1 "${SRCDIR}"/*-unsigned.exe` | while read UNSIGNED; do
    echo Signing "${UNSIGNED}"
    "${OSSLSIGNCODE}" sign -certs "${CERTFILE}" -t "${TIMESERVER}" -in "${SRCDIR}/${UNSIGNED}" -out "${WORKDIR}/${UNSIGNED}" "$@"
    "${OSSLSIGNCODE}" extract-signature -in "${WORKDIR}/${UNSIGNED}" -out "${OUTSUBDIR}/${UNSIGNED}.pem" && rm "${WORKDIR}/${UNSIGNED}"
done

rm -f "${OUT}"
tar -C "${OUTDIR}" -czf "${OUT}" .
rm -rf "${WORKDIR}"
echo "Created ${OUT}"
// Basic functionality: // Comments // Starter // Call a method // Self-method // Nested method // Assignment export default ` title TTT 1 @Lambda <<A>> A // comments at the beginning should be ignored group "B C" {@EC2 B @ECS C} "C 2" "B 1" // This is comment // // |col1|col2| // |----|----| // |val1|val2| @Starter("User") new B RET ret = A.methodA(a, b) { // A comment for self interaction<br> // \`POST /order\` <br> // [X](http://x.x) RET ret2 = selfMethod() { // A sync interaction after a self interaction B.method() { A.method() return B } "B 1"->"C 2".syncMethod(from, abitrary, source, B) } // A comment for creation b = new B() "b:B".method() // A comment for async self B->B: Self Async // A comment for async message B->C: Async Message par { A.method B.method } // A comment for alt if (X) { new X() { return smallX } return X A->B: message self { return C } C: self B.alternative() { // A comment for creation a = new AHasAVerylongnamesoitislong() { // A comment for creation b = new B() // Right to left C.method() { // Further right to left B.method() } self() } } =====divider===== } else if (Y) { C.method() par { A.method B.method } } else { D.method() { return D } // A comment for loop forEach(Z) { Z.method() { return Z } } } }`
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0

# Builds and deploys the SAM template, then uploads the sample dataset to
# the stack's S3 data bucket.
#
# Usage: <script> <stack-name>

set -e

# Fix: quote "$1" — unquoted, a missing or whitespace-containing argument
# changes the word count seen by `[` and breaks the test.
if [ -z "$1" ] ; then
    echo "Please provide stack name for SAM template"
    exit 1
fi

sam build -t ./template/template.yaml
sam deploy --stack-name "$1" --capabilities CAPABILITY_IAM --no-confirm-changeset

# Resolve the data bucket name from the stack's S3DataBucket output.
BUCKET_NAME=$(aws cloudformation describe-stacks --stack-name "$1" --query 'Stacks[0].Outputs[?OutputKey==`S3DataBucket`].OutputValue' --output text)

aws s3 cp item-demand-time.csv "s3://${BUCKET_NAME}/data/item-demand-time.csv"
<gh_stars>0 import React from "react"; import PropTypes from "prop-types"; import CsrfTokenInput from "../utils/authentication-utils"; import { toKeyValueArray } from "../utils/dictionary-utils"; import { restCreate } from "../api/rest-api-consumation"; import capitalizeFirstLetter from "../utils/string-utils"; export { CreateForm as default }; export const CreateForm = ({ id, dataState, csrfToken, createFields }) => { const resourceUrl = dataState["resource"].url; const setStateCallback = dataState["resource"].callback; const foreignKeyValue = window.sessionStorage.getItem("foreignKey"); const fields = toKeyValueArray(createFields); const onSubmit = async (event) => { event.preventDefault(); // prevent default behavior of form submit const [ , // status json, ] = await restCreate({ url: resourceUrl, data: new FormData(event.target), }); setStateCallback((oldStatus) => [...oldStatus, json]); }; return ( <form onSubmit={onSubmit} id={id}> <CsrfTokenInput csrfToken={csrfToken} /> <input type="hidden" name="foreignKey" value={foreignKeyValue == null ? 
"" : foreignKeyValue} /> {fields.map((field) => { return <FormField key={field[0]} field={field} />; })} </form> ); }; CreateForm.propTypes = { id: PropTypes.string, dataState: PropTypes.object, resourceUrl: PropTypes.string, setStateCallback: PropTypes.func, csrfToken: PropTypes.string, createFields: PropTypes.object, }; const FormField = ({ field }) => { const fieldName = field[0]; const inputId = `id${fieldName}`; const label = capitalizeFirstLetter(fieldName.replace(/_/g, " ")); return ( <div className="mb-3 row"> <label className="col-form-label px-3" htmlFor={inputId}> {label} </label> <FormFieldTag inputId={inputId} field={field} /> </div> ); }; FormField.propTypes = { field: PropTypes.array, }; const FormFieldTag = ({ inputId, field }) => { const fieldName = field[0]; const fieldProps = field[1]; const fieldTagType = fieldProps.htmlTag; if (fieldTagType === "textarea") { return ( <textarea id={inputId} name={fieldName} type={fieldProps.type} className=" form-control mx-3" autoComplete="on" /> ); } return ( <input id={inputId} name={fieldName} type={fieldProps.type} className="form-control mx-3" autoComplete="on" /> ); }; FormFieldTag.propTypes = { inputId: PropTypes.string, field: PropTypes.array, };
/*
 * Tencent is pleased to support the open source community by making 蓝鲸 available.
 * Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
 * Licensed under the MIT License (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 * http://opensource.org/licenses/MIT
 * Unless required by applicable law or agreed to in writing, software distributed under
 * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */

package types

import (
	"configcenter/src/framework/core/types"
)

// CreateSetCtx is the request context for creating a set.
type CreateSetCtx struct {
	BaseCtx
	SetID int64
	Set   types.MapStr
}

// CreateSetResult is the response of a set-creation request; Data.ID is
// the id assigned to the new set.
type CreateSetResult struct {
	BaseResp `json:",inline"`
	Data     struct {
		ID int64 `json:"id"`
	} `json:"data"`
}

// DeleteSetCtx is the request context for deleting a set within a business.
type DeleteSetCtx struct {
	BaseCtx
	BusinessID int64
	SetID      int64
}

// UpdateSetCtx is the request context for updating a set's fields.
type UpdateSetCtx struct {
	BaseCtx
	BusinessID int64
	SetID      int64
	Set        types.MapStr
}

// ListSetCtx is the request context for querying sets with a filter.
type ListSetCtx struct {
	BaseCtx
	Tenancy    string
	BusinessID int64
	SetID      int64
	Filter     Query
}

// ListSetResult is the response of a set-list query.
type ListSetResult struct {
	BaseResp `json:",inline"`
	Data     ListInfo
}
#!/bin/bash
# Slurm batch script: runs the Intel MPI Benchmarks "Uniband" test with
# 4 MPI ranks placed 2 per node, on exclusively allocated nodes.
#SBATCH --job-name="rfm_IMB_Uniband__2_job"
#SBATCH --ntasks=4
#SBATCH --ntasks-per-node=2
#SBATCH --output=rfm_IMB_Uniband__2_job.out
#SBATCH --error=rfm_IMB_Uniband__2_job.err
#SBATCH --time=0:10:0
#SBATCH --exclusive

module load gcc/9.3.0-5abm3xg
module load openmpi/4.0.3-qpsxmnc

# Use the PMIx v2 launcher plugin and pin traffic to the first IB port.
export SLURM_MPI_TYPE=pmix_v2
export UCX_NET_DEVICES=mlx5_0:1

module load intel-mpi-benchmarks/2019.5-dwg5q6j
# -npmin 1 lets IMB also report results for smaller process counts.
srun IMB-MPI1 uniband -npmin 1
#! /bin/bash

#set -o xtrace
set -o errexit
set -o pipefail
set -o nounset

#------------------------------------------------------------------------------
# Helper functions
#------------------------------------------------------------------------------

# Print a highlighted log line.
function log() {
    echo -e "\n+" "$@"
}

# Latest MacPorts release tag from the GitHub API, with the leading 'v' stripped.
function get_latest_macports_version() {
    # Fix: under `set -o nounset`, expanding the never-assigned array
    # "${_auth_header[@]}" aborted the whole script whenever GITHUB_TOKEN was
    # not set (fatal on macOS's bash 3.2 and on every bash before 4.4).
    # Initialize the array and use the ${arr[@]+...} guard, which expands to
    # nothing when the array is unset or empty.
    local _auth_header=()
    if [[ -n "${GITHUB_TOKEN:-}" ]]; then
        _auth_header=(--header "authorization: Bearer $GITHUB_TOKEN")
    fi
    /usr/bin/curl \
        --fail --silent ${_auth_header[@]+"${_auth_header[@]}"} \
        --header 'Accept: application/vnd.github.v3+json' \
        'https://api.github.com/repos/macports/macports-base/releases/latest' \
        | /usr/bin/python3 -c "import sys, json; print(json.load(sys.stdin)['tag_name'])" \
        | /usr/bin/sed -e 's/^v//'
}

# macOS version in the form MacPorts packages are named after:
# "10.x" before Big Sur, the bare major number from 11 onward.
function get_macos_version() {
    local _version
    local _version_parts
    _version=$(/usr/bin/sw_vers -productVersion)
    IFS='.' read -r -a _version_parts <<< "$_version"
    if [[ "${_version_parts[0]}" -lt 11 ]]; then
        echo "${_version_parts[0]}.${_version_parts[1]}"
    else
        echo "${_version_parts[0]}"
    fi
}

# The marketing name of the installed macOS (e.g. "Monterey"), extracted
# from the OS software-license RTF.
function get_macos_friendly_name() {
    # Thank you: https://apple.stackexchange.com/a/333470
    /usr/bin/awk \
        '/SOFTWARE LICENSE AGREEMENT FOR macOS/' \
        '/System/Library/CoreServices/Setup Assistant.app/Contents/Resources/en.lproj/OSXSoftwareLicense.rtf' \
        | /usr/bin/awk -F 'macOS ' '{print $NF}' \
        | /usr/bin/awk '{print substr($0, 0, length($0)-1)}' \
        | /usr/bin/tr -d '[:space:]'
}

# Installer package name, e.g. "MacPorts-2.8.0-12-Monterey.pkg".
function get_macports_pkg_file_name() {
    echo "MacPorts-$(get_latest_macports_version)-$(get_macos_version)-$(get_macos_friendly_name).pkg"
}

#------------------------------------------------------------------------------
# Main
#------------------------------------------------------------------------------

# Fix: corrected misspelled log message ("Disabeling").
log 'Disabling Spotlight Search indexing'
/usr/bin/sudo mdutil -a -i off

log 'Installation information...'
MACPORTS_PKG_FILE="$(get_macports_pkg_file_name)"
echo MACPORTS_PKG_FILE="$MACPORTS_PKG_FILE"

log 'Downloading MacPorts...'
/usr/bin/curl \
    --fail \
    --location \
    --remote-name \
    --show-error \
    --silent \
    "https://distfiles.macports.org/MacPorts/$MACPORTS_PKG_FILE"

log 'Installing MacPorts...'
/usr/bin/sudo /usr/sbin/installer -pkg "$MACPORTS_PKG_FILE" -target /

export PATH="/opt/local/libexec/gnubin:/opt/local/bin:/opt/local/sbin:$PATH"
hash -r

log 'Running MacPorts selfupdate/upgrade...'
/usr/bin/sudo /opt/local/bin/port selfupdate
/usr/bin/sudo /opt/local/bin/port upgrade outdated

log 'Install minimum required MacPorts...'
/usr/bin/sudo /opt/local/bin/port install \
    coreutils \
    curl-ca-bundle \
    pip_select \
    py39-certifi \
    py39-distlib \
    py39-pip \
    py39-setuptools \
    py39-virtualenv \
    py39-wheel \
    python39 \
    python3_select \
    python_select

/usr/bin/sudo /opt/local/bin/port select python3 python39
/usr/bin/sudo /opt/local/bin/port select python python39
/usr/bin/sudo /opt/local/bin/port select pip3 pip39
/usr/bin/sudo /opt/local/bin/port select pip pip39
<reponame>iarthstar/purescript-sketch "use strict"; exports.copyToPasteboard = (str) => () => { const paste = NSPasteboard.generalPasteboard(); paste.clearContents(); paste.setString_forType(str, "public.utf8-plain-text"); return {}; };
// Algorithm to find the largest rectangular submatrix of 1's in a given binary matrix // Step 1: Create an auxiliary matrix to store the max size of the submatrix // Auxiliary matrix // 0 0 0 0 0 // 0 0 1 0 0 // 0 1 1 1 0 // 0 0 1 2 3 // 0 0 1 2 0 // Step 2: Iterate over the auxiliary matrix and find the maximal value int maxSize = 0; for (int i = 0; i < rows; i++) { for (int j = 0; j < columns; j++) { if (auxMatrix[i][j] > maxSize) maxSize = auxMatrix[i][j]; } } // Step 3: The maximum size is the size of the largest rectangular submatrix System.out.println("Size of the largest rectangular submatrix of 1's is: " + maxSize);
'use strict';

// Make sure that hb field is always present in this form,
// true => 'on', false => ''
//
// It's not there by default unless checked, and server won't change
// any fields that aren't present in input.
//
// (Fix: removed a stray "<filename>" dataset-artifact line that made the
// module invalid JavaScript.)
N.wire.before('admin.users.members.edit:submit', function get_hb(data) {
  data.fields.hb = data.fields.hb || '';
});
def fx(nums):
    """Return the sum of the squares of the even numbers in ``nums``.

    Odd numbers contribute nothing; an empty iterable yields 0.
    """
    # Generator expression replaces the map/lambda-with-else-0 construct.
    return sum(x * x for x in nums if x % 2 == 0)


# Fix: the previous comment claimed 240; the actual result is
# 4^2 + 6^2 + 8^2 + 10^2 = 16 + 36 + 64 + 100 = 216.
print(fx([3, 4, 5, 6, 7, 8, 9, 10]))  # Output: 216
/**
 * Unit tests for the Coca cache-stack API.
 * (Fix: removed a stray "{@code <gh_stars>}" dataset-artifact token that
 * preceded the package declaration and made the file uncompilable.)
 */
package coca.api;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import coca.ca.BasicCa;
import coca.ca.Ca;
import coca.ca.CaValue;
import coca.ca.stack.CaStack;
import coca.co.CoException;

/**
 * @author dzh
 * @date Oct 12, 2017 8:04:15 PM
 * @since 0.0.1
 */
public class TestCoca {

    static Logger LOG = LoggerFactory.getLogger(TestCoca.class);

    Coca coca;

    /** Creates a fresh Coca instance before each test. */
    @Before
    public void initCoca() {
        Map<String, String> conf = new HashMap<>();
        // conf.put(CocaConst.P_CO_INS_FACTORY, CocaInsFactory.class.getName());
        coca = Coca.newCoca(conf);
    }

    /** Closes the Coca instance after each test, logging (not failing) on I/O errors. */
    @After
    public void closeCoca() {
        try {
            coca.close();
        } catch (IOException e) {
            LOG.error(e.getMessage());
        }
    }

    /** Verifies that a stack built from a list keeps the caches in order. */
    @Test
    public void testWithStack() throws IOException, CoException {
        List<Ca<String, String>> caList = Arrays.asList(
                new CaLocal<String, String>("local"),
                new CaRemote<String, String>("remote"));
        LOG.info("index-{} name-{}", 1, caList.get(1).name());
        CaStack<String, String> stack = coca.<String> withStack("test", caList);
        Ca<String, String> ca = stack.ca(0);
        Assert.assertEquals("local", ca.name());
        LOG.info("index-{} name-{}", 1, stack.ca(1).name());
    }

    /** Minimal local-cache stub; reads return null and writes are rejected. */
    public static class CaLocal<K, V> extends BasicCa<K, V> {

        public CaLocal(String name) {
            super(name, CaType.Local);
        }

        @Override
        protected CaValue<K, V> doRead(K key) {
            return null;
        }

        @Override
        protected boolean doWrite(CaValue<K, V> val) {
            return false;
        }
    }

    /** Minimal remote-cache stub; reads return null and writes are rejected. */
    public static class CaRemote<K, V> extends BasicCa<K, V> {

        public CaRemote(String name) {
            super(name, CaType.Remote);
        }

        @Override
        protected CaValue<K, V> doRead(K key) {
            return null;
        }

        @Override
        protected boolean doWrite(CaValue<K, V> val) {
            return false;
        }
    }
}
package com.flockinger.poppynotes.notesService.api;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.handler.HandlerInterceptorAdapter;

import es.moki.ratelimitj.core.limiter.request.RequestRateLimiter;

/**
 * Rate-limits state-changing requests (any method other than GET/HEAD)
 * per client IP. Read requests are never throttled.
 */
@Component
public class RateLimitInterceptor extends HandlerInterceptorAdapter {

  @Autowired
  private RequestRateLimiter rateLimiter;

  // Key prefix so modify-request counters don't collide with other limiter keys.
  private final static String MODIFY_IP_KEY_NAME = "modify-IP:";

  /**
   * Lets the request proceed unless the per-IP modify quota is exceeded,
   * in which case handling stops with HTTP 429 (Too Many Requests).
   */
  @Override
  public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler)
      throws Exception {
    // Read-only methods bypass the limiter entirely.
    if (StringUtils.equalsAnyIgnoreCase(request.getMethod(), "GET", "HEAD")) {
      return true;
    }
    // NOTE(review): uses the direct peer address; behind a reverse proxy this
    // is the proxy's IP — confirm whether X-Forwarded-For handling is needed.
    String clientIp = request.getRemoteAddr();
    boolean modifyCallAllowed = !rateLimiter.overLimitWhenIncremented(MODIFY_IP_KEY_NAME + clientIp);
    if(!modifyCallAllowed) {
      response.setStatus(HttpStatus.TOO_MANY_REQUESTS.value());
    }
    return modifyCallAllowed;
  }
}
window.addEventListener('DOMContentLoaded', () => {
    // Cache the navigation elements once the DOM is ready.
    const menu = document.querySelector('.menu');
    const hamburger = document.querySelector('.hamburger');
    const menuItems = document.querySelectorAll('.menu_item');

    // Flips both the burger icon and the menu panel between open and closed.
    const toggleNavigation = () => {
        hamburger.classList.toggle('hamburger_active');
        menu.classList.toggle('menu_active');
    };

    hamburger.addEventListener('click', toggleNavigation);

    // Selecting any menu entry closes (or reopens) the menu the same way.
    for (const item of menuItems) {
        item.addEventListener('click', toggleNavigation);
    }
});
"use strict"; /* global jQuery, includeLayout */ (function(root, factory) { if(typeof define === 'function' && define.amd) { define(function(require, exports, module) { require("common.css!"); require("ui/toolbar"); module.exports = factory( require("jquery"), require("framework/command"), require("animation/fx"), require("../../../../layouts/SlideOut/SlideOutLayout.js").SlideOutController, require("../../../helpers/layoutHelper.js") ); }); } else { factory( jQuery, DevExpress.framework.dxCommand, DevExpress.fx, DevExpress.layouts.SlideOutLayout.SlideOutController, DevExpress.testHelpers ); } }(this, function($, dxCommand, fx, SlideOutController, layoutHelper) { includeLayout("SlideOut"); QUnit.testStart(function() { var markup = ' \ <div id="viewPort" class="dx-viewport"></div> \ \ <div id="templates-root"> \ \ <div data-options="dxView: { name: \'simple\' }"> \ <div data-options="dxContent: { targetPlaceholder: \'content\' }"> \ </div> \ </div> \ \ <div data-options="dxView: { name: \'simple-with-visible-back\' }"> \ <div data-bind="dxCommand: { id: \'back\', title: \'Custom\', visible: true, renderStage: \'onViewRendering\' }"></div> \ <div data-options="dxContent: { targetPlaceholder: \'content\' }"> \ </div> \ </div> \ \ <div data-options="dxView: { name: \'simple-with-visible-back-on-view-shown\' }"> \ <div data-bind="dxCommand: { id: \'back\', title: \'Custom\', visible: true, renderStage: \'onViewShown\' }"></div> \ <div data-options="dxContent: { targetPlaceholder: \'content\' }"> \ </div> \ </div> \ \ <div data-options="dxView: { name: \'simple-with-invisible-back\' }"> \ <div data-bind="dxCommand: { id: \'back\', title: \'Custom\', visible: false, renderStage: \'onViewRendering\' }"></div> \ <div data-options="dxContent: { targetPlaceholder: \'content\' }"> \ </div> \ </div> \ \ </div>'; $("#qunit-fixture").append(markup); }); function createLayoutController(options) { options = options || {}; options.device = options.device || { platform: "ios", phone: 
true }; return layoutHelper.createLayoutController(options, function(ctorOptions) { return new SlideOutController(ctorOptions); }); } QUnit.module("SlideOutLayoutController"); QUnit.test("Test template", function(assert) { var done = assert.async(); createLayoutController().done(function(layoutController) { var $viewPort = $("#viewPort"); assert.equal($viewPort.children(".dx-slideout.dx-fast-hidden").length, 0); assert.equal($viewPort.children(".dx-slideout:visible").length, 1); layoutController.deactivate(); assert.equal($viewPort.children(".dx-slideout.dx-fast-hidden").length, 1); assert.equal($viewPort.children(".dx-slideout:visible").length, 1, "Hidden by position not display"); done(); }); }); QUnit.test("T304402: SlideOut layout - It is impossible to change a menu item title at runtime", function(assert) { var done = assert.async(); createLayoutController().done(function(layoutController) { var $viewPort = $("#viewPort"), $layout = $viewPort.children(".dx-slideout"), commandMapping = layoutController._commandManager.commandMapping, navigationCommand = new dxCommand({ id: "item1", title: "initial text" }), navigation = [navigationCommand]; layoutHelper.prepareNavigation(navigation, commandMapping); layoutController.renderNavigation(navigation); var $item = $layout.find(".dx-list .dx-list-item .dx-navigation-item"); assert.equal($item.length, 1); assert.equal($item.text(), "initial text"); navigationCommand.option("title", "changed text"); $item = $layout.find(".dx-list .dx-list-item .dx-navigation-item"); assert.equal($item.text(), "changed text"); done(); }); }); QUnit.test("Navigation button exists if there is no back command", function(assert) { var done = assert.async(); createLayoutController().done(function(layoutController) { layoutController.showView({ model: { title: "SimpleView" }, viewName: "simple" }).done(function() { var $viewPort = $("#viewPort"); var $navButton = $viewPort.find(".dx-slideout .dx-active-view .nav-button:visible"); 
assert.equal($navButton.length, 1); done(); }); }); }); QUnit.test("Navigation button doesn't exist if back command is visible", function(assert) { var done = assert.async(); createLayoutController().done(function(layoutController) { layoutController.showView({ model: { title: "SimpleView" }, viewName: "simple-with-visible-back" }).done(function() { var $viewPort = $("#viewPort"); var $navButton = $viewPort.find(".dx-slideout .dx-active-view .layout-toolbar .nav-button:visible"), $toolbarLeftVisibleItems = $viewPort.find(".dx-slideout .dx-active-view .layout-toolbar .dx-toolbar-before .dx-toolbar-item:visible"); assert.equal($navButton.length, 0); assert.equal($toolbarLeftVisibleItems.length, 1, "T175800"); done(); }); }); }); QUnit.test("Navigation button doesn't exist if back command is visible and rendered on viewShown (T242013)", function(assert) { var done = assert.async(); createLayoutController().done(function(layoutController) { layoutController.showView({ model: { title: "SimpleView" }, viewName: "simple-with-visible-back-on-view-shown" }).done(function() { var $viewPort = $("#viewPort"), $navButton = $viewPort.find(".dx-slideout .dx-active-view .layout-toolbar .nav-button:visible"); assert.equal($navButton.length, 0); done(); }); }); }); QUnit.test("Navigation button exists if back command is invisible", function(assert) { var done = assert.async(); createLayoutController().done(function(layoutController) { layoutController.showView({ model: { title: "SimpleView" }, viewName: "simple-with-invisible-back" }).done(function() { var $viewPort = $("#viewPort"); var $navButton = $viewPort.find(".dx-slideout .dx-active-view .nav-button:visible"); assert.equal($navButton.length, 1); done(); }); }); }); QUnit.test("Navigation button is updated if toolbar is rerendered", function(assert) { var done = assert.async(); createLayoutController().done(function(layoutController) { var backCommand = new dxCommand({ id: 'back', title: 'Custom', visible: false, renderStage: 
'onViewRendering' }); layoutController.showView({ model: { title: "SimpleView" }, commands: [ backCommand ], viewName: "simple" }).done(function() { var $viewPort = $("#viewPort"); var $navButton = $viewPort.find(".dx-slideout .dx-active-view .nav-button:visible"); assert.equal($navButton.length, 1); var toolbar = $viewPort.find(".dx-slideout .dx-active-view .layout-toolbar").dxToolbar("instance"); toolbar.repaint(); $navButton = $viewPort.find(".dx-slideout .dx-active-view .nav-button:visible"); assert.equal($navButton.length, 1); var slideOut = $viewPort.find(".dx-slideout").dxSlideOut("instance"); assert.ok(!slideOut.option("menuVisible")); $navButton.trigger("dxclick"); assert.ok(slideOut.option("menuVisible"), "T178940"); done(); }); }); }); QUnit.test("Test view is shown before menu is hidden (S172782)", function(assert) { assert.expect(5); var done = assert.async(); createLayoutController().done(function(layoutController) { var slideOut = layoutController.slideOut; assert.equal(slideOut.option("menuVisible"), false); slideOut.toggleMenuVisibility(true).done(function() { assert.equal(slideOut.option("menuVisible"), true); slideOut.on("optionChanged", function(args) { assert.equal(args.name, "menuVisible"); assert.equal(args.value, false); assert.equal(slideOut.$element().find(".layout-header .dx-toolbar-center .dx-toolbar-label .dx-item-content div").text(), "test title"); }); layoutController.showView({ model: { title: "test title" }, viewName: "simple" }).done(done); }); }); }); QUnit.test("There should be no transitions if menu is visible (T246943)", function(assert) { assert.expect(6); var done = assert.async(); createLayoutController().done(function(layoutController) { var slideOut = layoutController.slideOut, viewInfo1 = { model: { title: "test1" }, viewName: "simple" }, viewInfo2 = { model: { title: "test2" }, viewName: "simple" }, expectedFxOff; layoutController.transitionExecutor = { enter: function() { assert.equal(fx.off, expectedFxOff); }, leave: 
function() {}, start: function() { return $.Deferred().resolve().promise(); } }; expectedFxOff = false; layoutController.showView(viewInfo1); layoutController.showView(viewInfo2); expectedFxOff = true; slideOut.toggleMenuVisibility(true).done(function() { layoutController.showView(viewInfo1); done(); }); }); }); QUnit.test("Menu hides if navigation was canceled", function(assert) { var done = assert.async(); createLayoutController().done(function(layoutController) { var slideOut = layoutController.slideOut; assert.equal(slideOut.option("menuVisible"), false); slideOut.toggleMenuVisibility(true).done(function() { assert.equal(slideOut.option("menuVisible"), true); layoutController._navigationManager.fireEvent("navigationCanceled", [{}]); assert.equal(slideOut.option("menuVisible"), false); done(); }); }); }); QUnit.test("Menu is hiding if it is disable (T398810)", function(assert) { var done = assert.async(); createLayoutController().done(function(layoutController) { var slideOut = layoutController.slideOut; slideOut.toggleMenuVisibility(true).done(function() { assert.equal(slideOut.option("menuVisible"), true); layoutController.disable(); assert.equal(slideOut.option("menuVisible"), false); done(); }); }); }); QUnit.test("Menu doesn't hide if navigation was canceled by redirect reason (T264514)", function(assert) { var done = assert.async(); createLayoutController().done(function(layoutController) { var slideOut = layoutController.slideOut; assert.equal(slideOut.option("menuVisible"), false); slideOut.toggleMenuVisibility(true).done(function() { assert.equal(slideOut.option("menuVisible"), true); layoutController._navigationManager.fireEvent("navigationCanceled", [{ cancelReason: "redirect" }]); assert.equal(slideOut.option("menuVisible"), true, "T264514"); done(); }); }); }); QUnit.test("T297623 SlideOut layout element has not any unnecessary items", function(assert) { var done = assert.async(); createLayoutController({ initOptions: { navigation: [ { title: "c1", 
id: "c1", onExecute: "#simple", } ] } }).done(function(layoutController) { var slideOut = layoutController.slideOut; slideOut.option("selectedIndex", 0); assert.ok(slideOut.$element().find(".dx-slideout-item").length === 0, "there is no any slideOut item"); done(); }); }); }));
# -*- coding: utf-8 -*-
#
# S3XML Unit Tests
#
# To run this script use:
# python web2py.py -S eden -M -R applications/eden/tests/unit_tests/modules/s3/s3xml.py
#
import unittest
from gluon import *
from gluon.contrib import simplejson as json
try:
    from cStringIO import StringIO
except:
    from StringIO import StringIO
from lxml import etree
from s3.s3xml import S3XMLFormat

# =============================================================================
class S3TreeBuilderTests(unittest.TestCase):
    """ Tests for S3XML.tree(), the S3XML element tree builder """

    # -------------------------------------------------------------------------
    def testEmptyTree(self):
        """ An empty tree has only a root element with success="false" """

        xml = current.xml

        tree = xml.tree(None)
        root = tree.getroot()
        attrib = root.attrib

        self.assertEqual(root.tag, xml.TAG.root)
        self.assertEqual(len(attrib), 1)
        self.assertEqual(attrib["success"], "false")

    # -------------------------------------------------------------------------
    def testIncludeMaxBounds(self):
        """ maxbounds=True adds the four lat/lon boundary attributes """

        xml = current.xml

        tree = xml.tree(None, maxbounds=True)
        root = tree.getroot()
        attrib = root.attrib

        self.assertEqual(root.tag, xml.TAG.root)
        # success + latmin/latmax/lonmin/lonmax = 5 attributes
        self.assertEqual(len(attrib), 5)
        self.assertEqual(attrib["success"], "false")
        self.assertTrue("latmin" in attrib)
        self.assertTrue("latmax" in attrib)
        self.assertTrue("lonmin" in attrib)
        self.assertTrue("lonmax" in attrib)

# =============================================================================
class S3JSONMessageTests(unittest.TestCase):
    """ Tests for S3XML.json_message(), the standard JSON response builder """

    # -------------------------------------------------------------------------
    def testDefaultSuccessMessage(self):
        """ Test whether 200 issued by default if success=True """

        json_message = current.xml.json_message

        msg = json_message()
        msg = json.loads(msg)
        self.assertEqual(len(msg), 2)
        self.assertEqual(msg["status"], "success")
        self.assertEqual(msg["statuscode"], "200")

    # -------------------------------------------------------------------------
    def testDefaultErrorMessage(self):
        """ Test whether 404 issued by default if success=False """

        json_message = current.xml.json_message

        msg = json_message(False)
        msg = json.loads(msg)
        self.assertEqual(len(msg), 2)
        self.assertEqual(msg["status"], "failed")
        self.assertEqual(msg["statuscode"], "404")

    # -------------------------------------------------------------------------
    def testExtendedSuccessMessage(self):
        """ Test success message with specified message text """

        json_message = current.xml.json_message

        msg = json_message(True, message="Test")
        msg = json.loads(msg)
        self.assertEqual(len(msg), 3)
        self.assertEqual(msg["status"], "success")
        self.assertEqual(msg["statuscode"], "200")
        self.assertEqual(msg["message"], "Test")

    # -------------------------------------------------------------------------
    def testExtendedSuccessMessageWithResultNumber(self):
        """ Test success message with message text and result count """

        json_message = current.xml.json_message

        msg = json_message(True, message="Test", results=40)
        msg = json.loads(msg)
        self.assertEqual(len(msg), 4)
        self.assertEqual(msg["status"], "success")
        self.assertEqual(msg["statuscode"], "200")
        self.assertEqual(msg["message"], "Test")
        self.assertEqual(msg["results"], 40)

    # -------------------------------------------------------------------------
    def testExtendedSuccessMessageWithSenderID(self):
        """ Test success message with message text and sender ID """

        json_message = current.xml.json_message

        msg = json_message(True, message="Test", sender="XYZ")
        msg = json.loads(msg)
        self.assertEqual(len(msg), 4)
        self.assertEqual(msg["status"], "success")
        self.assertEqual(msg["statuscode"], "200")
        self.assertEqual(msg["message"], "Test")
        self.assertEqual(msg["sender"], "XYZ")

    # -------------------------------------------------------------------------
    def testExtendedSuccessMessageWithCreatedIDs(self):
        """ Test success message with message text and created-record IDs """

        json_message = current.xml.json_message

        msg = json_message(True, message="Test", created=[1, 2, 3])
        msg = json.loads(msg)
        self.assertEqual(len(msg), 4)
        self.assertEqual(msg["status"], "success")
        self.assertEqual(msg["statuscode"], "200")
        self.assertEqual(msg["message"], "Test")
        self.assertEqual(msg["created"], [1, 2, 3])

    # -------------------------------------------------------------------------
    def testExtendedSuccessMessageWithUpdatedIDs(self):
        """ Test success message with message text and updated-record IDs """

        json_message = current.xml.json_message

        msg = json_message(True, message="Test", updated=[1, 2, 3])
        msg = json.loads(msg)
        self.assertEqual(len(msg), 4)
        self.assertEqual(msg["status"], "success")
        self.assertEqual(msg["statuscode"], "200")
        self.assertEqual(msg["message"], "Test")
        self.assertEqual(msg["updated"], [1, 2, 3])

    # -------------------------------------------------------------------------
    def testExtendedErrorMessage(self):
        """ Test error message with specified error code and text """

        json_message = current.xml.json_message

        msg = json_message(False, 405, message="Test")
        msg = json.loads(msg)
        self.assertEqual(len(msg), 3)
        self.assertEqual(msg["status"], "failed")
        self.assertEqual(msg["statuscode"], "405")
        self.assertEqual(msg["message"], "Test")

    # -------------------------------------------------------------------------
    def testExtendedErrorMessageWithTree(self):
        """ Test error message with specified error code, text and JSON tree """

        json_message = current.xml.json_message

        msg = json_message(False, 405, message="Test", tree='{"test": "value"}')
        msg = json.loads(msg)
        self.assertEqual(len(msg), 4)
        self.assertEqual(msg["status"], "failed")
        self.assertEqual(msg["statuscode"], "405")
        self.assertEqual(msg["message"], "Test")
        # The "tree" parameter must come back parsed, not as a string
        self.assertTrue(isinstance(msg["tree"], dict))
        tree = msg["tree"]
        self.assertEqual(len(tree), 1)
        self.assertEqual(tree["test"], "value")

# =============================================================================
class S3XMLFormatTests(unittest.TestCase):
    """ Test S3XMLFormat helper class """

    # -------------------------------------------------------------------------
    def setUp(self):
        # Minimal input document and a stylesheet exercising every
        # <s3:fields> matching rule (exact table, wildcard, ANY)
        xmlstr = """<?xml version="1.0"?><s3xml/>"""

        stylesheet = """<?xml version="1.0"?>
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
    version="1.0"
    xmlns:s3="http://eden.sahanafoundation.org/wiki/S3">
    <xsl:output method="xml"/>
    <s3:fields tables="gis_location" select="ALL"/>
    <s3:fields tables="org_office" exclude="site_id"/>
    <s3:fields tables="pr_person" select="ALL" exclude="last_name"/>
    <s3:fields tables="pr_*" select="pe_id" exclude="pe_label"/>
    <s3:fields tables="pr_c*" select="ALL"/>
    <s3:fields tables="ANY" select="location_id,site_id"/>
    <xsl:template match="/">
        <test>Test</test>
    </xsl:template>
</xsl:stylesheet>"""

        self.tree = etree.ElementTree(etree.fromstring(xmlstr))
        self.stylesheet = S3XMLFormat(StringIO(stylesheet))

    # -------------------------------------------------------------------------
    def testSelectAllFields(self):
        """ select="ALL" means no include restriction at all """

        include, exclude = self.stylesheet.get_fields("gis_location")
        self.assertEqual(include, None)
        self.assertEqual(exclude, [])

    # -------------------------------------------------------------------------
    def testSelectedFields(self):
        """ An unmatched table falls back to the ANY rule """

        include, exclude = self.stylesheet.get_fields("org_facility")
        self.assertTrue("location_id" in include)
        self.assertTrue("site_id" in include)
        self.assertEqual(len(include), 2)
        self.assertEqual(exclude, [])

    # -------------------------------------------------------------------------
    def testExcludedFields(self):
        """ select="ALL" with an exclude list """

        include, exclude = self.stylesheet.get_fields("pr_person")
        self.assertEqual(include, None)
        self.assertEqual(exclude, ["last_name"])

    # -------------------------------------------------------------------------
    def testCombinedSelectedAndExcludedFields(self):
        """ Exact-table exclude rule combined with the ANY select rule """

        include, exclude = self.stylesheet.get_fields("org_office")
        self.assertEqual(include, ["location_id"])
        self.assertEqual(exclude, [])

    # -------------------------------------------------------------------------
    def testWildcard(self):
        """ Wildcard rules (pr_*, pr_c*) apply by prefix match """

        include, exclude = self.stylesheet.get_fields("pr_address")
        self.assertEqual(include, ["pe_id"])
        self.assertEqual(exclude, ["pe_label"])

        include, exclude = self.stylesheet.get_fields("pr_contact")
        self.assertEqual(include, None)
        self.assertEqual(exclude, ["pe_label"])

    # -------------------------------------------------------------------------
    def testTransformation(self):
        """ transform() must apply the wrapped XSLT stylesheet """

        result = self.stylesheet.transform(self.tree)
        root = result.getroot()
        self.assertEqual(root.tag, "test")
        self.assertEqual(len(root), 0)
        self.assertEqual(root.text, "Test")

# =============================================================================
def run_suite(*test_classes):
    """ Run the test suite """

    loader = unittest.TestLoader()
    suite = unittest.TestSuite()
    for test_class in test_classes:
        tests = loader.loadTestsFromTestCase(test_class)
        suite.addTests(tests)
    if suite is not None:
        unittest.TextTestRunner(verbosity=2).run(suite)
    return

if __name__ == "__main__":

    run_suite(
        S3TreeBuilderTests,
        S3JSONMessageTests,
        S3XMLFormatTests,
    )

# END ========================================================================
import yaml


def process_lidar_data(file_path):
    """Load a LiDAR scan message from a YAML file and print summary fields.

    Parameters
    ----------
    file_path : str
        Path to a YAML file holding a LaserScan-like message with
        'ranges', 'range_max' and 'header' -> 'stamp' -> 'secs' fields.
    """
    # Use a context manager so the file handle is always closed
    # (the original left it open until garbage collection).
    with open(file_path, 'r') as f:
        # NOTE(review): FullLoader is kept for compatibility with existing
        # data files; prefer yaml.safe_load if the input is untrusted.
        lidarmsg = yaml.load(f, Loader=yaml.FullLoader)

    # Print the list of range values
    ranges = lidarmsg['ranges']
    print("Range values:", ranges)

    # Print the maximum range value by adding 20 to the 'range_max' field
    print("Maximum range value:", lidarmsg['range_max'] + 20)

    # Print the timestamp in seconds from the 'stamp' field within the 'header' field
    print("Timestamp (secs):", lidarmsg['header']['stamp']['secs'])


# Example usage
process_lidar_data('lidar_data.yaml')
package topology_sort;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.*;

/**
 *
 * @author exponential-e
 * Baekjoon #9470: Strahler order
 *
 * @see https://www.acmicpc.net/problem/9470
 *
 */
public class Boj9470 {
    private static final String NEW_LINE = "\n";
    private static final String SPACE = " ";

    // river[i] = list of downstream nodes directly fed by node i
    private static List<Integer>[] river;
    // indegree[i] = number of not-yet-processed incoming edges of node i
    private static int[] indegree;
    // strahler[i] packs two values into one int:
    //   (Strahler order of node i) * CIPHER
    //   + (how many incoming edges with that same order have been seen so far)
    private static int[] strahler;
    private static final int CIPHER = 10_000;

    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));

        int T = Integer.parseInt(br.readLine());
        StringBuilder sb = new StringBuilder();

        while(T-- > 0) {
            StringTokenizer st = new StringTokenizer(br.readLine());

            // K = scenario number (echoed in the output),
            // M = node count, P = edge count
            int K = Integer.parseInt(st.nextToken());
            int M = Integer.parseInt(st.nextToken());
            int P = Integer.parseInt(st.nextToken());

            river = new ArrayList[M];
            indegree = new int[M];
            strahler = new int[M];

            for(int i = 0; i < M; i++) {
                river[i] = new ArrayList<>();
            }

            // Edges are given 1-based; store them 0-based.
            while(P-- > 0) {
                st = new StringTokenizer(br.readLine());

                int from = Integer.parseInt(st.nextToken()) - 1;
                int to = Integer.parseInt(st.nextToken()) - 1;

                river[from].add(to);
                indegree[to]++;
            }

            sb.append(K).append(SPACE).append(topologySort(M)).append(NEW_LINE);
        }
        System.out.println(sb.toString());
    }

    /**
     *
     * Topological sort computing the Strahler order.
     *
     * Sources (indegree 0) start with order 1 and edge-count 1.
     * For each edge current -> next:
     *   - same order arriving again: bump the packed count; once two or more
     *     edges of the same order have arrived, promote next to order + 1
     *     (count resets to 0),
     *   - a strictly larger order arriving: next takes that order with count 1.
     *
     * @param m node count; node m-1 is the river mouth whose order is returned
     * @return the Strahler order of node m-1
     */
    private static int topologySort(int m) {
        Queue<Integer> q = new LinkedList<>();

        for (int i = 0; i < indegree.length; i++) {
            if(indegree[i] != 0) continue;
            q.offer(i);
            strahler[i] = CIPHER + 1;
        }

        while(!q.isEmpty()) {
            int current = q.poll();

            for(int next: river[current]) {
                indegree[next]--;

                // Unpack the orders of next and current.
                int head = strahler[next] / CIPHER;
                int curHead = strahler[current] / CIPHER;

                if(head == curHead){
                    strahler[next] = strahler[next] + 1;
                    // Two (or more) tributaries of equal order -> order + 1.
                    if(strahler[next] % CIPHER >= 2) strahler[next] = (head + 1) * CIPHER;
                }
                else if(head < curHead){
                    strahler[next] = curHead * CIPHER + 1;
                }

                // Enqueue next only once all its incoming edges are processed.
                if(indegree[next] != 0) continue;
                q.offer(next);
            }
        }
        return strahler[m - 1] / CIPHER;
    }
}
// <filename>pkg/common/report/generate_report.go
package report

import (
	"context"
	"fmt"
	"log"
	"regexp"
	"sort"
	"strings"
	"time"

	"github.com/openshift/osde2e/pkg/common/config"
	"github.com/openshift/osde2e/pkg/common/prometheus"
	"github.com/spf13/viper"

	v1 "github.com/prometheus/client_golang/api/prometheus/v1"
	"github.com/prometheus/common/model"
)

const (
	// gateQuery counts JUnit results grouped by the labels the report needs.
	gateQuery = `count by (job, install_version, suite, testname, result) (cicd_jUnitResult)`

	// stepDurationInHours is the Prometheus query-range step.
	stepDurationInHours = 4
)

// reportData is the per-job intermediate aggregation: the versions the job
// ran against and a failure count per test name.
type reportData struct {
	Versions []string
	Failures map[string]int
}

// GenerateReport queries Prometheus for JUnit results over the configured
// time window, aggregates them per job, filters jobs through the allowlist
// regexes, and returns the assembled weather report.
func GenerateReport() (WeatherReport, error) {
	// Range for the queries issued to Prometheus
	queryRange := v1.Range{
		Start: time.Now().Add(-time.Hour * (viper.GetDuration(config.Weather.StartOfTimeWindowInHours))),
		End:   time.Now(),
		Step:  stepDurationInHours * time.Hour,
	}

	client, err := prometheus.CreateClient()
	if err != nil {
		return WeatherReport{}, fmt.Errorf("error while creating client: %v", err)
	}

	promAPI := v1.NewAPI(client)
	// NOTE(review): this local variable shadows the imported "context"
	// package for the rest of the function.
	context, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// Assemble the allowlist regexes. We'll only produce a report based on these regexes.
	allowlistRegexes := []*regexp.Regexp{}
	jobAllowlistString := viper.GetString(config.Weather.JobAllowlist)
	for _, allowlistRegex := range strings.Split(jobAllowlistString, ",") {
		allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(allowlistRegex))
	}

	results, warnings, err := promAPI.QueryRange(context, gateQuery, queryRange)
	if err != nil {
		return WeatherReport{}, fmt.Errorf("error during query: %v", err)
	}

	if len(warnings) > 0 {
		log.Printf("Warnings: %v", warnings)
	}

	// Generate report from query results.
	if matrixResults, ok := results.(model.Matrix); ok {
		jobReportData, err := generateVersionsAndFailures(matrixResults)
		if err != nil {
			return WeatherReport{}, err
		}

		weatherReport := WeatherReport{
			ReportDate: time.Now().UTC(),
		}

		for job, reportData := range jobReportData {
			allowed := false

			// If a job matches the allowlist, include it in the weather report.
			for _, allowlistRegex := range allowlistRegexes {
				if allowlistRegex.MatchString(job) {
					allowed = true
					break
				}
			}

			if allowed {
				weatherReport.Jobs = append(weatherReport.Jobs, JobReport{
					Name:         job,
					// A job is viable only if it has no qualifying failures.
					Viable:       len(reportData.Failures) == 0,
					Versions:     reportData.Versions,
					FailingTests: arrayFromMapKeys(reportData.Failures),
				})
			}
		}

		sort.Stable(weatherReport)

		return weatherReport, nil
	}

	return WeatherReport{}, fmt.Errorf("results not in the expected format")
}

// generateVersionsAndFailures generates an intermediary data structure from the results that can be used to populate
// the weather report.
func generateVersionsAndFailures(matrixResults model.Matrix) (map[string]*reportData, error) {
	jobReportData := map[string]*reportData{}

	for _, sample := range matrixResults {
		job := fmt.Sprintf("%s", sample.Metric["job"])

		// If there's no corresponding report data for a given job, make an empty struct.
		if _, ok := jobReportData[job]; !ok {
			jobReportData[job] = &reportData{
				Versions: []string{},
				Failures: map[string]int{},
			}
		}

		jobReportData[job].addVersion(fmt.Sprintf("%s", sample.Metric["install_version"]))

		key := fmt.Sprintf("%s", sample.Metric["testname"])

		if sample.Metric["result"] == "failed" {
			// Initialize the failure count for the key if it doesn't exist
			if _, ok := jobReportData[job].Failures[key]; !ok {
				jobReportData[job].Failures[key] = 0
			}

			// Each matrix value is one sample step in which the test failed.
			jobReportData[job].Failures[key] = jobReportData[job].Failures[key] + len(sample.Values)
		}
	}

	// Filter the failure results so that only results that cross the threshold are included.
	for _, r := range jobReportData {
		r.filterFailureResults()
	}

	return jobReportData, nil
}

// addVersion adds versions to the reportData, eliminating duplicates.
func (r *reportData) addVersion(versionToAdd string) {
	for _, version := range r.Versions {
		if version == versionToAdd {
			return
		}
	}

	r.Versions = append(r.Versions, versionToAdd)
}

// filterFailureResults eliminates results from the report that don't match the failure criteria.
// At the moment, this is pretty simple: just if tests fail more than once over the timeframe.
func (r *reportData) filterFailureResults() {
	filteredFailures := map[string]int{}
	for testname, failureCount := range r.Failures {
		if failureCount >= (viper.GetInt(config.Weather.NumberOfSamplesNecessary) - 1) {
			filteredFailures[testname] = failureCount
		}
	}

	r.Failures = filteredFailures
}

// arrayFromMapKeys returns the keys of the given map in unspecified order.
func arrayFromMapKeys(mapToExtractFrom map[string]int) []string {
	keys := []string{}
	for key := range mapToExtractFrom {
		keys = append(keys, key)
	}

	return keys
}
import React from 'react';
import { Divider, Layout as AntdLayout } from 'antd';
import CustomHeader from './Header';

const { Content } = AntdLayout;

// Page scaffold: custom header on top, white content area below,
// stretched to at least the full viewport height.
type ILayoutProps = {
  children: React.ReactNode;
};

const Layout = ({ children }: ILayoutProps) => (
  <AntdLayout style={{ minHeight: '100vh' }}>
    <CustomHeader />
    <Content className="bg-white flex flex-col h-full">
      <Divider className="mt-0" />
      {children}
    </Content>
    {/* <Footer className='bg-black text-center'> <Powered by PHOS> </Footer> */}
  </AntdLayout>
);

export default Layout;
# Python 2 script: renders one landscape certificate PDF per name in
# names.txt, drawn over the certifoss.jpg background image.
import fpdf

names=open("names.txt","r")
x=names.read().split("\n")
# print x

# NOTE(review): the loop stops at len(x)-2 and each name drops its last
# character (x[i][:-1]) — presumably to skip a trailing blank line and strip
# a trailing '\r' from Windows line endings; verify against the actual
# format of names.txt.
for i in range(len(x)-2):
    name = x[i][:-1]
    # Suffix the index so duplicate names don't overwrite each other's files.
    filename=name+str(i)
    # print name
    pdffile = fpdf.FPDF()
    pdffile.compress = False
    pdffile.add_page(orientation = 'L')
    pdffile.add_font('boldfont','','bold4.otf',uni=True)
    pdffile.set_font('boldfont','',55,)
    pdffile.set_text_color(8,17,63)
    # Background image scaled down to a third of its pixel size.
    pdffile.image('certifoss.jpg',2,2,877/3,620/3)
    pdffile.ln(55)
    pdffile.cell(0,80.5,name,0,1,'C')
    try:
        pdffile.output('certipdfs/'+filename+'.pdf')
        print "made for " , name
    except:
        # Best-effort: a bad name (e.g. unencodable character) must not
        # abort the whole batch.
        print "*******failed for ",name,"***********"
class CustomTile(BaseTile):
    """Tile whose footprint is the whole default-projections bounding box."""

    def calculate_area(self):
        """Return the bbox area as width * height.

        DEFAULT_PROJECTIONS_BBOX is expected to be the tuple
        (min_longitude, min_latitude, max_longitude, max_latitude).
        """
        west, south, east, north = DEFAULT_PROJECTIONS_BBOX
        return (east - west) * (north - south)


# Demonstration of usage
if __name__ == "__main__":
    # Create an instance of CustomTile and report its covered area.
    custom_tile = CustomTile()
    area_covered = custom_tile.calculate_area()
    print("Area covered by the custom tile:", area_covered)
package com.littlejenny.gulimall.ware.entity;

import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;

import java.io.Serializable;
import java.util.Date;
import lombok.Data;

/**
 * Stock work-order detail (one row per SKU within a stock work order)
 *
 * @author littlejenny
 * @email <EMAIL>
 * @date 2021-07-16 17:35:23
 */
@Data
@TableName("wms_ware_order_task_detail")
public class WareOrderTaskDetailEntity implements Serializable {
	private static final long serialVersionUID = 1L;

	/**
	 * id
	 */
	@TableId
	private Long id;
	/**
	 * sku_id
	 */
	private Long skuId;
	/**
	 * sku_name
	 */
	private String skuName;
	/**
	 * purchased quantity
	 */
	private Integer skuNum;
	/**
	 * (was) work-order id
	 * (now) holds the order serial number (OrderSn) instead
	 */
	private String orderSn;
	/**
	 * Processing state of this work-order detail:
	 * 1. Locked   -> order placed but not yet paid
	 * 2. Unlocked -> order expired, order cancelled, or distributed rollback
	 *    of the order flow; covers the case where stock was locked remotely
	 *    during ordering, the rest of the order flow rolled back, yet the
	 *    remote stock was never unlocked
	 * 3. Deducted -> payment completed
	 */
	private Integer taskState;
	/**
	 * warehouse id the stock was locked in
	 */
	private Long wareId;

}
#!/bin/sh
#
# Copyright 2019 PingCAP, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# See the License for the specific language governing permissions and
# limitations under the License.

# Integration test: TiDB Lightning must refuse to resume from checkpoints
# whose table IDs no longer match the target tables (the schema was dropped
# and recreated between runs), and must suggest the ctl command to remove
# them. Exercised twice: with MySQL-backed and with file-backed checkpoints.

set -eux

# clean env
rm -f "$TEST_DIR/lightning-checkpoint-dirty-tableid.log"
run_sql 'DROP DATABASE IF EXISTS tidb_lightning_checkpoint'

# The failpoint makes Lightning exit right after writing the checkpoints,
# so the first run leaves checkpoints behind without importing anything.
export GO_FAILPOINTS="github.com/pingcap/tidb-lightning/lightning/restore/InitializeCheckpointExit=return(true)"
run_lightning --enable-checkpoint=1 --log-file "$TEST_DIR/lightning-checkpoint-dirty-tableid.log" --config "tests/$TEST_NAME/mysql.toml" -d "tests/$TEST_NAME/data"

# Dropping the database invalidates the recorded table IDs.
run_sql 'DROP DATABASE IF EXISTS cpdt'

export GO_FAILPOINTS=""

# The second run is expected to fail; disable -e around it.
set +e
run_lightning --enable-checkpoint=1 --log-file "$TEST_DIR/lightning-checkpoint-dirty-tableid.log" --config "tests/$TEST_NAME/mysql.toml" -d "tests/$TEST_NAME/data"
set -e

# Exactly one refusal message and one removal suggestion per table (2 tables).
ILLEGAL_CP_COUNT=$(grep "TiDB Lightning has detected tables with illegal checkpoints. To prevent data mismatch, this run will stop now. Please remove these checkpoints first" "$TEST_DIR/lightning-checkpoint-dirty-tableid.log" | wc -l)
TABLE_SUGGEST=$(grep "./tidb-lightning-ctl --checkpoint-remove=" "$TEST_DIR/lightning-checkpoint-dirty-tableid.log" | wc -l)
[ $ILLEGAL_CP_COUNT -eq 1 ]
[ $TABLE_SUGGEST -eq 2 ]

# Try again with the file checkpoints

# clean env
run_sql 'DROP DATABASE IF EXISTS cpdt'
rm -f "$TEST_DIR/lightning-checkpoint-dirty-tableid.log"
rm -f "/tmp/tidb_lightning_checkpoint.pb"

export GO_FAILPOINTS="github.com/pingcap/tidb-lightning/lightning/restore/InitializeCheckpointExit=return(true)"
run_lightning --enable-checkpoint=1 --log-file "$TEST_DIR/lightning-checkpoint-dirty-tableid.log" --config "tests/$TEST_NAME/file.toml" -d "tests/$TEST_NAME/data"

run_sql 'DROP DATABASE IF EXISTS cpdt'

export GO_FAILPOINTS=""

set +e
run_lightning --enable-checkpoint=1 --log-file "$TEST_DIR/lightning-checkpoint-dirty-tableid.log" --config "tests/$TEST_NAME/file.toml" -d "tests/$TEST_NAME/data"
set -e

ILLEGAL_CP_COUNT=$(grep "TiDB Lightning has detected tables with illegal checkpoints. To prevent data mismatch, this run will stop now. Please remove these checkpoints first" "$TEST_DIR/lightning-checkpoint-dirty-tableid.log" | wc -l)
TABLE_SUGGEST=$(grep "./tidb-lightning-ctl --checkpoint-remove=" "$TEST_DIR/lightning-checkpoint-dirty-tableid.log" | wc -l)
[ $ILLEGAL_CP_COUNT -eq 1 ]
[ $TABLE_SUGGEST -eq 2 ]
#!/bin/bash -eux
# Provision the vagrant user: install the public insecure Vagrant key and
# configure passwordless sudo. Runs under -e, so every command must succeed.

# -p so a re-run (or a pre-existing ~/.ssh) does not abort the script;
# the bare mkdir failed under `set -e` when the directory already existed.
mkdir -p /home/vagrant/.ssh
chmod 700 /home/vagrant/.ssh
cd /home/vagrant/.ssh
wget --no-check-certificate 'https://raw.github.com/mitchellh/vagrant/master/keys/vagrant.pub' -O authorized_keys
chmod 600 /home/vagrant/.ssh/authorized_keys
chown -R vagrant /home/vagrant/.ssh

# sudoers don't need password for sudo.
sed -i -e '/Defaults\s\+env_reset/a Defaults\texempt_group=sudo' /etc/sudoers
sed -i -e 's/%sudo ALL=(ALL:ALL) ALL/%sudo ALL=NOPASSWD:ALL/g' /etc/sudoers
sed -i "s/^.*requiretty/#Defaults requiretty/" /etc/sudoers

# Disable daily apt unattended updates.
echo 'APT::Periodic::Enable "0";' >> /etc/apt/apt.conf.d/10periodic
import { Injectable } from '@angular/core';

/**
 * App-wide UI configuration state — currently just whether the
 * navigation menu is open. The menu starts out open.
 */
@Injectable({
  providedIn: 'root',
})
export class ConfigService {
  private menuOpen = true;

  /** Flip the menu between open and closed. */
  toggleMenuOpen(): void {
    this.menuOpen = !this.menuOpen;
  }

  /** Current open/closed state of the menu. */
  isMenuOpen(): boolean {
    return this.menuOpen;
  }
}
/* eslint-disable no-undef */ // import globalStyles from './globalStyles.css'; // import buttonStyles from './buttonStyles.css'; <> {/* Hint: inspect the markup to see how the classes differ */} <ThemeProvider classNameMap={globalStyles}> <Button variant="primary">Blue Button</Button> </ThemeProvider>{' '} <Button variant="primary" classNameMap={buttonStyles}> Orchid button </Button> </>;
// Test cases specifically for issue #180
import { Mesh, BufferGeometry, TorusBufferGeometry, Scene, Raycaster, MeshBasicMaterial } from 'three';
import { acceleratedRaycast, computeBoundsTree, disposeBoundsTree, SAH, AVERAGE } from '../src/index.js';

Mesh.prototype.raycast = acceleratedRaycast;
BufferGeometry.prototype.computeBoundsTree = computeBoundsTree;
BufferGeometry.prototype.disposeBoundsTree = disposeBoundsTree;

// Deterministic seeded PRNG (linear congruential generator) so the random
// scenes and rays are reproducible across runs.
// https://stackoverflow.com/questions/3062746/special-simple-random-number-generator
let _seed = null;
function random() {

	if ( _seed === null ) throw new Error();

	const a = 1103515245;
	const c = 12345;
	// NOTE(review): 2e31 is 2 * 10^31, not 2 ** 31 — possibly a typo, but
	// changing it would alter every seeded sequence these pinned test seeds
	// depend on, so it is kept as-is.
	const m = 2e31;

	_seed = ( a * _seed + c ) % m;
	return _seed / m;

}

// Builds a scene of randomly transformed torus meshes (alternating grouped /
// ungrouped geometry) from transformSeed, casts one ray from raySeed, and
// asserts that BVH-accelerated raycasts match the stock three.js raycasts.
function runRandomTest( options, transformSeed, raySeed ) {

	let scene = null;
	let raycaster = null;
	let ungroupedGeometry = null;
	let ungroupedBvh = null;
	let groupedGeometry = null;
	let groupedBvh = null;

	describe( `Transform Seed : ${ transformSeed }`, () => {

		beforeAll( () => {

			ungroupedGeometry = new TorusBufferGeometry( 1, 1, 40, 10 );
			groupedGeometry = new TorusBufferGeometry( 1, 1, 40, 10 );

			// Split the grouped geometry's index into 10 equal draw groups,
			// all using material index 0.
			const groupCount = 10;
			const groupSize = groupedGeometry.index.array.length / groupCount;
			for ( let g = 0; g < groupCount; g ++ ) {

				const groupStart = g * groupSize;
				groupedGeometry.addGroup( groupStart, groupSize, 0 );

			}

			groupedGeometry.computeBoundsTree( options );
			ungroupedGeometry.computeBoundsTree( options );

			ungroupedBvh = ungroupedGeometry.boundsTree;
			groupedBvh = groupedGeometry.boundsTree;

			scene = new Scene();
			raycaster = new Raycaster();

			_seed = transformSeed;
			random(); // call random() to seed with a larger value

			for ( var i = 0; i < 10; i ++ ) {

				let geo = i % 2 ? groupedGeometry : ungroupedGeometry;
				let mesh = new Mesh( geo, new MeshBasicMaterial() );
				mesh.rotation.x = random() * 10;
				mesh.rotation.y = random() * 10;
				mesh.rotation.z = random() * 10;
				mesh.position.x = random();
				mesh.position.y = random();
				mesh.position.z = random();
				scene.add( mesh );
				mesh.updateMatrix( true );
				mesh.updateMatrixWorld( true );

			}

		} );

		it( `Cast Seed : ${ raySeed }`, () => {

			_seed = raySeed;
			random(); // call random() to seed with a larger value

			// Ray aimed from a random point back toward the origin.
			raycaster.firstHitOnly = false;
			raycaster.ray.origin.set( random() * 10, random() * 10, random() * 10 );
			raycaster.ray.direction.copy( raycaster.ray.origin ).multiplyScalar( - 1 ).normalize();

			ungroupedGeometry.boundsTree = ungroupedBvh;
			groupedGeometry.boundsTree = groupedBvh;
			const bvhHits = raycaster.intersectObject( scene, true );

			raycaster.firstHitOnly = true;
			const firstHit = raycaster.intersectObject( scene, true );

			// run the og hits _after_ because in the lazy generation case
			// the indices will be changing as the tree is generated and make
			// the results will look different.
			ungroupedGeometry.boundsTree = null;
			groupedGeometry.boundsTree = null;
			const ogHits = raycaster.intersectObject( scene, true );

			expect( ogHits ).toEqual( bvhHits );
			expect( firstHit[ 0 ] ).toEqual( ogHits[ 0 ] );

		} );

	} );

}

runRandomTest( { strategy: AVERAGE, packData: true, lazyGeneration: false }, 7830035629, 4697211981 );
runRandomTest( { strategy: AVERAGE, packData: true, lazyGeneration: false }, 8294928772, 1592666709 );
runRandomTest( { strategy: SAH, packData: true, lazyGeneration: false }, 81992501, 8903271423 );
class Compound_Fetcher_Rhea:
    """Stub fetcher for compound records in the Rhea database.

    Currently only announces the requested operations; the actual
    retrieval/linking logic is still to be implemented.
    """

    def __init__(self, compound_id, db='Rhea'):
        # Identifier of the compound and the database it lives in.
        self.compound_id = compound_id
        self.db = db

    def get_all_info(self):
        """Announce retrieval of every known detail for this compound."""
        print(f'Fetching all information for compound {self.compound_id} in {self.db}')
        # Implement code to retrieve and display all available information about the compound

    def find_reaction(self, query_id):
        """Announce linking this compound to the reaction ``query_id``."""
        print(f'Linking from compound {self.compound_id} in {self.db} to reaction {query_id}')
        # Implement code to link the compound to the specified reaction in the database


if __name__ == '__main__':
    fetcher = Compound_Fetcher_Rhea('7580')
    fetcher.get_all_info()
    fetcher.find_reaction('12345')
#!/bin/bash sudo apt update sudo apt install -y --no-install-recommends gunicorn xvfb wget libnss3 libxss1 libappindicator1 libindicator7 libsdl1.2-dev fonts-liberation libappindicator3-1 libasound2 libatk-bridge2.0-0 libgtk-3-0 libnspr4 libnss3 libxtst6 lsb-release xdg-utils enchant cd /usr/bin sudo wget https://chromedriver.storage.googleapis.com/79.0.3945.36/chromedriver_linux64.zip sudo unzip chromedriver_linux64.zip sudo wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb \ && sudo dpkg -i google-chrome*.deb sudo mkdir /tmp_images sudo chmod a+rwx /tmp_images pip install -r requirements.txt pip install pytest pytest-cov coveralls pyenchant ls -alsh /
#!/bin/bash
#
# Monitoring of 1C Enterprise 8.3 (license server) — Zabbix helper module
#
# (c) 2019-2020, Alexey Yu. Fedotov
#
# Email: fedotov@kaminsoft.ru
#

# Shared helpers: check_ring_license, get_license_list, execute_tasks,
# pop_clusters_list, make_ras_params, error, RAS_PARAMS, CLSTR_CACHE.
source ${0%/*}/1c_common_module.sh 2>/dev/null || { echo "ОШИБКА: Не найден файл 1c_common_module.sh!" ; exit 1; }

# Print "<license-file-count>:<total-user-count>" over all ring licenses.
function licenses_summary {

    RING_TOOL=$(check_ring_license) || exit 1

    ( execute_tasks license_info $(get_license_list ${RING_TOOL}) ) | \
        awk 'BEGIN { files=0; users=0 } { files+=1; users+=$1 } END { print files":"users }'

}

# Extract the licensed-user count from `ring license info` for license ${1}.
function license_info {
    ${RING_TOOL} license info --send-statistics false --name ${1} | \
        grep -Pe '(Описание|Description).*на \d+ .*' | perl -pe 's/.*на (\d+) .*/\1/;'
}

# For each cluster on RAS host ${1} (format host:cluster-list), print
# "<cluster-name>:<sessions-on-this-host>:<distinct-users>:<total-sessions>:<console-sessions>:<web-client-sessions>".
function get_license_counts {

    CLSTR_LIST=${1##*:}

    for CURR_CLSTR in ${CLSTR_LIST//;/ }; do
        timeout -s HUP ${RAS_PARAMS[timeout]} rac session list --licenses --cluster=${CURR_CLSTR%,*} \
            ${RAS_PARAMS[auth]} ${1%%:*}:${RAS_PARAMS[port]} 2>/dev/null | \
            grep -Pe "(user-name|rmngr-address|app-id)" | \
            perl -pe 's/ //g; s/\n/|/; s/rmngr-address:(\"(.*)\"|)\||/\2/; s/app-id://; s/user-name:/\n/;' | \
            awk -F"|" -v hostname=${HOSTNAME,,} -v cluster=${CURR_CLSTR#*,} 'BEGIN { sc=0; hc=0; cc=0; wc=0 } \
                { if ($1 != "") { sc+=1; uc[$1]; if ( index(tolower($3), hostname) > 0 ) { hc+=1 } \
                if ($2 == "WebClient") { wc+=1 } if ($3 == "") { cc+=1 } } } \
                END {print cluster":"hc":"length(uc)":"sc":"cc":"wc }'
    done

}

# Per-cluster license usage plus a "summary:..." totals line.
function used_license {
    ( execute_tasks get_license_counts $( pop_clusters_list ) ) | \
        awk -F: 'BEGIN {ul=0; as=0; cl=0; uu=0; wc=0} \
            { print $0; ul+=$2; uu+=$3; as+=$4; cl+=$5; wc+=$6; } \
            END { print "summary:"ul":"uu":"as":"cl":"wc }'
}

# Emit the cluster list from the cache file as Zabbix low-level-discovery
# JSON: {"data":[{"{#CLSTR_UUID}":"...","{#CLSTR_NAME}":"..."},...]}.
function get_clusters_list {

    [[ ! -f ${CLSTR_CACHE} ]] && error "Не найден файл списка кластеров!"

    cut -f2 -d: ${CLSTR_CACHE} | perl -pe 's/;[^\n]/\n/; s/;//' | \
        awk 'BEGIN {FS=","; print "{\"data\":[" } \
            {print "{\"{#CLSTR_UUID}\":\""$1"\",\"{#CLSTR_NAME}\":\""$2"\"}," } \
            END { print "]}" }' | \
        perl -pe 's/\n//;' | perl -pe 's/(.*),]}/\1]}\n/'

}

# Dispatch on the requested mode.
case ${1} in
    info) licenses_summary ;;
    used) shift; make_ras_params ${@}; used_license ;;
    clusters) get_clusters_list ;;
    *) error "${ERROR_UNKNOWN_MODE}" ;;
esac
# Dump vis-timeline JSON for every result pickle of a dataset, publish the
# files to the remote web host, and make them world-readable.
#
# Usage: $0 <dataset-name> [extra-args-for-dump_vis_timeline_data.py]

# Quote "$1": the unquoted test broke (or mis-evaluated) when the argument
# was empty or contained spaces; exit 1 instead of the non-standard -1.
if [ -z "$1" ]; then
    echo "dataset name is not given"
    exit 1
fi

dataset="$1"
pickle_dir="tmp/${dataset}"
extra=$2

# output_dir="/cs/home/hxiao/public_html/event_html/data/${dataset}"
output_dir="html/data/${dataset}"
remote_host="shell.cs.helsinki.fi"
remote_dir="/cs/home/hxiao/public_html/event_html/data/${dataset}"

if [ ! -d "${output_dir}/timeline" ]; then
    mkdir -p "${output_dir}/timeline"
fi

# Iterate the glob directly instead of parsing `ls` output, which
# word-splits on whitespace and mangles unusual file names.
for p in "${pickle_dir}"/result-*.pkl; do
    echo "${p}"
    output_name=$(basename "${p}")
    output_name="${output_name%.*}.json"
    if [ ! -f "${output_dir}/timeline/${output_name}" ]; then
        # ${extra} is intentionally unquoted so it can expand to several
        # extra command-line flags.
        python dump_vis_timeline_data.py \
            --cand_trees_path "${p}" \
            --interactions_path data/${dataset}/interactions.* \
            --people_path data/${dataset}/people.* \
            --corpus_dict_path "data/${dataset}/dict.pkl" \
            --lda_model_path $(ls data/${dataset}/model-*.lda) \
            --output_path "${output_dir}/timeline/${output_name}" \
            -k 10 \
            ${extra}
        echo "Writing to '${output_dir}/timeline/${output_name}'"
    else
        echo "computed, ingore ${output_dir}/timeline/${output_name}"
    fi
done

echo "dumping timeline names..."
python dump_all_event_json_names.py \
    "${output_dir}/timeline" \
    "${output_dir}/timeline_names.json"

# chmod -R a+rx /cs/home/hxiao/public_html/event_html/data
rsync -vr "${output_dir}/timeline" "${remote_host}:${remote_dir}/"
rsync -v "${output_dir}/timeline_names.json" "${remote_host}:${remote_dir}/"
ssh "${remote_host}" "chmod -R a+rx ${remote_dir}"
# ------------------------------------------------------------------------------------------------- # Copyright (c) 2015 zsh-syntax-highlighting contributors # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, are permitted # provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this list of conditions # and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, this list of # conditions and the following disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of the zsh-syntax-highlighting contributors nor the names of its contributors # may be used to endorse or promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND # FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER # IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT # OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# ------------------------------------------------------------------------------------------------- # -*- mode: zsh; sh-indentation: 2; indent-tabs-mode: nil; sh-basic-offset: 2; -*- # vim: ft=zsh sw=2 ts=2 et # ------------------------------------------------------------------------------------------------- BUFFER=": \$'*' 'foo'" expected_region_highlight=( "1 1 builtin" # : "3 6 default" # $'*' "3 6 dollar-quoted-argument" # $'*' - not a glob "8 12 default" # 'foo' "8 12 single-quoted-argument" # 'foo' )