text
stringlengths
1
1.05M
# Create the local mail storage hierarchy for the three mailbox formats.

# Maildir format: each mailbox needs cur/, new/ and tmp/ subdirectories.
for box in inbox draft sent; do
    install -v -m 755 -d ~/Mail/maildir/$box/cur
    install -v -m 755 -d ~/Mail/maildir/$box/new
    install -v -m 755 -d ~/Mail/maildir/$box/tmp
done

# MH format: one directory per mailbox, each with an empty .mh_sequences file.
# BUG FIX: the original touched ~/Mail/<box>/.mh_sequences, but those
# directories were never created — the sequence files live under ~/Mail/mh/.
for box in inbox draft sent; do
    install -v -m 755 -d ~/Mail/mh/$box
    touch ~/Mail/mh/$box/.mh_sequences
done

# mbox format: a single directory holding one flat file per mailbox.
install -v -m 755 -d ~/Mail/mbox
touch ~/Mail/mbox/Inbox
touch ~/Mail/mbox/Draft
touch ~/Mail/mbox/Sent   # fixed stray double slash in the original path

printf "done\n"
# models.py from django.db import models class UploadFile(models.Model): file = models.FileField(upload_to='uploads/') uploaded_at = models.DateTimeField(auto_now_add=True) # admin.py from django.contrib import admin from .models import UploadFile admin.site.register(UploadFile)
use std::future::Future;
use std::pin::Pin;
use std::sync::mpsc;
use std::sync::{Arc, Mutex};
use std::task::{Context, Poll, Wake, Waker};
use std::thread;

/// A queued task: a pinned, heap-allocated future with no result.
/// BUG FIX: the original stored `Box<dyn Future<...>>`, which can neither be
/// cloned (as `run_tasks` attempted) nor polled without pinning — it did not
/// compile. Futures are now pinned on insertion and *moved out* when run.
type BoxFuture = Pin<Box<dyn Future<Output = ()> + Send>>;

/// Waker that unparks the thread driving the future.
struct ThreadWaker(thread::Thread);

impl Wake for ThreadWaker {
    fn wake(self: Arc<Self>) {
        self.0.unpark();
    }
}

/// Minimal executor: polls `fut` on the current thread, parking between polls.
/// Replaces the per-thread tokio runtime — no external dependency needed for
/// futures that only need poll/wake semantics.
fn block_on(mut fut: BoxFuture) {
    let waker = Waker::from(Arc::new(ThreadWaker(thread::current())));
    let mut cx = Context::from_waker(&waker);
    loop {
        match fut.as_mut().poll(&mut cx) {
            Poll::Ready(()) => return,
            Poll::Pending => thread::park(),
        }
    }
}

/// Collects futures and runs them all to completion, one OS thread each.
struct AsyncTaskScheduler {
    tasks: Arc<Mutex<Vec<BoxFuture>>>,
}

impl AsyncTaskScheduler {
    fn new() -> Self {
        AsyncTaskScheduler {
            tasks: Arc::new(Mutex::new(Vec::new())),
        }
    }

    /// Queue a future for later execution by `run_tasks`.
    fn add_task<F>(&self, task: F)
    where
        F: Future<Output = ()> + Send + 'static,
    {
        self.tasks.lock().unwrap().push(Box::pin(task));
    }

    /// Run every queued task on its own thread and block until all finish.
    /// The queue is drained; a second call runs only tasks added since.
    fn run_tasks(&self) {
        // Take ownership of the queued futures instead of cloning them.
        let tasks: Vec<BoxFuture> = std::mem::take(&mut *self.tasks.lock().unwrap());
        let count = tasks.len();

        let (sender, receiver) = mpsc::channel();
        for task in tasks {
            let sender = sender.clone();
            thread::spawn(move || {
                block_on(task);
                // Receiver may already be gone if the caller panicked; ignore.
                let _ = sender.send(());
            });
        }
        drop(sender);

        // Wait for one completion signal per spawned task.
        for _ in 0..count {
            receiver.recv().unwrap();
        }
    }
}

// Example usage
fn main() {
    let scheduler = AsyncTaskScheduler::new();
    scheduler.add_task(async {
        // Perform some asynchronous task
    });
    scheduler.add_task(async {
        // Perform another asynchronous task
    });
    scheduler.run_tasks();
}
<filename>util/replacecr/replace_cr_test.go package replacecr import ( "bytes" "testing" ) func TestReader(t *testing.T) { input := []byte("foo\r\rbar\r\nbaz\r\r") expect := []byte("foo\r\n\r\nbar\r\nbaz\r\n\r\n") got := make([]byte, 19) n, err := Reader(bytes.NewReader(input)).Read(got) if err != nil && err.Error() != "EOF" { t.Errorf("unexpected error: %s", err.Error()) } if n != 19 { t.Errorf("length error. expected: %d, got: %d", 19, n) } if !bytes.Equal(expect, got) { t.Errorf("byte mismatch. expected:\n%v\ngot:\n%v", expect, got) } }
/** Create a row of `size` cells, each initialised to 1. */
export const makeRow = (size: number): number[] => {
  return new Array(size).fill(1);
};

/**
 * Create a `size` x `size` grid of 1s, one independent row array per row.
 *
 * BUG FIX: the original `[...new Array(size).fill(makeRow(size))]` called
 * `makeRow` once and stored the SAME array reference in every slot, so
 * mutating one row mutated all of them. `Array.from` with a factory callback
 * builds a fresh row for each index.
 */
export const makeGrid = (size: number): number[][] => {
  return Array.from({ length: size }, () => makeRow(size));
};
# Migration step for v0.11: remove the legacy lib/custom.bash link/file
# from the Bash-it installation directory, if it exists.
update_v011() {
  if [ -e "${BASH_IT}/lib/custom.bash" ]; then
    unlink "${BASH_IT}/lib/custom.bash"
  fi
}
//##################################################################### // Copyright 2002, <NAME>, <NAME>. // This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt. //##################################################################### #include <PhysBAM_Tools/Grids_Uniform_Arrays/ARRAYS_ND.h> #include <PhysBAM_Tools/Math_Tools/maxabs.h> #include <PhysBAM_Tools/Math_Tools/minabs.h> #include <PhysBAM_Tools/Math_Tools/sqr.h> #include <PhysBAM_Dynamics/Level_Sets/HAMILTONIAN_NORMAL_VELOCITY_2D.h> using namespace PhysBAM; //##################################################################### // Function H //##################################################################### template<class T> T HAMILTONIAN_NORMAL_VELOCITY_2D<T>:: H(const T phi_x,const T phi_y,const int i,const int j,const T t) { return speed(i,j)*sqrt(sqr(phi_x)+sqr(phi_y)); } //##################################################################### // Function Maxabs_H1 //##################################################################### template<class T> T HAMILTONIAN_NORMAL_VELOCITY_2D<T>:: Maxabs_H1(const T phi_x_1,const T phi_x_2,const T phi_y_1,const T phi_y_2,const int i,const int j,const T t) { if(phi_y_1*phi_y_2 <= 0) return abs(speed(i,j)); // use phi_y=0 T phi_x=maxabs(phi_x_1,phi_x_2),phi_y=minabs(phi_y_1,phi_y_2); return abs(speed(i,j))*phi_x/sqrt(sqr(phi_x)+sqr(phi_y)); } //##################################################################### // Function Maxabs_H2 //##################################################################### template<class T> T HAMILTONIAN_NORMAL_VELOCITY_2D<T>:: Maxabs_H2(const T phi_x_1,const T phi_x_2,const T phi_y_1,const T phi_y_2,const int i,const int j,const T t) { if(phi_x_1*phi_x_2 <= 0) return abs(speed(i,j)); // use phi_x=0 T phi_x=minabs(phi_x_1,phi_x_2),phi_y=maxabs(phi_y_1,phi_y_2); return abs(speed(i,j))*phi_y/sqrt(sqr(phi_x)+sqr(phi_y)); } 
//##################################################################### template class HAMILTONIAN_NORMAL_VELOCITY_2D<float>; #ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT template class HAMILTONIAN_NORMAL_VELOCITY_2D<double>; #endif
/* * Copyright (c) 2019-2021 WangBin <wb<EMAIL>g1 at g<EMAIL>> * This file is part of MDK * MDK SDK: https://github.com/wang-bin/mdk-sdk * Free for opensource softwares or non-commercial use. * * The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. */ #pragma once #include <stdint.h> #define MDK_VERSION_INT(major, minor, patch) \ (((major&0xff)<<16) | ((minor&0xff)<<8) | (patch&0xff)) #define MDK_MAJOR 0 #define MDK_MINOR 12 #define MDK_MICRO 0 #define MDK_VERSION MDK_VERSION_INT(MDK_MAJOR, MDK_MINOR, MDK_MICRO) #define MDK_VERSION_CHECK(a, b, c) (MDK_VERSION >= MDK_VERSION_INT(a, b, c)) #if defined(_WIN32) #define MDK_EXPORT __declspec(dllexport) #define MDK_IMPORT __declspec(dllimport) #else #define MDK_EXPORT __attribute__((visibility("default"))) #define MDK_IMPORT __attribute__((visibility("default"))) #endif #ifdef BUILD_MDK_STATIC # define MDK_API #else # if defined(BUILD_MDK_LIB) # define MDK_API MDK_EXPORT # else # define MDK_API MDK_IMPORT # endif #endif #ifdef __cplusplus extern "C" { #endif /*! \brief CallbackToken A callback can be registered by (member)function onXXX(obj, callback, CallbackToken* token = nullptr). With the returned token we can remove the callback by onXXX(nullptr, token). Non-null callback(.opaque != null): register a callback and return a token(if not null). Null callback(.opaque == null) + non-null token: can remove the callback of given token. Null callback(.opaque == null) + null token: clear all callbacks. */ typedef uint64_t MDK_CallbackToken; typedef enum MDK_MediaType { MDK_MediaType_Unknown = -1, MDK_MediaType_Video = 0, MDK_MediaType_Audio = 1, MDK_MediaType_Subtitle = 3, } MDK_MediaType; /*! 
\brief The MediaStatus enum Defines the io status of a media stream, Use flags_added/removed() to check the change, for example buffering after seek is Loaded|Prepared|Buffering, and changes to Loaded|Prepared|Buffered when seek completed */ typedef enum MDK_MediaStatus { MDK_MediaStatus_NoMedia = 0, /* initial status, not invalid. // what if set an empty url and closed?*/ MDK_MediaStatus_Unloaded = 1, /* unloaded // (TODO: or when a source(url) is set?)*/ MDK_MediaStatus_Loading = 1<<1, /* opening and parsing the media */ MDK_MediaStatus_Loaded = 1<<2, /* media is loaded and parsed. player is stopped state. mediaInfo() is available now */ MDK_MediaStatus_Prepared = 1<<8, /* all tracks are buffered and ready to decode frames. tracks failed to open decoder are ignored*/ MDK_MediaStatus_Stalled = 1<<3, /* insufficient buffering or other interruptions (timeout, user interrupt)*/ MDK_MediaStatus_Buffering = 1<<4, /* when buffering starts */ MDK_MediaStatus_Buffered = 1<<5, /* when buffering ends */ MDK_MediaStatus_End = 1<<6, /* reached the end of the current media, no more data to read */ MDK_MediaStatus_Seeking = 1<<7, MDK_MediaStatus_Invalid = 1<<31, /* failed to load media because of unsupport format or invalid media source */ } MDK_MediaStatus; typedef struct mdkMediaStatusChangedCallback { bool (*cb)(MDK_MediaStatus, void* opaque); void* opaque; } mdkMediaStatusChangedCallback; /*! \brief The State enum Current playback state. 
Set/Get by user */ typedef enum MDK_State { MDK_State_NotRunning, MDK_State_Stopped = MDK_State_NotRunning, MDK_State_Running, MDK_State_Playing = MDK_State_Running, /* start/resume to play*/ MDK_State_Paused, } MDK_State; typedef MDK_State MDK_PlaybackState; typedef struct mdkStateChangedCallback { void (*cb)(MDK_State, void* opaque); void* opaque; } mdkStateChangedCallback; typedef enum MDKSeekFlag { /* choose one of FromX */ MDK_SeekFlag_From0 = 1, /* relative to time 0*/ MDK_SeekFlag_FromStart = 1<<1, /* relative to media start position*/ MDK_SeekFlag_FromNow = 1<<2, /* relative to current position, the seek position can be negative*/ MDK_SeekFlag_Frame = 1<<6, /* Seek by frame. Seek target is frame count instead of milliseconds. Currently only FromNow|Frame and positive target is supported, .i.e step forward. BUG: avsync */ /* combine the above values with one of the following*/ /* KeyFrame forward seek may fail(permission denied) near the end of media if there's no key frame after seek target position*/ MDK_SeekFlag_KeyFrame = 1<<8, /* fast key-frame seek, forward if Backward is not set. It's accurate seek without this flag. Accurate seek is slow and implies backward seek internally.*/ MDK_SeekFlag_Fast = MDK_SeekFlag_KeyFrame, MDK_SeekFlag_Default = MDK_SeekFlag_KeyFrame|MDK_SeekFlag_FromStart } MDK_SeekFlag; /*! \brief VideoEffect per video renderer effect. set via Player.setVideoEffect(MDK_VideoEffect effect, const float*); */ enum MDK_VideoEffect { MDK_VideoEffect_Brightness, /* [-1.0f, 1.0f], default 0 */ MDK_VideoEffect_Contrast, /* [-1.0f, 1.0f], default 0 */ MDK_VideoEffect_Hue, /* [-1.0f, 1.0f], default 0 */ MDK_VideoEffect_Saturation, /* [-1.0f, 1.0f], default 0 */ }; MDK_API int MDK_version(); /*! \brief javaVM deprecated. use MDK_setGlobalOptionPtr("jvm",..) or MDK_setGlobalOptionPtr("JavaVM",..) 
instead Set/Get current java vm \param vm null to get current vm \return vm before set */ MDK_API void* MDK_javaVM(void* vm); typedef enum MDK_LogLevel { MDK_LogLevel_Off, MDK_LogLevel_Error, MDK_LogLevel_Warning, MDK_LogLevel_Info, MDK_LogLevel_Debug, MDK_LogLevel_All } MDK_LogLevel; MDK_API void MDK_setLogLevel(MDK_LogLevel value); MDK_API MDK_LogLevel MDK_logLevel(); /* \brief setLogHandler If log handler is not set, i.e. setLogHandler() was not called, log is disabled. If set to non-null handler, logs that >= logLevel() will be passed to the handler. If previous handler is set by user and not null, then call setLogHandler(nullptr) will print to stderr, and call setLogHandler(nullptr) again to silence the log To disable log, setLogHandler(nullptr) twice is better than simply setLogLevel(LogLevel::Off) */ typedef struct mdkLogHandler { void (*cb)(MDK_LogLevel, const char*, void* opaque); void* opaque; } mdkLogHandler; MDK_API void MDK_setLogHandler(mdkLogHandler); /* keys: - "avutil_lib", "avcodec_lib", "avformat_lib", "swresample_lib", "avfilter_lib": path to ffmpeg runtime libraries - "plugins": plugin filenames or paths in pattern "p1:p2:p3" - "MDK_KEY": license key for your product - "ffmpeg.loglevel": ffmpeg log leve names, "trace", "debug", "verbose", "info", "warning", "error", "fatal", "panic", "quiet" - "logLevel": can be "Off", "Error", "Warning", "Info", "Debug", "All". same as SetGlobalOption("logLevel", LogLevel) */ MDK_API void MDK_setGlobalOptionString(const char* key, const char* value); /* keys: - "videoout.clear_on_stop": 0/1. clear renderer using background color if playback stops - "logLevel": raw value of LogLevel */ MDK_API void MDK_setGlobalOptionInt32(const char* key, int value); /* keys: - "jvm", "JavaVM": JavaVM*. 
android only */ MDK_API void MDK_setGlobalOptionPtr(const char* key, void* value); MDK_API bool MDK_getGlobalOptionString(const char* key, const char** value); MDK_API bool MDK_getGlobalOptionInt32(const char* key, int* value); MDK_API bool MDK_getGlobalOptionPtr(const char* key, void** value); /* events: {timestamp(ms), "render.video", "1st_frame"}: when the first frame is rendererd {error, "decoder.audio/video/subtitle", "open", stream}: decoder of a stream is open, or failed to open if error != 0. TODO: do not use "open"? {progress 0~100, "reader.buffering"}: error is buffering progress {0/1, "thread.audio/video/subtitle", stream}: decoder thread is started (error = 1) and about to exit(error = 0) {error, "snapshot", saved_file if no error and error string if error < 0} */ typedef struct mdkMediaEvent { int64_t error; /* result <0: error code(fourcc?). >=0: special value depending on event*/ const char* category; const char* detail; /* if error, detail can be error string*/ union { struct { int stream; } decoder; }; } mdkMediaEvent; /* bool MDK_SomeFunc(SomeStruct*, mdkStringMapEntry* entry) entry: in/out, can not be null. Input entry->priv is null: The result entry points to the first entry containing the same key as entry->key, or the first entry if entry->key is null. The result entry->priv is set to a new value by api. Input entry->priv is not null(set by the api): the result entry points to the next entry. return: true if entry is found, false if not. */ typedef struct mdkStringMapEntry { const char* key; /* input: set by user to query .value field if priv is null output: set by api if priv is not null (set by api) */ const char* value; /* output: set by api, or not touched if no such key */ void* priv; /* input/output: set by api */ } mdkStringMapEntry; /* \brief MDK_strdup Always use this if a duplicated string is needed. 
DO NOT call strdup() directly because may fail to free() it in mdk, for example if user code is built against msvc debug crt but mdk uses release crt, then free() in mdk will crash */ MDK_API char* MDK_strdup(const char* strSource); #ifdef __cplusplus } #endif
package ru.northarea.nexus;

import ru.northarea.nexus.exception.NexusException;

/**
 * A single processing unit in the Nexus framework.
 */
public interface Nexus {

    /**
     * Executes this unit's work.
     *
     * @throws NexusException if handling fails
     */
    void handle() throws NexusException;
}
import gql from "graphql-tag";

// GraphQL query document: fetch all customers with the fields used by the app.
// The query string is runtime data and is kept exactly as authored.
export default gql(` query { listCustomers { items { id name active phone email } } }`);
describe("sandwich settings", function () {
  // Every settings page reachable from the sandwich (settings) menu.
  // The sidebar entry id and the URL fragment share the same name, so a
  // single data-driven loop replaces the six copy-pasted stanzas of the
  // original spec.
  const pages = [
    "user-profile",
    "manage-users",
    "sso-provider",
    "sources",
    "display-formats",
    "metrics-settings",
  ];

  it("tests the settings menu can be used to navigate", function () {
    cy.login(Cypress.config('adminUsername'), Cypress.config('adminPassword'));

    /* testing */
    cy.visit("/datacenter/settings/user-profile");

    // Open the menu, click the sidebar entry, verify the resulting URL.
    pages.forEach((page) => {
      cy.get('.settings-menu-toggle').click();
      cy.get(`.MuiPaper-root > .settings-sidebar > #${page}`).click();
      cy.url().should("include", `/${page}`);
    });
  });
});
/*
 * Copyright 2011-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.glowroot.instrumentation.api.config;

/**
 * Read access to instrumentation-scoped configuration properties, with change
 * notification via {@code ConfigListener}.
 */
public interface ConfigService {

    /**
     * Registers a listener that will receive a callback when the instrumentation's property values
     * are changed.
     */
    void registerConfigListener(ConfigListener listener);

    /**
     * Returns the {@code String} instrumentation property value with the specified {@code name}.
     * {@code null} is never returned. If there is no {@code String} instrumentation property with
     * the specified {@code name} then the empty string {@code ""} is returned.
     *
     * Properties are scoped per instrumentation. They are defined in the instrumentation's
     * META-INF/instrumentation.*.json file, and can be modified (assuming they are not marked as
     * hidden) on the configuration page under the instrumentation's configuration section.
     */
    StringProperty getStringProperty(String name);

    /**
     * Returns the {@code boolean} instrumentation property value with the specified {@code name}.
     * If there is no {@code boolean} instrumentation property with the specified {@code name} then
     * {@code false} is returned.
     *
     * Properties are scoped per instrumentation. They are defined in the instrumentation's
     * META-INF/instrumentation.*.json file, and can be modified (assuming they are not marked as
     * hidden) on the configuration page under the instrumentation's configuration section.
     */
    BooleanProperty getBooleanProperty(String name);

    /**
     * Returns the {@code Double} instrumentation property value with the specified {@code name}. If
     * there is no {@code Double} instrumentation property with the specified {@code name} then
     * {@code null} is returned.
     *
     * Properties are scoped per instrumentation. They are defined in the instrumentation's
     * META-INF/instrumentation.*.json file, and can be modified (assuming they are not marked as
     * hidden) on the configuration page under the instrumentation's configuration section.
     */
    DoubleProperty getDoubleProperty(String name);

    /**
     * Returns the {@code List} instrumentation property value with the specified {@code name}.
     * {@code null} is never returned. If there is no {@code String} instrumentation property with
     * the specified {@code name} then the empty list is returned.
     *
     * Properties are scoped per instrumentation. They are defined in the instrumentation's
     * META-INF/instrumentation.*.json file, and can be modified (assuming they are not marked as
     * hidden) on the configuration page under the instrumentation's configuration section.
     */
    ListProperty getListProperty(String name);
}
# -*- coding: utf-8 -*-
"""
Azure Resource Manager (ARM) Container Registry Webhook State Module

.. versionadded:: 3.0.0

.. versionchanged:: 4.0.0

:maintainer: <<EMAIL>>

:configuration: This module requires Azure Resource Manager credentials to be passed via acct. Note that the
    authentication parameters are case sensitive.

    Required provider parameters:

    if using username and password:

    * ``subscription_id``
    * ``username``
    * ``password``

    if using a service principal:

    * ``subscription_id``
    * ``tenant``
    * ``client_id``
    * ``secret``

    Optional provider parameters:

    **cloud_environment**: Used to point the cloud driver to different API endpoints, such as Azure GovCloud.
    Possible values:

    * ``AZURE_PUBLIC_CLOUD`` (default)
    * ``AZURE_CHINA_CLOUD``
    * ``AZURE_US_GOV_CLOUD``
    * ``AZURE_GERMAN_CLOUD``

    Example configuration for Azure Resource Manager authentication:

    .. code-block:: yaml

        azurerm:
            default:
                subscription_id: 3287abc8-f98a-c678-3bde-326766fd3617
                tenant: ABCDEFAB-1234-ABCD-1234-ABCDEFABCDEF
                client_id: ABCDEFAB-1234-ABCD-1234-ABCDEFABCDEF
                secret: XXXXXXXXXXXXXXXXXXXXXXXX
                cloud_environment: AZURE_PUBLIC_CLOUD
            user_pass_auth:
                subscription_id: 3287abc8-f98a-c678-3bde-326766fd3617
                username: fletch
                password: <PASSWORD>

    The authentication parameters can also be passed as a dictionary of keyword arguments to the
    ``connection_auth`` parameter of each state, but this is not preferred and could be deprecated in the future.

"""
# Import Python libs
from dict_tools import differ
import logging


log = logging.getLogger(__name__)


async def present(
    hub,
    ctx,
    name,
    registry_name,
    resource_group,
    service_uri,
    actions,
    custom_headers=None,
    status="enabled",
    scope=None,
    tags=None,
    connection_auth=None,
    **kwargs,
):
    """
    .. versionadded:: 3.0.0

    .. versionchanged:: 4.0.0

    Ensure a container registry webhook exists.

    :param name: The name of the webhook.

    :param registry_name: The name of the container registry.

    :param resource_group: The name of the resource group to which the container registry belongs.

    :param service_uri: The service URI for the webhook to post notifications.

    :param actions: The list of actions that trigger the webhook to post notifications. Possible values include
        'chart_delete', 'chart_push', 'delete', 'push', and 'quarantine'.

    :param custom_headers: A dictionary of custom headers that will be added to the webhook notifications.

    :param status: The status of the webhook at the time the operation was called. Possible values are 'enabled'
        and 'disabled'.

    :param scope: The scope of repositories where the event can be triggered. For example, ``foo:>>*<<`` means events
        for all tags under repository ``foo``. ``foo:bar`` means events for ``foo:bar`` only. ``foo`` is equivalent
        to ``foo:latest``. Empty means all events.

    :param tags: A dictionary of strings can be passed as tag metadata to the object.

    Example usage:

    .. code-block:: yaml

        Ensure container registry webhook exists:
            azurerm.containerregistry.webhook.present:
                - name: testhook
                - registry_name: testrepo
                - resource_group: testgroup
                - service_uri: http://idem.eitr.tech/webhook
                - actions:
                    - push
                - status: enabled
                - custom_headers:
                      X-Custom-Header: idem
                - tags:
                      how_awesome: very
                      contact_name: <NAME>

    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    action = "create"

    if not isinstance(connection_auth, dict):
        if ctx["acct"]:
            connection_auth = ctx["acct"]
        else:
            ret[
                "comment"
            ] = "Connection information must be specified via acct or connection_auth dictionary!"
            return ret

    # get existing container registry webhook if present
    hook = await hub.exec.azurerm.containerregistry.webhook.get(
        ctx,
        name,
        registry_name,
        resource_group,
        callback_config=True,
        azurerm_log_level="info",
        **connection_auth,
    )

    if "error" not in hook:
        action = "update"

        # service_uri changes
        if service_uri.upper() != hook["service_uri"].upper():
            ret["changes"]["service_uri"] = {
                "old": hook["service_uri"],
                "new": service_uri,
            }

        # actions changes (compared case-insensitively, order-independently)
        old_act = sorted([act.lower() for act in hook["actions"]])
        actions = sorted([act.lower() for act in actions])
        if old_act != actions:
            ret["changes"]["actions"] = {
                "old": old_act,
                "new": actions,
            }

        # custom_headers changes
        head_diff = differ.deep_diff(
            hook.get("custom_headers", {}), custom_headers or {}
        )
        if head_diff:
            # BUG FIX: this diff was previously stored under the "tags" key,
            # so it was overwritten by the actual tag diff below and the
            # custom-headers change was lost from the report.
            ret["changes"]["custom_headers"] = head_diff

        # status changes
        if status.upper() != hook["status"].upper():
            ret["changes"]["status"] = {"old": hook["status"], "new": status}

        # scope changes
        if scope:
            if scope.upper() != hook["scope"].upper():
                ret["changes"]["scope"] = {"old": hook["scope"], "new": scope}

        # tag changes
        tag_diff = differ.deep_diff(hook.get("tags", {}), tags or {})
        if tag_diff:
            ret["changes"]["tags"] = tag_diff

        if not ret["changes"]:
            ret["result"] = True
            ret[
                "comment"
            ] = "Container registry webhook {0} is already present.".format(name)
            return ret

        if ctx["test"]:
            ret["comment"] = "Container registry webhook {0} would be updated.".format(
                name
            )
            ret["result"] = None
            return ret

    elif ctx["test"]:
        ret["comment"] = "Container registry webhook {0} would be created.".format(name)
        ret["result"] = None
        return ret

    hook_kwargs = kwargs.copy()
    hook_kwargs.update(connection_auth)

    hook = await hub.exec.azurerm.containerregistry.webhook.create_or_update(
        ctx=ctx,
        name=name,
        registry_name=registry_name,
        resource_group=resource_group,
        service_uri=service_uri,
        actions=actions,
        custom_headers=custom_headers,
        status=status,
        scope=scope,
        tags=tags,
        **hook_kwargs,
    )

    if action == "create":
        ret["changes"] = {"old": {}, "new": hook}

    if "error" not in hook:
        ret["result"] = True
        ret["comment"] = f"Container registry webhook {name} has been {action}d."
        return ret

    ret["comment"] = "Failed to {0} container registry webhook {1}! ({2})".format(
        action, name, hook.get("error")
    )
    if not ret["result"]:
        ret["changes"] = {}
    return ret


async def absent(
    hub, ctx, name, registry_name, resource_group, connection_auth=None, **kwargs
):
    """
    .. versionadded:: 3.0.0

    Ensure a webhook does not exist in a container registry.

    :param name: Name of the webhook.

    :param registry_name: The name of the container registry.

    :param resource_group: The name of the resource group to which the container registry belongs.

    .. code-block:: yaml

        Ensure container registry webhook is absent:
            azurerm.containerregistry.webhook.absent:
                - name: testhook
                - registry_name: testrepo
                - resource_group: testgroup

    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    if not isinstance(connection_auth, dict):
        if ctx["acct"]:
            connection_auth = ctx["acct"]
        else:
            ret[
                "comment"
            ] = "Connection information must be specified via acct or connection_auth dictionary!"
            return ret

    hook = await hub.exec.azurerm.containerregistry.webhook.get(
        ctx,
        name,
        registry_name,
        resource_group,
        azurerm_log_level="info",
        **connection_auth,
    )

    if "error" in hook:
        ret["result"] = True
        ret["comment"] = "Container registry webhook {0} is already absent.".format(
            name
        )
        return ret

    if ctx["test"]:
        ret["comment"] = "Container registry webhook {0} would be deleted.".format(name)
        ret["result"] = None
        ret["changes"] = {
            "old": hook,
            "new": {},
        }
        return ret

    deleted = await hub.exec.azurerm.containerregistry.webhook.delete(
        ctx, name, registry_name, resource_group, **connection_auth
    )

    if deleted:
        ret["result"] = True
        ret["comment"] = "Container registry webhook {0} has been deleted.".format(name)
        ret["changes"] = {"old": hook, "new": {}}
        return ret

    ret["comment"] = "Failed to delete container registry webhook {0}!".format(name)
    return ret
#!/usr/bin/env sh
# Build the project and force-push dist/ to the gh-pages branch.

rm -rf dist
npm run build

# BUG FIX: the original test was `[ ! -d "dist"]` — the missing space before
# `]` is a shell syntax error, so the check never worked.
if [ ! -d "dist" ]; then
  echo "构建失败"
else
  echo "构建成功"
  cd dist
  git init
  # 配置user
  git config --local user.name "gausszhou"
  git config --local user.email gausszhou@qq.com
  git add .
  # BUG FIX: %H:%m was hour:MONTH; %M is minutes.
  time=$(date "+%Y-%m-%d %H:%M")
  git commit -m "$time deploy"
  git branch gh-pages
  git checkout gh-pages
  # 部署到 github gh-pages / git push
  git push git@github.com:gausszhou/vue-ruler.git gh-pages -f
  # Return to the previous directory only when we actually changed into dist.
  cd -
fi
package com.flash3388.frc.nt.beans;

import com.beans.Property;
import edu.wpi.first.networktables.NetworkTableEntry;
import edu.wpi.first.networktables.NetworkTableType;
import edu.wpi.first.networktables.NetworkTableValue;

/**
 * A {@link Property} view over a NetworkTables entry holding a {@code String}.
 */
public class NtStringProperty extends NtPropertyBase implements Property<String> {

    public NtStringProperty(NetworkTableEntry entry) {
        super(entry);
    }

    /**
     * Reads the entry's current value as a string.
     * NOTE(review): getOfType is inherited from NtPropertyBase; presumably it
     * validates/filters on the kString type — confirm its failure behavior there.
     */
    @Override
    public String get() {
        NetworkTableValue value = getOfType(NetworkTableType.kString);
        return value.getString();
    }

    /** Writes a string value to the underlying entry. */
    @Override
    public void set(String value) {
        set(NetworkTableValue.makeString(value));
    }
}
# Serve /home/public read-only over HTTP on host port 8082 using nginx,
# with a custom nginx.conf mounted from the current working directory.
docker run --name share -p 8082:80 -v `pwd`/nginx.conf:/etc/nginx/nginx.conf:ro -v /home/public:/opt/www/files:ro -d nginx
# Ensure Webmin's useradmin module has passwd_same enabled: append the
# setting when absent, otherwise rewrite whatever value is there to 1.
# Uses `grep -q` instead of comparing the output of `grep -c` to "0".
if ! grep -q '^passwd_same=' /etc/webmin/useradmin/config; then
    echo "passwd_same=1" >> /etc/webmin/useradmin/config
else
    sed -i "s/^passwd_same=.*/passwd_same=1/" /etc/webmin/useradmin/config
fi
#!/bin/bash -e ### BEGIN INIT INFO # Provides: rockchip # Required-Start: # Required-Stop: # Default-Start: # Default-Stop: # Short-Description: # Description: Setup rockchip platform environment ### END INIT INFO PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin install_mali() { case $1 in rk3288) MALI=midgard-t76x-r18p0-r0p0 # 3288w cat /sys/devices/platform/*gpu/gpuinfo | grep -q r1p0 && \ MALI=midgard-t76x-r18p0-r1p0 ;; rk3399|rk3399pro) MALI=midgard-t86x-r18p0 ;; rk3328) MALI=utgard-450 ;; rk3326|px30) MALI=bifrost-g31-g2p0 ;; rk3128|rk3036) MALI=utgard-400 ;; rk3568|rk3566) MALI=bifrost-g52-g2p0 ;; rk3588|rk3588s) MALI=valhall-g610-g6p0 ;; esac apt install -f /packages/libmali/libmali-*$MALI*-x11*.deb } function update_npu_fw() { /usr/bin/npu-image.sh sleep 1 /usr/bin/npu_transfer_proxy& } COMPATIBLE=$(cat /proc/device-tree/compatible) if [[ $COMPATIBLE =~ "rk3288" ]]; then CHIPNAME="rk3288" elif [[ $COMPATIBLE =~ "rk3328" ]]; then CHIPNAME="rk3328" elif [[ $COMPATIBLE =~ "rk3399" && $COMPATIBLE =~ "rk3399pro" ]]; then CHIPNAME="rk3399pro" update_npu_fw elif [[ $COMPATIBLE =~ "rk3399" ]]; then CHIPNAME="rk3399" elif [[ $COMPATIBLE =~ "rk3326" ]]; then CHIPNAME="rk3326" elif [[ $COMPATIBLE =~ "px30" ]]; then CHIPNAME="px30" elif [[ $COMPATIBLE =~ "rk3128" ]]; then CHIPNAME="rk3128" elif [[ $COMPATIBLE =~ "rk3566" ]]; then CHIPNAME="rk3566" elif [[ $COMPATIBLE =~ "rk3568" ]]; then CHIPNAME="rk3568" elif [[ $COMPATIBLE =~ "rk3588" ]]; then CHIPNAME="rk3588" else CHIPNAME="rk3036" fi COMPATIBLE=${COMPATIBLE#rockchip,} BOARDNAME=${COMPATIBLE%%rockchip,*} # first boot configure if [ ! -e "/usr/local/first_boot_flag" ] ; then echo "It's the first time booting." echo "The rootfs will be configured." 
# Force rootfs synced mount -o remount,sync / install_mali ${CHIPNAME} setcap CAP_SYS_ADMIN+ep /usr/bin/gst-launch-1.0 rm -rf /packages # The base target does not come with sddm systemctl restart sddm.service || true touch /usr/local/first_boot_flag fi # enable rkwifbt service #service rkwifibt start # enable async service #service async start # enable adbd service #service adbd start # support power management if [ -e "/usr/sbin/pm-suspend" -a -e /etc/Powermanager ] ; then mv /etc/Powermanager/power-key.sh /usr/bin/ mv /etc/Powermanager/power-key.conf /etc/triggerhappy/triggers.d/ if [[ "$CHIPNAME" == "rk3399pro" ]]; then mv /etc/Powermanager/01npu /usr/lib/pm-utils/sleep.d/ mv /etc/Powermanager/02npu /lib/systemd/system-sleep/ fi mv /etc/Powermanager/triggerhappy /etc/init.d/triggerhappy rm /etc/Powermanager -rf service triggerhappy restart fi # Create dummy video node for chromium V4L2 VDA/VEA with rkmpp plugin echo dec > /dev/video-dec0 echo enc > /dev/video-enc0 chmod 660 /dev/video-* chown root.video /dev/video-* # The chromium using fixed pathes for libv4l2.so ln -rsf /usr/lib/*/libv4l2.so /usr/lib/ [ -e /usr/lib/aarch64-linux-gnu/ ] && ln -Tsf lib /usr/lib64 # sync system time hwclock --systohc # read mac-address from efuse # if [ "$BOARDNAME" == "rk3288-miniarm" ]; then # MAC=`xxd -s 16 -l 6 -g 1 /sys/bus/nvmem/devices/rockchip-efuse0/nvmem | awk '{print $2$3$4$5$6$7 }'` # ifconfig eth0 hw ether $MAC # fi
#!/bin/bash
# Update the system, install my programming environment, then report the
# installed versions of the main tools.

echo "going to install my favorite tools for programming"

sudo apt-get update
sudo apt-get upgrade -y
sudo apt-get dist-upgrade -y

#This installs my programming languages, ides and virtual enviroments
# Guard with && so py_run.py is skipped when the directory is missing.
cd py_scripts && python3 py_run.py

# Collect version strings.
# FIX: the original used the fragile quoted-string form of `declare -A`
# (the whole array literal in single quotes) with backticks; use a proper
# compound assignment and $(...) command substitution instead.
declare -A prolang=(
  ["nmap version: "]="$(nmap -v)"
  ["wireshark version: "]="$(wireshark --version)"
  ["python verison: "]="$(python3 --version)"
  ["Rust version: "]="$(rustc --version)"
  ["node version: "]="$(node --version)"
  ["npm version: "]="$(npm --version)"
)

for i in "${!prolang[@]}"; do
  echo "$i ${prolang[$i]}"
done
// Copyright (C) (See commit logs on github.com/robhz786/strf) // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #include "test_utils.hpp" template <typename... F> constexpr STRF_HD bool all_are_constrainable() { return strf::detail::fold_and<strf::is_constrainable<F>()...>::value; } STRF_TEST_FUNC void test_input_facets_pack() { TEST("1,0,0,0,0 10000 1000000 10,000 1'0000 1'000000 10.000 1^00^00 1'000000") .with(strf::numpunct<10>(1)) ( !strf::fmt(10000), ' ' , !strf::hex(0x10000), ' ' , !strf::oct(01000000), ' ' , strf::with ( strf::numpunct<10>(3) , strf::numpunct<16>(4).thousands_sep('\'') , strf::numpunct<8>(6).thousands_sep('\'') ) ( !strf::fmt(10000), ' ' , !strf::hex(0x10000), ' ' , !strf::oct(01000000), ' ' , strf::with ( strf::numpunct<10>(3).thousands_sep('.') , strf::numpunct<16>(2).thousands_sep('^') ) ( !strf::fmt(10000), ' ' , !strf::hex(0x10000), ' ' , !strf::oct(01000000) ) ) ); { // inside joins TEST("****1,0,0,0,0 10000 1000000 10,000 1'0000 1'000000") .with(strf::numpunct<10>(1)) ( strf::join_right(50, U'*') ( !strf::fmt(10000), ' ' , !strf::hex(0x10000), ' ' , !strf::oct(01000000), ' ' , strf::with ( strf::numpunct<10>(3) , strf::numpunct<16>(4).thousands_sep('\'') , strf::numpunct<8>(6).thousands_sep('\'') ) ( !strf::fmt(10000), ' ' , !strf::hex(0x10000), ' ' , !strf::oct(01000000) ))); } static_assert ( all_are_constrainable<>() , "all_are_constrainable ill implemented"); static_assert ( all_are_constrainable < strf::fast_width_t , strf::numpunct<10> >() , "these facets should be constrainable"); static_assert ( ! all_are_constrainable < strf::utf_t<char> , strf::fast_width_t , strf::numpunct<10> >() , "charset is not constrainable"); } REGISTER_STRF_TEST(test_input_facets_pack);
#!/bin/bash
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (C) 2019 - 2020 Intel Corporation.
#
# set_host_configuration.sh - set Huge Pages parameters required for memkind tests

# Abort immediately if any command fails (e.g. sudo denied, /proc not readable).
set -e

# Target values required by the memkind test suite.
MEMKIND_HUGE_PAGES_NO=3000
MEMKIND_OVERCOMMIT_HUGEPAGES_NO=128

# find out current configuration
MEMKIND_HUGE_PAGES_FOUND=$(cat /proc/sys/vm/nr_hugepages)
MEMKIND_OVERCOMMIT_HUGEPAGES_FOUND=$(cat /proc/sys/vm/nr_overcommit_hugepages)

# set expected configuration
# Only touch the kernel settings when they differ from the targets, so
# repeated runs are idempotent and don't prompt for sudo unnecessarily.
if [ "$MEMKIND_HUGE_PAGES_FOUND" != "$MEMKIND_HUGE_PAGES_NO" ]; then
    echo "Setting number of hugepages to ${MEMKIND_HUGE_PAGES_NO}."
    sudo sysctl vm.nr_hugepages="$MEMKIND_HUGE_PAGES_NO"
fi

if [ "$MEMKIND_OVERCOMMIT_HUGEPAGES_FOUND" != "$MEMKIND_OVERCOMMIT_HUGEPAGES_NO" ]; then
    echo "Setting number of overcommit hugepages to ${MEMKIND_OVERCOMMIT_HUGEPAGES_NO}."
    sudo sysctl vm.nr_overcommit_hugepages="$MEMKIND_OVERCOMMIT_HUGEPAGES_NO"
fi
import random


class DetectionPoint:
    """A named security detection point with a human-readable description."""

    def __init__(self, name, description):
        self.name = name
        self.description = description

    def get_name(self):
        """Return the detection point's name."""
        return self.name

    def get_description(self):
        """Return the detection point's description."""
        return self.description


def randomDetectionPoint():
    """Pick one of the known detection points at random.

    Returns:
        tuple: the ``(name, description)`` pair of the chosen point.
    """
    candidates = (
        DetectionPoint("HTTP Verb", "GET Request used where POST is expected"),
        DetectionPoint("SQL Injection", "Potential SQL injection vulnerability detected"),
    )
    chosen = random.choice(candidates)
    return chosen.get_name(), chosen.get_description()


# Example usage
name, description = randomDetectionPoint()
print(f"Randomly selected detection point: {name}")
print(f"Description: {description}")
# This script would run before 'composer install' and 'composer update' commands. # It clones palantirnet/drupal-rector repo to under the drupal-rector/ directory. # Composer is set to use the cloned repo instead of getting drupal-rector from packagist # (This is done through 'type: path' under repositories section in composer.json) # Clone palantirnet/drupal-rector if the drupal-rector/ directory does not exist. DIR="drupal-rector/" if [ ! -d "$DIR" ]; then echo "Creating development environment under ${DIR} directory" # If a public SSH key found clone with SSH, otherwise clone with HTTPS FILE="$HOME/.ssh/id_rsa.pub" if [ -e "$FILE" ]; then echo "Clone with SSH" git clone git@github.com:palantirnet/drupal-rector.git else echo "Clone with HTTPS" git clone https://github.com/palantirnet/drupal-rector.git fi fi # Create a copy of rector.yml file if [ ! -e "rector.yml" ]; then echo "Copying rector.yml into the document root directory" cp drupal-rector/rector.yml . fi # Create symlink for rector_examples to be in drupal's default module directory if [ ! -L "web/modules/custom/rector_examples" ]; then echo "Creating a symlink for web/modules/custom/rector_examples..." mkdir -p web/modules/custom ln -s ../../../drupal-rector/rector_examples web/modules/custom/rector_examples fi
#!/usr/bin/env bash # Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Fully supervised baseline without mixup (not shown in paper since Mixup is better) python fully_supervised/fs_baseline.py --train_dir experiments/fs --dataset=cifar10-1 --wd=0.02 --smoothing=0.001 python fully_supervised/fs_baseline.py --train_dir experiments/fs --dataset=cifar100-1 --wd=0.02 --smoothing=0.001 python fully_supervised/fs_baseline.py --train_dir experiments/fs --dataset=svhn-1 --wd=0.002 --smoothing=0.01 python fully_supervised/fs_baseline.py --train_dir experiments/fs --dataset=svhn_noextra-1 --wd=0.002 --smoothing=0.01 # Fully supervised Mixup baselines (in paper) # Uses default parameters: --wd=0.002 --beta=0.5 python fully_supervised/fs_mixup.py --train_dir experiments/fs --dataset=cifar10-1 python fully_supervised/fs_mixup.py --train_dir experiments/fs --dataset=svhn-1 python fully_supervised/fs_mixup.py --train_dir experiments/fs --dataset=svhn_noextra-1 # Fully supervised Mixup baselines on 26M parameter large network (in paper) # Uses default parameters: --wd=0.002 --beta=0.5 python fully_supervised/fs_mixup.py --train_dir experiments/fs --dataset=cifar10-1 --filters=135 python fully_supervised/fs_mixup.py --train_dir experiments/fs --dataset=cifar100-1 --filters=135
# Copyright 2020 Nym # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #!/bin/bash # set CHANGELOG_GITHUB_TOKEN in your .bashrc file # For each version, you can add a release summary with text, images, gif animations, etc, and show new features and notes clearly to the user. This is done using GitHub metadata. # Example: adding the release summary for v1.0.0: # 1. Create a new GitHub Issue # 2. In the Issue's Description field, add your release summary content # 3. Set the Issue Label `release-summary` and add it to the GitHub Milestone `v1.0.0` # 4. Close the Issue and execute `github-changelog-generator` github_changelog_generator -u nymtech -p nym --exclude-tags 0.1.0 --token "$CHANGELOG_GITHUB_TOKEN"
/* Copyright (C) 2021 The Falco Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // This package contains utilities for passing pointers to go managed // memory to/from the plugins framework. This allows using go structs // to represent the state of a created plugin (e.g. ss_plugin_t) or // the state of an open plugin instance (e.g. ss_instance_t), without // having those structs be garbage collected after a given go // plugin_XXX function has returned. // // Here's an overview of how to use the functions: // // type pluginState struct { // // State for a created plugin goes here // } // // // export plugin_init // func plugin_init(config *C.char, rc *int32) unsafe.Pointer { // // sobj := state.NewStateContainer() // // // Allocate the context struct attach it to the state // pCtx := &pluginState{} // state.SetContext(sobj, unsafe.Pointer(pCtx)) // // *rc = sdk.ScapSuccess // return sobj // } // // //export plugin_destroy // func plugin_destroy(plgState unsafe.Pointer) { // state.Free(plgState) // } // // When go 1.17 is more widespread, this implementation will change to // use cgo.Handle (https://pkg.go.dev/runtime/cgo@go1.17#Handle) // instead. package state
def longestSubstringLength(s: str) -> int:
    """Return the length of the longest substring of ``s`` with no repeats.

    Classic sliding-window scan: ``left`` marks the start of the current
    duplicate-free window and ``last_seen`` remembers the most recent index
    of each character. Runs in O(len(s)) time.
    """
    last_seen: dict = {}
    best = 0
    left = 0
    for right, ch in enumerate(s):
        prev = last_seen.get(ch)
        if prev is not None and prev >= left:
            # ch already occurs inside the window: slide past its earlier copy.
            left = prev + 1
        last_seen[ch] = right
        best = max(best, right - left + 1)
    return best
<filename>src/net/kislay/goasat/sip/syntaxencoding/SipHeaderFieldValue.java
/*
    This file is part of Peers, a java SIP softphone.

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
    See the GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.

    Copyright 2007, 2008, 2009, 2010 <NAME>
*/
package net.kislay.goasat.sip.syntaxencoding;

import java.util.HashMap;

import net.kislay.goasat.sip.RFC3261;

/**
 * A SIP header field value together with its parameters, e.g. the value
 * {@code <sip:alice@host>;tag=abc} split into the base value and a
 * {@code tag=abc} parameter map.
 */
public class SipHeaderFieldValue {

    /** Base value, i.e. everything before the first parameter separator. */
    private String value;

    /** Parameters parsed from the raw value (name -> value, "" when bare). */
    private HashMap<SipHeaderParamName, String> params;

    /**
     * Parses a raw header field value into its base value and parameters.
     * Parameters are only looked for after a closing angle bracket when one
     * is present, so separators inside a {@code <...>} URI are not split on.
     *
     * @param value the raw header field value to parse
     */
    public SipHeaderFieldValue(String value) {
        int startPos = value.indexOf(RFC3261.RIGHT_ANGLE_BRACKET);
        int pos;
        if (startPos > -1) {
            // Skip separators inside the angle-bracketed part of the value.
            pos = value.indexOf(RFC3261.PARAM_SEPARATOR, startPos);
        } else {
            pos = value.indexOf(RFC3261.PARAM_SEPARATOR);
        }
        String paramsString;
        if (pos > -1) {
            this.value = value.substring(0, pos);
            paramsString = value.substring(pos);
        } else {
            this.value = value;
            paramsString = "";
        }
        params = new HashMap<SipHeaderParamName, String>();
        if (paramsString.contains(RFC3261.PARAM_SEPARATOR)) {
            String[] arr = paramsString.split(RFC3261.PARAM_SEPARATOR);
            if (arr.length > 1) {
                // arr[0] is the (empty) text before the leading separator.
                for (int i = 1; i < arr.length; ++i) {
                    String paramName = arr[i];
                    String paramValue = "";
                    pos = paramName.indexOf(RFC3261.PARAM_ASSIGNMENT);
                    if (pos > -1) {
                        paramName = arr[i].substring(0, pos);
                        paramValue = arr[i].substring(pos + 1);
                    }
                    params.put(new SipHeaderParamName(paramName), paramValue);
                }
            }
        }
    }

    /**
     * @param name parameter name
     * @return the parameter's value, or {@code null} if absent
     */
    public String getParam(SipHeaderParamName name) {
        return params.get(name);
    }

    /** Adds or replaces a parameter. */
    public void addParam(SipHeaderParamName name, String value) {
        params.put(name, value);
    }

    /** Removes a parameter if present. */
    public void removeParam(SipHeaderParamName name) {
        params.remove(name);
    }

    /** @return the base value without parameters */
    public String getValue() {
        return value;
    }

    /** Replaces the base value; parameters are kept. */
    public void setValue(String value) {
        this.value = value;
    }

    /**
     * Serializes the base value followed by {@code ;name=value} for each
     * parameter ({@code ;name} alone when the value is blank or null).
     */
    @Override
    public String toString() {
        if (params == null || params.isEmpty()) {
            return value;
        }
        // StringBuilder instead of the legacy synchronized StringBuffer;
        // 'paramValue' avoids shadowing the 'value' field, and a null check
        // guards against parameters added with a null value.
        StringBuilder buf = new StringBuilder(value);
        for (SipHeaderParamName name : params.keySet()) {
            buf.append(RFC3261.PARAM_SEPARATOR).append(name);
            String paramValue = params.get(name);
            if (paramValue != null && !"".equals(paramValue.trim())) {
                buf.append(RFC3261.PARAM_ASSIGNMENT).append(paramValue);
            }
        }
        return buf.toString();
    }
}
/**
 * Ambient declaration: resolves something for the given module name.
 * NOTE(review): declaration only — the implementation lives elsewhere and the
 * `any` return type hides the real shape; TODO confirm what it returns.
 */
export default function (moduleName: string): any;
/** * Copyright (C) Oranda - All Rights Reserved (January 2019 - January 2021) */ import { HaliaCoreAPI, haliaCoreAPI, ImportRegisterParams } from "./halia"; export interface HaliaPlugin<Imports = any, Exports = any> { id: string; name: string; description?: string; dependencies?: string[]; install: (imports: Imports) => Exports; } /** * HaliaCore */ export const HaliaCore: HaliaPlugin<undefined, HaliaCoreAPI> = { id: "haliaCore", name: "Halia Core", description: "The Extensible Halia Core", dependencies: [], install: () => { return haliaCoreAPI; } }; /** * HaliaProgram */ export type handler = (dependencies: HaliaPlugin[]) => void; export interface Program { registerCode: (handler: handler) => void; } export class HaliaProgramAPI { public code: any[] = []; public registerCode = (handler: handler) => { this.code.push(handler); }; public run = async () => { const results = []; for (const handler of this.code) { const res = await handler(); results.push(res); } return results; }; } export const HaliaProgram: HaliaPlugin<undefined, HaliaProgramAPI> = { id: "haliaProgram", name: "Halia Program", description: "Extensible Program", dependencies: [], install: () => { return new HaliaProgramAPI(); } }; export interface OptionalDependenciesPatch { optionalDependencies?: string[]; } /** * NOTE: Optional Dependencies may be useful while debugging */ export const OptionalDependencies: HaliaPlugin<undefined, void> = { id: "optionalDependencies", name: "Optional Dependencies", description: "Optional Dependencies", dependencies: [HaliaCore.id], install: ({ haliaCore }: { haliaCore: HaliaCoreAPI }) => { haliaCore.importRegister.addRegister("optional-dependencies", ({ stack, node, importMap, exportMap }: ImportRegisterParams<HaliaPlugin & OptionalDependenciesPatch>) => { const { original: { optionalDependencies = [] } } = node; optionalDependencies.forEach(depId => { importMap[depId] = exportMap[depId]; }); }); } };
#!/bin/sh # CYBERWATCH SAS - 2017 # # Security fix for DSA-2443-1 # # Security announcement date: 2012-03-26 00:00:00 UTC # Script generation date: 2017-01-01 21:06:22 UTC # # Operating System: Debian 6 (Squeeze) # Architecture: i386 # # Vulnerable packages fix on version: # - linux-2.6:2.6.32-41squeeze2 # # Last versions recommanded by security team: # - linux-2.6:2.6.32-48squeeze20 # # CVE List: # - CVE-2009-4307 # - CVE-2011-1833 # - CVE-2011-4127 # - CVE-2011-4347 # - CVE-2012-0045 # - CVE-2012-1090 # - CVE-2012-1097 # # More details: # - https://www.cyberwatch.fr/vulnerabilites # # Licence: Released under The MIT License (MIT), See LICENSE FILE sudo apt-get install --only-upgrade linux-2.6=2.6.32-48squeeze20 -y
import React from 'react'; import './ContactData.css'; const CONTACT_ITEMS = [ '<NAME> 1791', 'Córdoba, Argentina', '+54 9 351 157 404 863', ]; const ContactData = () => { return ( <ul className="ContactData"> {CONTACT_ITEMS.map((data, i) => ( <li className="ContactData__Item" key={i}> {data} <span>&gt;&gt;&gt;</span> </li> ))} </ul> ); }; export default ContactData;
/* One employee record; fixed-size layout suitable for binary file storage. */
typedef struct{
    int employee_id;         /* numeric employee identifier */
    char employee_name[20];  /* employee name; assumed NUL-terminated, so max 19 chars — TODO confirm */
    char department[20];     /* department name; assumed NUL-terminated, so max 19 chars — TODO confirm */
    float salary;            /* salary amount; units/currency not specified here */
}Employee;
<reponame>pwbrown/advent-of-code
import { getInput } from './utils';

/**
 * Counts how many elements of `arr` are strictly greater than the element
 * immediately before them (the first element has no predecessor).
 */
const countIncreases = (arr: number[]) => arr.reduce<number>((p, c, i, a) => {
  if (i !== 0 && c > a[i-1]) {
    p += 1;
  }
  return p;
}, 0);

// Puzzle input: one integer depth reading per line.
const depths = getInput(1).split('\n').map(n => parseInt(n));

// Build every 3-element sliding window over the depth readings.
const windows: number[][] = [];
depths.forEach((depth, i) => {
  /** Create new window */
  windows.push([depth]);
  /** Add to previous 2 windows */
  for (let p = i - 1; p >= Math.max(0, i - 2); p -= 1) {
    windows[p].push(depth);
  }
});

/** Sum values in windows of at least 3 depths */
// Windows near the end of the input never reach 3 elements and are dropped.
const windowSums = windows
  .filter(w => w.length === 3)
  .map(w => w.reduce((p, c) => p + c, 0));

console.log(`Part 1: ${countIncreases(depths)}`);
console.log(`Part 2: ${countIncreases(windowSums)}`);
import numpy as np
import matplotlib.pyplot as plt

def customize_plot_ticks(a_plot, p_plot, wl):
    """Create a figure whose axis ticks are labelled from ``a_plot``/``p_plot``.

    X ticks show ``log10(a_plot)`` (rotated 90°, 2 decimals); y ticks show
    ``p_plot`` rounded to 2 decimals. The colorbar is labelled with an opacity
    symbol parameterised by wavelength ``wl`` (in cm).

    NOTE(review): ``plt.colorbar()`` is called right after ``plt.subplots()``
    with no mappable (no image/scatter has been drawn), which raises a
    RuntimeError in matplotlib — presumably a mappable is expected to exist
    or be passed in; TODO confirm intended usage with the caller.

    :param a_plot: sequence of grain sizes used for the x-tick labels
    :param p_plot: sequence of values used for the y-tick labels
    :param wl: wavelength (cm) interpolated into the colorbar label
    :returns: the ``(fig, ax)`` pair from ``plt.subplots()``
    """
    fig, ax = plt.subplots()
    cbar = plt.colorbar()
    cbar.set_label(r"$\chi_{"+str(wl)+"\mathrm{cm}} [\mathrm{cm}^2/\mathrm{g}_{\mathrm{dust}}]$")
    # One tick per element, labelled with log10 of the value.
    plt.xticks(np.arange(len(a_plot)), np.round_(np.log10(a_plot), decimals=2))
    plt.yticks(np.arange(len(p_plot)), np.round_(p_plot, decimals=2))
    plt.xticks(rotation=90)
    plt.xlabel('$\log (a_{\mathrm{max}} [\mathrm{cm}])$')
    plt.show()
    return fig, ax
#!/bin/bash
# Bootstrap the crypto_sim databases: create the main DB, clone it into a
# test DB, and snapshot both as SQL dumps for the test framework.
# Requires DB_ROOT_PASSWORD in the environment and a reachable tnb-mysql host.

# TODO - Remove all references to database password/usernames

# Create main database, create tables, create empty test database
mysql -u root -p${DB_ROOT_PASSWORD} -h tnb-mysql < sqlCreateDBs.sql

# Dump main database
mysqldump -u root -p${DB_ROOT_PASSWORD} -h tnb-mysql crypto_sim > dumps/crypto_sim-dump.sql

# Copy main database to create test database using dump
mysql -u root -p${DB_ROOT_PASSWORD} -h tnb-mysql crypto_sim_test < dumps/crypto_sim-dump.sql

# Create dump of test database to be used by testing framework to repopulate
mysqldump -u root -p${DB_ROOT_PASSWORD} -h tnb-mysql crypto_sim_test > dumps/crypto_sim_test-dump.sql
'use strict';

const YQL = require('yql');

/**
 * Look up weather for a location via Yahoo's YQL weather API.
 *
 * @param {string} location - free-text place name resolved via geo.places
 * @param {string} [type='forecast'] - 'current' selects only the current
 *   condition and location; anything else selects the full forecast record
 * @returns {Promise<Object>} resolves with the raw YQL response, rejects on error
 */
const getWeather = (location, type = 'forecast') => {
  const fields = type === 'current' ? 'item.condition, location' : '*';
  const statement =
    `select ${fields} ` +
    `from weather.forecast where woeid in (select woeid from geo.places(1) where ` +
    `text = "${location}") and u="c"`;

  return new Promise((resolve, reject) => {
    new YQL(statement).exec((error, response) => {
      if (error) {
        reject(error);
      } else {
        resolve(response);
      }
    });
  });
};

module.exports = getWeather;
/**
 * Element-wise sum of two equally sized numeric arrays.
 *
 * @param {number[]} arr1 - first addend array
 * @param {number[]} arr2 - second addend array, same length as arr1
 * @returns {number[]} new array where result[i] === arr1[i] + arr2[i]
 * @throws {Error} when the arrays differ in length
 */
function sumArrays(arr1, arr2) {
  if (arr1.length !== arr2.length) {
    throw new Error('Arrays must be of same length');
  }
  return arr1.map((element, index) => element + arr2[index]);
}
import java.util.List; import java.util.NoSuchElementException; class Pet { private long id; private String name; private long ownerId; public Pet(long id, String name, long ownerId) { this.id = id; this.name = name; this.ownerId = ownerId; } // Getters and setters for id, name, and ownerId // ... @Override public String toString() { return "Pet{" + "id=" + id + ", name='" + name + '\'' + ", ownerId=" + ownerId + '}'; } } class PetService { private PetRepository petRepository; public PetService(PetRepository petRepository) { this.petRepository = petRepository; } public Pet findPetById(long id) { return petRepository.findPetById(id) .orElseThrow(() -> new NoSuchElementException("Pet with id:" + id + " can not be found")); } public Pet savePet(Pet pet) { return petRepository.save(pet); } public List<Pet> getPetsByOwnerId(long ownerId) { return petRepository.findAllPetsByCustomerId(ownerId); } } interface PetRepository { java.util.Optional<Pet> findPetById(long id); Pet save(Pet pet); List<Pet> findAllPetsByCustomerId(long ownerId); }
<reponame>schinmayee/nimbus //##################################################################### // Copyright 2006-2009, <NAME>, <NAME>, <NAME>. // This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt. //##################################################################### #include <PhysBAM_Tools/Matrices/DIAGONAL_MATRIX_3X3.h> #include <PhysBAM_Tools/Matrices/MATRIX_0X0.h> #include <PhysBAM_Tools/Matrices/MATRIX_1X1.h> #include <PhysBAM_Tools/Read_Write/Arrays/READ_WRITE_ARRAY.h> #include <PhysBAM_Tools/Read_Write/Point_Clouds/READ_WRITE_POINT_CLOUD.h> #include <PhysBAM_Geometry/Solids_Geometry/RIGID_GEOMETRY.h> #include <PhysBAM_Solids/PhysBAM_Rigids/Particles/RIGID_BODY_PARTICLES.h> #include <PhysBAM_Solids/PhysBAM_Rigids/Particles/RIGIDS_PARTICLES_FORWARD.h> namespace PhysBAM{ //##################################################################### // Constructor //##################################################################### template<class TV> RIGID_BODY_PARTICLES<TV>:: RIGID_BODY_PARTICLES(ARRAY_COLLECTION* array_collection_input) :angular_momentum(0,0),mass(0,0),inertia_tensor(0,0),kinematic(0,0) { delete array_collection;array_collection=array_collection_input; Initialize_Array_Collection(); } //##################################################################### // Constructor //##################################################################### template<class TV> RIGID_BODY_PARTICLES<TV>:: RIGID_BODY_PARTICLES() :angular_momentum(0,0),mass(0,0),inertia_tensor(0,0),kinematic(0,0) { Initialize_Array_Collection(); } //##################################################################### // Destructor //##################################################################### template<class TV> RIGID_BODY_PARTICLES<TV>:: ~RIGID_BODY_PARTICLES() { Delete_All_Particles(); } //##################################################################### // Function 
Initialize_Array_Collection //##################################################################### template<class TV> void RIGID_BODY_PARTICLES<TV>:: Initialize_Array_Collection() { RIGID_GEOMETRY_PARTICLES<TV>::Initialize_Array_Collection(); array_collection->Add_Array(ATTRIBUTE_ID_ANGULAR_MOMENTUM,&angular_momentum); array_collection->Add_Array(ATTRIBUTE_ID_RIGID_MASS,&mass); array_collection->Add_Array(ATTRIBUTE_ID_RIGID_INERTIA_TENSOR,&inertia_tensor); array_collection->Add_Array(ATTRIBUTE_ID_KINEMATIC,&kinematic); } //##################################################################### template class RIGID_BODY_PARTICLES<VECTOR<float,1> >; template class RIGID_BODY_PARTICLES<VECTOR<float,2> >; template class RIGID_BODY_PARTICLES<VECTOR<float,3> >; #ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT template class RIGID_BODY_PARTICLES<VECTOR<double,1> >; template class RIGID_BODY_PARTICLES<VECTOR<double,2> >; template class RIGID_BODY_PARTICLES<VECTOR<double,3> >; #endif }
# Run AAUNet inference on the pothole600 dataset using RGB input and the
# checkpoint tagged "aaunet_rgb"; experiment/output name is "pothole".
python3 test.py --dataroot datasets/pothole600 --dataset pothole --name pothole --model aaunet --input rgb --epoch aaunet_rgb
<filename>src/components/daily/bolustooltip/BolusTooltip.js /* * == BSD2 LICENSE == * Copyright (c) 2016, Tidepool Project * * This program is free software; you can redistribute it and/or modify it under * the terms of the associated License, which is identical to the BSD 2-Clause * License as published by the Open Source Initiative at opensource.org. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the License for more details. * * You should have received a copy of the License along with this program; ifg * not, you can obtain one from Tidepool Project at tidepool.org. * == BSD2 LICENSE == */ import React from 'react'; import PropTypes from 'prop-types'; import _ from 'lodash'; import * as bolusUtils from '../../../utils/bolus'; import { formatDuration } from '../../../utils/datetime'; import { formatInsulin, formatBgValue, formatInputTime } from '../../../utils/format'; import { getAnnotationMessages } from '../../../utils/annotations'; import Tooltip from '../../common/tooltips/Tooltip'; import colors from '../../../styles/colors.css'; import styles from './BolusTooltip.css'; import i18next from 'i18next'; const t = i18next.t.bind(i18next); class BolusTooltip extends React.Component { formatBgValue(val) { return formatBgValue(val, this.props.bgPrefs); } isAnimasExtended() { const annotations = bolusUtils.getAnnotations(this.props.bolus); const isAnimasExtended = _.findIndex(annotations, { code: 'animas/bolus/extended-equal-split' }) !== -1; return isAnimasExtended; } animasExtendedAnnotationMessage() { let content = null; if (this.isAnimasExtended()) { const messages = getAnnotationMessages(bolusUtils.getBolusFromInsulinEvent(this.props.bolus)); content = ( <div className={styles.annotation}> {_.find(messages, { code: 'animas/bolus/extended-equal-split' }).message.value} </div> ); } return content; } getTarget() { const 
wizardTarget = _.get(this.props.bolus, 'bgTarget'); const target = _.get(wizardTarget, 'target', null); const targetLow = _.get(wizardTarget, 'low', null); const targetHigh = _.get(wizardTarget, 'high', null); const targetRange = _.get(wizardTarget, 'range', null); const isAutomatedTarget = _.findIndex(_.get(this.props.bolus, 'annotations', []), { code: 'wizard/target-automated', }) !== -1; if (isAutomatedTarget) { return ( <div className={styles.target}> <div className={styles.label}>{t('Target')}</div> <div className={styles.value}>{t('Auto')}</div> <div className={styles.units} /> </div> ); } if (targetLow) { // medtronic let value; if (targetLow === targetHigh) { value = `${this.formatBgValue(targetLow)}`; } else { value = `${this.formatBgValue(targetLow)}-${this.formatBgValue(targetHigh)}`; } return ( <div className={styles.target}> <div className={styles.label}>{t('Target')}</div> <div className={styles.value}>{value}</div> <div className={styles.units} /> </div> ); } if (targetRange) { // animas return [ <div className={styles.target} key={'target'}> <div className={styles.label}>{t('Target')}</div> <div className={styles.value}>{`${this.formatBgValue(target)}`}</div> <div className={styles.units} /> </div>, <div className={styles.target} key={'range'}> <div className={styles.label}>{t('Range')}</div> <div className={styles.value}>{`${this.formatBgValue(targetRange)}`}</div> <div className={styles.units} /> </div>, ]; } if (targetHigh) { // insulet return [ <div className={styles.target} key={'target'}> <div className={styles.label}>{t('Target')}</div> <div className={styles.value}>{`${this.formatBgValue(target)}`}</div> <div className={styles.units} /> </div>, <div className={styles.target} key={'high'}> <div className={styles.label}>{t('High')}</div> <div className={styles.value}>{`${this.formatBgValue(targetHigh)}`}</div> <div className={styles.units} /> </div>, ]; } // tandem return ( <div className={styles.target}> <div 
className={styles.label}>{t('Target')}</div> <div className={styles.value}>{`${this.formatBgValue(target)}`}</div> <div className={styles.units} /> </div> ); } getExtended() { const bolus = bolusUtils.getBolusFromInsulinEvent(this.props.bolus); const hasExtended = bolusUtils.hasExtended(bolus); const normalPercentage = bolusUtils.getNormalPercentage(bolus); const normal = _.get(bolus, 'normal', NaN); const isAnimasExtended = this.isAnimasExtended(); const extendedPercentage = _.isNaN(bolusUtils.getExtendedPercentage(bolus)) ? '' : `(${bolusUtils.getExtendedPercentage(bolus)})`; let extendedLine = null; if (hasExtended) { if (isAnimasExtended) { extendedLine = ( <div className={styles.extended}> <div className={styles.label}>Extended Over*</div> <div className={styles.value}>{formatDuration(bolusUtils.getDuration(bolus))}</div> </div> ); } else { extendedLine = [ !!normal && ( <div className={styles.normal} key={'normal'}> <div className={styles.label}> {t('Up Front ({{normalPercentage}})', { normalPercentage })} </div> <div className={styles.value}>{`${formatInsulin(normal)}`}</div> <div className={styles.units}>U</div> </div> ), <div className={styles.extended} key={'extended'}> <div className={styles.label}> {`Over ${formatDuration(bolusUtils.getDuration(bolus))} ${extendedPercentage}`} </div> <div className={styles.value}> {`${formatInsulin(bolusUtils.getExtended(bolus))}`} </div> <div className={styles.units}>U</div> </div>, ]; } } return extendedLine; } getBolusTypeLine(bolusType) { return bolusType && ( <div className={styles.bolus}> <div className={styles.label}>{t('bolus_type')}</div> <div className={styles.value}>{t(`bolus_${bolusType}`)}</div> </div> ); } getIobLine(iob) { return !!iob && ( <div className={styles.iob}> <div className={styles.label}>{t('IOB')}</div> <div className={styles.value}>{formatInsulin(iob)}</div> <div className={styles.units}>U</div> </div> ); } getPrescriptorLine(prescriptor) { return prescriptor && prescriptor !== 'manual' && ( 
<div className={styles.prescriptor}> <div className={styles.label}>{t('Prescribed by Loop Mode')}</div> </div> ); } renderWizard() { const { bolus, timePrefs } = this.props; // to be renamed const wizard = bolus; const recommended = bolusUtils.getRecommended(wizard); const suggested = _.isFinite(recommended) ? recommended : null; const prescriptor = _.get(wizard, 'bolus.prescriptor', null); const inputTime = _.get(wizard, 'inputTime', null); const bolusType = _.get(wizard, 'bolus.subType', null); const fatMeal = _.get(wizard, 'inputMeal.fat', null); const bg = _.get(wizard, 'bgInput', null); const iob = _.get(wizard, 'insulinOnBoard', null); const carbs = bolusUtils.getCarbs(wizard); const carbsInput = _.isFinite(carbs) && carbs > 0; const carbRatio = _.get(wizard, 'insulinCarbRatio', null); const isf = _.get(wizard, 'insulinSensitivity', null); const delivered = bolusUtils.getDelivered(wizard); const isInterrupted = bolusUtils.isInterruptedBolus(wizard); const programmed = bolusUtils.getProgrammed(wizard); const hasExtended = bolusUtils.hasExtended(wizard); const isAnimasExtended = this.isAnimasExtended(); let overrideLine = null; if (bolusUtils.isOverride(wizard)) { overrideLine = ( <div className={styles.override}> <div className={styles.label}>{t('Override')}</div> <div className={styles.value}>{`+${formatInsulin(programmed - recommended)}`}</div> <div className={styles.units}>U</div> </div> ); } if (bolusUtils.isUnderride(wizard)) { overrideLine = ( <div className={styles.override}> <div className={styles.label}>{t('Underride')}</div> <div className={styles.value}>{`-${formatInsulin(recommended - programmed)}`}</div> <div className={styles.units}>U</div> </div> ); } const deliveredLine = _.isFinite(delivered) && ( <div className={styles.delivered}> <div className={styles.label}>{t('Delivered')}</div> <div className={styles.value}>{`${formatInsulin(delivered)}`}</div> <div className={styles.units}>U</div> </div> ); const suggestedLine = (isInterrupted || 
overrideLine) && suggested !== null && ( <div className={styles.suggested}> <div className={styles.label}>{t('Recommended')}</div> <div className={styles.value}>{formatInsulin(suggested)}</div> <div className={styles.units}>U</div> </div> ); const bgLine = !!bg && ( <div className={styles.bg}> <div className={styles.label}>{t('BG')}</div> <div className={styles.value}>{this.formatBgValue(bg)}</div> <div className={styles.units} /> </div> ); const carbsLine = !!carbs && ( <div className={styles.carbs}> <div className={styles.label}>{t('Carbs')}</div> <div className={styles.value}>{carbs}</div> <div className={styles.units}>g</div> </div> ); const iobLine = this.getIobLine(iob); const interruptedLine = isInterrupted && ( <div className={styles.interrupted}> <div className={styles.label}>{t('Interrupted')}</div> <div className={styles.value}>{`-${formatInsulin(programmed - delivered)}`}</div> <div className={styles.units}>U</div> </div> ); const icRatioLine = !!carbsInput && !!carbRatio && ( <div className={styles.carbRatio}> <div className={styles.label}>{t('I:C Ratio')}</div> <div className={styles.value}>{`1:${carbRatio}`}</div> <div className={styles.units} /> </div> ); const isfLine = !!isf && !!bg && ( <div className={styles.isf}> <div className={styles.label}>{t('ISF')}</div> <div className={styles.value}>{`${this.formatBgValue(isf)}`}</div> <div className={styles.units} /> </div> ); const bolusTypeLine = this.getBolusTypeLine(bolusType); const prescriptorLine = this.getPrescriptorLine(prescriptor); const mealLine = fatMeal && ( <div className={styles.fat}> <div className={styles.label}>{t('High fat meal')}</div> </div> ); const inputLine = inputTime && ( <div className={styles.input}> <div className={styles.label}>{t('Entered at')} {formatInputTime(bolus.inputTime, timePrefs)}</div> </div> ); return ( <div className={styles.container}> {bgLine} {carbsLine} {mealLine} {inputLine} {iobLine} {(prescriptorLine || bolusTypeLine || suggestedLine) && <div 
className={styles.dividerSmall} />} {prescriptorLine} {bolusTypeLine} {suggestedLine} {this.getExtended()} {(overrideLine) && <div className={styles.dividerSmall} />} {overrideLine} {interruptedLine} {deliveredLine} {(icRatioLine || isfLine || bg || isAnimasExtended) && ( <div className={styles.dividerLarge} /> )} {icRatioLine} {isfLine} {!!bg && this.getTarget()} {this.animasExtendedAnnotationMessage()} </div> ); } renderNormal() { const bolus = this.props.bolus; const prescriptor = _.get(bolus, 'prescriptor', null); const bolusType = _.get(bolus, 'subType', null); const iob = _.get(bolus, 'insulinOnBoard', null); const delivered = bolusUtils.getDelivered(bolus); const isInterrupted = bolusUtils.isInterruptedBolus(bolus); const programmed = bolusUtils.getProgrammed(bolus); const isAnimasExtended = this.isAnimasExtended(); const deliveredLine = _.isFinite(delivered) && ( <div className={styles.delivered}> <div className={styles.label}>{t('Delivered')}</div> <div className={styles.value}>{`${formatInsulin(delivered)}`}</div> <div className={styles.units}>U</div> </div> ); const interruptedLine = isInterrupted && ( <div className={styles.interrupted}> <div className={styles.label}>{t('Interrupted')}</div> <div className={styles.value}>{`-${formatInsulin(programmed - delivered)}`}</div> <div className={styles.units}>U</div> </div> ); const programmedLine = isInterrupted && !!programmed && ( <div className={styles.programmed}> <div className={styles.label}>{t('Programmed')}</div> <div className={styles.value}>{`${formatInsulin(programmed)}`}</div> <div className={styles.units}>U</div> </div> ); const bolusTypeLine = this.getBolusTypeLine(bolusType); const iobLine = this.getIobLine(iob); const prescriptorLine = this.getPrescriptorLine(prescriptor); return ( <div className={styles.container}> {programmedLine} {interruptedLine} {bolusTypeLine} {iobLine} {prescriptorLine} {deliveredLine} {this.getExtended()} {isAnimasExtended && <div className={styles.dividerLarge} />} 
{this.animasExtendedAnnotationMessage()} </div> ); } renderBolus() { let content; if (this.props.bolus.type === 'wizard') { content = this.renderWizard(); } else { content = this.renderNormal(); } return content; } render() { const { bolus, timePrefs } = this.props; const dateTitle = { source: _.get(bolus, 'source', 'tidepool'), normalTime: bolus.normalTime, timezone: _.get(bolus, 'timezone', 'UTC'), timePrefs, }; return ( <Tooltip {...this.props} dateTitle={dateTitle} content={this.renderBolus()} /> ); } } BolusTooltip.propTypes = { position: PropTypes.shape({ top: PropTypes.number.isRequired, left: PropTypes.number.isRequired, }).isRequired, offset: PropTypes.shape({ top: PropTypes.number.isRequired, left: PropTypes.number, horizontal: PropTypes.number, }), tail: PropTypes.bool.isRequired, side: PropTypes.oneOf(['top', 'right', 'bottom', 'left']).isRequired, tailColor: PropTypes.string.isRequired, tailWidth: PropTypes.number.isRequired, tailHeight: PropTypes.number.isRequired, backgroundColor: PropTypes.string, borderColor: PropTypes.string.isRequired, borderWidth: PropTypes.number.isRequired, bolus: PropTypes.shape({ type: PropTypes.string.isRequired, inputTime: PropTypes.string, }).isRequired, bgPrefs: PropTypes.object.isRequired, timePrefs: PropTypes.object.isRequired, }; BolusTooltip.defaultProps = { tail: true, side: 'right', tailWidth: 9, tailHeight: 17, tailColor: colors.bolus, borderColor: colors.bolus, borderWidth: 2, }; export default BolusTooltip;
// Angular component that renders a curriculum-style table of contents (TOC):
// chapter accordions, mime-type based filtering and per-item progress bars.
import { Component, OnInit, Input, ViewChild, ViewChildren, QueryList, Output, EventEmitter } from '@angular/core';
import { MimeTypePipe, MimeTypeMasterData } from '../../pipes-module/mime-type';
import {staticData} from './toc-data';
import { isNgTemplate } from '@angular/compiler';

@Component({
  selector: 'sb-toc-curriculum',
  templateUrl: './toc-curriculum.component.html',
  styleUrls: ['./toc-curriculum.component.scss', '../../layout/library-cards-grid/library-cards-grid.component.scss', '../../card/library-card/library-card.component.scss']
})
export class TocCurriculumComponent implements OnInit {
  // Root of the TOC tree; falls back to bundled demo data.
  @Input() tocData = staticData;
  // Mime types to display; the special value ['all'] disables filtering.
  @Input() activeMimeTypeFilter = ['all'];
  @Input() noContentMessage = 'No content available';
  @Input() isAccordion: boolean = false;
  @Input() isChapterListing: boolean = false;
  @Input() type = undefined;
  @Input() contentStatus = [];
  @Input() shadowColor = "hsl(0, 0%, 80%)";
  @Input() progressColor = "#024f9d";
  @Input() refresh: boolean;
  @Input() layoutConfig = {};
  @Input() playBtnConfig;
  @Input() platform;
  @Input() trackableDefaultImage;
  @Input() maxAttempts;
  @Input() disabled = '';
  @ViewChild('chapter',{static:false}) divs: QueryList<any>;
  @ViewChildren('chapterContainer') chapterContainer: QueryList<any>;
  @Input() activeContent;
  // Emitted on chapter/card/play-button clicks; payload carries the click
  // event plus either the clicked item or its ancestor rollup chain.
  @Output() tocCardClick: EventEmitter<any> = new EventEmitter();
  // Emitted when the TOC contains no playable (non-collection) content.
  @Output() noContent: EventEmitter<any> = new EventEmitter();
  @Input() scoreLabel = '';
  get MimeTypeMasterData() { return MimeTypeMasterData; }
  isMimeTypeFilterChanged = false;
  // True when any active filter mime type appears among the keys of the
  // JSON-encoded mimeTypesCount map carried by a collection node.
  private isSameMimeTypeInChildren = ((mimeTypesCount, activeMimeType) => {
    const contentMimeType = Object.keys(JSON.parse(mimeTypesCount));
    return Boolean(activeMimeType.filter(value => contentMimeType.includes(value)).length);
  });
  // True when the content's own mime type is in the active filter list.
  private isSameMimeType = (currentContent) => this.activeMimeTypeFilter.find(mimeType => mimeType === currentContent.mimeType);
  private isChildrenPresent = (currentContent) => Boolean(currentContent.children && currentContent.children.length);

  constructor() { }

  ngOnInit() {
    this.setActiveContent();
  }

  // NOTE(review): ngOnChanges is defined but the class declares only
  // `implements OnInit` — confirm the OnChanges hook fires as intended.
  ngOnChanges(changes) {
    if (changes.activeMimeTypeFilter) {
      this.isMimeTypeFilterChanged = false;
    } else if (changes.tocData) {
      this.setActiveContent();
    }
  }

  // Selects the first non-collection node in the tree as the active content,
  // or emits `noContent` when no such node exists.
  setActiveContent() {
    if (this.tocData && this.tocData.children) {
      const flattenDeepContents = this.flattenDeep(this.tocData.children);
      this.activeContent = this.firstNonCollectionContent(flattenDeepContents);
      if (!this.activeContent) {
        this.noContent.emit({ message: 'No Content Available' });
      }
    }
  }

  // Template predicate: should this node be rendered under the current
  // mime-type filter?
  public filterChildren(content) {
    // Check for the ActiveMimeType
    if (this.activeMimeTypeFilter.includes('all')) {
      if (content.mimeType === MimeTypeMasterData.COLLECTION) {
        return this.isShowContent(content, true);
      }
      return true; // Return true for all the content except collection mimeType
    } else if (content.mimeType && this.isSameMimeType(content)) {
      return true;
    } else if (content.mimeType === MimeTypeMasterData.COLLECTION) {
      return this.isShowContent(content, false);
    } else {
      return false;
    }
  }

  // Decides whether a collection node is visible: either some descendant is a
  // leaf (filter = 'all'), or some descendant matches the active filter.
  private isShowContent(content, isActiveFilterAll) {
    if (content.mimeTypesCount && !isActiveFilterAll) {
      return this.isSameMimeTypeInChildren(content.mimeTypesCount, this.activeMimeTypeFilter);
    } else if (this.isChildrenPresent(content)) {
      const contentList = this.flattenDeep(content.children);
      return isActiveFilterAll ?
        contentList.some((c) => c.mimeType !== MimeTypeMasterData.COLLECTION) :
        contentList.some((c) => this.isSameMimeType(c));
    } else {
      return false;
    }
  }

  public onTocChapterClick(event,item) {
    /*const rollup = this.getRollup(this.tocData, event.data.sbUniqueIdentifier);
    if (rollup.length) {
      rollup.pop();
    }*/
    this.tocCardClick.emit({ ...event, item });
  }

  public onTocCardClick(event) {
    const rollup = this.getRollup(this.tocData, event.data.sbUniqueIdentifier);
    if (rollup.length) {
      // Drop the clicked node's own id so rollup holds ancestors only.
      rollup.pop();
    }
    this.tocCardClick.emit({ ...event, rollup });
  }

  // Intentionally empty accordion collapse hook.
  public collapsedChangeHandler(event) {
  }

  private firstNonCollectionContent(contents) {
    return contents.find((content) => content.mimeType !== 'application/vnd.ekstep.content-collection');
  }

  // Depth-first flatten of a content tree; parent nodes precede their
  // children in the result. Returns undefined for a falsy input.
  private flattenDeep(contents) {
    if (contents) {
      return contents.reduce((acc, val) => {
        if (val.children) {
          acc.push(val);
          return acc.concat(this.flattenDeep(val.children));
        } else {
          return acc.concat(val);
        }
      }, []);
    }
  }

  // First chapter is expanded by default; non-collection items are always
  // treated as expanded.
  isExpanded(index: number, item) {
    return Boolean(index === 0 || item && item.mimeType !== MimeTypeMasterData.COLLECTION);
  }

  // Template predicate controlling chapter-body visibility; also mutates
  // isMimeTypeFilterChanged so the last chapter can act as a fallback match.
  isShowBody(item, index) {
    if (item) {
      const isShowAllMimeType = () => this.activeMimeTypeFilter.indexOf('all') > -1;
      const isCollection = () => item.mimeType === MimeTypeMasterData.COLLECTION;
      const isMathchesMimeType = () => this.activeMimeTypeFilter.indexOf(item.mimeType) > -1;
      const isLastContent = () => this.tocData.children.length === index + 1;
      if (this.tocData && isShowAllMimeType() || (isCollection() || isMathchesMimeType())) {
        this.isMimeTypeFilterChanged = true;
        return true;
      } else if (this.activeMimeTypeFilter.indexOf(item.mimeType) < 0 && !isLastContent()) {
        return false;
      } else if (this.activeMimeTypeFilter.indexOf(item.mimeType) < 0 && isLastContent()) {
        if (this.isMimeTypeFilterChanged) {
          return false;
        }
        this.isMimeTypeFilterChanged = false;
        return true;
      }
    }
    return false;
  }

  // Returns the chain of identifiers from `tree` down to the node whose
  // sbUniqueIdentifier matches, or [] when the node is not in this subtree.
  getRollup(tree, identifier) {
    let rollup = [tree.identifier];
    if (tree.sbUniqueIdentifier === identifier) {
      return rollup;
    }
    if (!tree.children || !tree.children.length) {
      return [];
    }
    let notDone = true;
    let childRollup: any;
    let index = 0;
    while (notDone && tree.children[index]) {
      childRollup = this.getRollup(tree.children[index], identifier);
      if (childRollup && childRollup.length) {
        notDone = false;
      }
      index++;
    }
    if (childRollup && childRollup.length) {
      rollup.push(...childRollup);
      return rollup;
    } else {
      return [];
    }
  }

  // Mutates `item`, composing a unique id from the parent and item ids.
  createUniqueId(tocData, item) {
    item['sbUniqueIdentifier'] = tocData.identifier + '_' + item.identifier;
  }

  // Inline style for the progress bar; a 2% sliver is shown when no
  // progress value is present.
  fetchProgress(item?) {
    var bgColor = this.progressColor;
    let widthStyle = item.progressPercentage!=null?item.progressPercentage+"%":"2%";
    return { width: widthStyle, 'background-color':bgColor };
  }

  showCompleted(item?) {
    if(item!=null && item.progressPercentage!=null && item.progressPercentage === 100) {
      return true;
    }
    return false;
  }

  // Inline style for the progress-bar track; hidden until progress starts.
  fetchProgressShadow(item) {
    var bgColor = this.shadowColor;
    let displayStatus = item.progressPercentage!=null && item.progressPercentage != "0"?'block':'none';
    return { 'background':bgColor, display: displayStatus };
  }

  onPlayButtonClick(event) {
    const rollup = this.getRollup(this.tocData, event.data.sbUniqueIdentifier);
    if (rollup.length) {
      rollup.pop();
    }
    this.tocCardClick.emit({ ...event, rollup });
  }
}
#!/usr/bin/env bash
# Run the tox test matrix inside the themattrix/tox Docker image, mounting the
# current working directory at $MOUNT_FOLDER inside the container.

MOUNT_FOLDER=/app

# Quote "$(pwd)" so working directories containing spaces survive word
# splitting (the unquoted form broke the -v bind-mount argument).
docker run --rm -it -v "$(pwd):$MOUNT_FOLDER" -w "$MOUNT_FOLDER" themattrix/tox
<reponame>hetzz/WebSafe<filename>flask/app/static/mail.js
// Client-side logic for the WebSafe mail mini-game: star/read toggles,
// syncing mail state to the server, scoring player actions, and toasts.
// NOTE(review): most variables below (conns, r, data, type, skills, ...) are
// assigned without var/let and so become implicit globals — confirm intended.

// Toggle the star icon between filled and outline, then sync the mail.
$(document).on('click', '.star', function (e) {
    // console.log(this);
    if ($(this).children().html() == 'star') {
        $(this).children().html('star_border');
    } else {
        $(this).children().html('star');
    }
    update_mail($(this).parent().parent());
});

// Expand/collapse a mail, mark it read, then sync it.
$(document).on('click', '.mail-header', function (e) {
    // console.log(this)
    $(this).next().toggleClass('hide');
    $(this).addClass('read').removeClass('unread');
    update_mail($(this).parent());
});

// POST the given mail's current state to /update_mail and, when the server
// responds with a mail payload, append it to the list as a new mail.
// Also jitters the displayed active-connections counter for effect.
function update_mail(mail) {
    conns = parseInt($('.server-info').find('.active-conns').text());
    // Random jitter in [-50, 50), floored at a minimum display value of 10.
    r = parseInt((Math.random() - 0.5) * 100);
    // console.log(conns, r);
    $('.server-info').find('.active-conns').text(Math.max(conns + r, 10));
    data = {
        id: mail.attr('id'),
        starred: $(mail).find('.star').text(),
        read_status: 'read',
        attack: $(mail).find('.mail-attack').text(),
        fazer: parseInt($('.skill2').find('.progress-bar').text()),
        skills: parseInt($('.skill1').find('.progress-bar').text()),
    }
    $.post('/update_mail', data, function(res){
        mail_html = document.createElement('div');
        mail_html.setAttribute('class', "mail");
        mail_html.setAttribute('id', res.id);
        mail_html.innerHTML = ` <div class="mail-header ${res.read_status}"> <div class="mail-attack hide">${res.attack}</div> <div class="star"><i class="material-icons"> star_border </i> </div> <div class="sender">${res.sender}</div> <div class="mail-subject">${res.subject}</div> <div class="mail-time">${res.time}</div> </div> <div class="mail-content hide"> <div class="mail-info">From ${res.sender} &lt;${res.email}&gt; to you at ${res.time}</div> <div class="mail-body"> ${res.content} </div> <div class="action-buttons"> <i class="material-icons action1" onclick="hint(${res.id})" data-toggle="tooltip" title="Investigate"> account_tree </i> <i class="material-icons action2" onclick="cancel(${res.id})" data-toggle="tooltip" title="Ignore Mail"> cancel </i> <i class="material-icons action3" onclick="defend(${res.id})" data-toggle="tooltip" title="Take Suggested Actions"> adb </i> </div> </div> </div>`;
        console.log(res);
        mails_div = document.getElementsByClassName('mails')[0];
        if (res != 'false') {
            mails_div.appendChild(mail_html);
            showToast('New Mail Arrived!!');
        }
    });
}

// Show `text` in the #snackbar element for 3 seconds.
function showToast(text) {
    // Get the snackbar DIV
    var x = document.getElementById("snackbar");
    x.innerHTML = text;

    // Add the "show" class to DIV
    x.className = "show";
    console.log(text);

    // After 3 seconds, remove the show class from DIV
    setTimeout(function(){ x.className = x.className.replace("show", ""); }, 3000);
}

// Reward a correct action: +5 skills (capped at 100), +10 fazer (uncapped).
function increaseScore() {
    skills = parseInt($('.skill1').find('.progress-bar').text());
    fazer = parseInt($('.skill2').find('.progress-bar').text());
    skills += 5;
    fazer += 10;
    if (skills >= 100) {
        skills = 100
        showToast("Yayayay! You have completed the level");
    }
    $('.skill1').find('.progress-bar').text(skills);
    $('.skill1').find('.progress-bar').css('width', skills + '%')
    $('.skill2').find('.progress-bar').text(fazer);
    $('.skill2').find('.progress-bar').css('width', fazer + '%')
}

// Penalise a wrong action: -5 skills (floored at 0), -10 fazer.
function decreaseScore() {
    skills = parseInt($('.skill1').find('.progress-bar').text());
    fazer = parseInt($('.skill2').find('.progress-bar').text());
    skills -= 5;
    fazer -= 10;
    if (skills <= 0) {
        skills = 0
        showToast("The game will be restarted");
    }
    $('.skill1').find('.progress-bar').text(skills);
    $('.skill1').find('.progress-bar').css('width', skills + '%')
    $('.skill2').find('.progress-bar').text(fazer);
    $('.skill2').find('.progress-bar').css('width', fazer + '%')
}

// "Ignore Mail" action: correct when the mail was an attack (type True).
// The hidden .mail-attack text holds 'True'/'False' or 'done' once handled.
function cancel(mail_id) {
    type = document.getElementById(mail_id).children[0].children[0].innerHTML;
    console.log(type)
    if (type=='False' || type=='false') {
        showToast('Oh ho! That was a legit mail, the client is not happy.');
        decreaseScore();
    } else if (type=='True' || type=='true'){
        showToast('Great! you just avoided a malicious attack!');
        increaseScore();
    } else {
        showToast('Action for this mail has been already taken');
        return
    }
    $(`#${mail_id}`).find('.mail-attack').text('done');
    update_mail($(`#${mail_id}`));
}

// "Take Suggested Actions": correct when the mail was legit (type False).
function defend(mail_id) {
    type = document.getElementById(mail_id).children[0].children[0].innerHTML;
    if (type == 'True' || type =='true') {
        showToast('Sad! you just falled for a malicious mail');
        decreaseScore();
    } else if (type == 'False' || type =='false'){
        showToast('Cool! You took action on user request');
        increaseScore();
    } else {
        showToast('Action for this mail has been already taken');
        return
    }
    $(`#${mail_id}`).find('.mail-attack').text('done');
    update_mail($(`#${mail_id}`));
}

// "Investigate": always costs score and shows an informational toast.
function hint(mail_id) {
    type = document.getElementById(mail_id).children[0].children[0].innerHTML;
    showToast('More Information regarding such attacks can be found at: ');
    decreaseScore();
    update_mail($(`#${mail_id}`));
}

// Minified third-party jQuery Bootstrap-toast plugin ($.toast) — left as-is.
(function(b){b.toast=function(a,h,g,l,k){b("#toast-container").length||(b("body").prepend('<div id="toast-container" aria-live="polite" aria-atomic="true"></div>'),b("#toast-container").append('<div id="toast-wrapper"></div>'));var c="",d="",e="text-muted",f="",m="object"===typeof a?a.title||"":a||"Notice!";h="object"===typeof a?a.subtitle||"":h||"";g="object"===typeof a?a.content||"":g||"";k="object"===typeof a?a.delay||3E3:k||3E3;switch("object"===typeof a?a.type||"":l||"info"){case "info":c="bg-info"; f=e=d="text-white";break;case "success":c="bg-success";f=e=d="text-white";break;case "warning":case "warn":c="bg-warning";f=e=d="text-white";break;case "error":case "danger":c="bg-danger",f=e=d="text-white"}a='<div class="toast" role="alert" aria-live="assertive" aria-atomic="true" data-delay="'+k+'">'+('<div class="toast-header '+c+" "+d+'">')+('<strong class="mr-auto">'+m+"</strong>");a+='<small class="'+e+'">'+h+"</small>";a+='<button type="button" class="ml-2 mb-1 close" data-dismiss="toast" aria-label="Close">'; a+='<span aria-hidden="true" class="'+f+'">&times;</span>';a+="</button>";a+="</div>";""!==g&&(a+='<div class="toast-body">',a+=g,a+="</div>");a+="</div>";b("#toast-wrapper").append(a);b("#toast-wrapper .toast:last").toast("show")}})(jQuery);

// Fixture data for random demo toasts.
// NOTE(review): TITLES is defined but never referenced — confirm still needed.
const TYPES = ['info', 'warning', 'success', 'error'],
    TITLES = {
        'info': 'Notice!',
        'success': 'Awesome!',
        'warning': 'Watch Out!',
        'error': 'Doh!'
    },
    CONTENT = {
        'info': 'Hello, world! This is a toast message.',
        'success': 'The action has been completed.',
        'warning': 'It\'s all about to go wrong',
        'error': 'It all went wrong.'
    };

// Pop a random demo toast via the $.toast plugin above.
function show_random_snack() {
    let type = TYPES[Math.floor(Math.random() * TYPES.length)],
        content = CONTENT[type].replace('toast', 'snack');
    $.toast({
        title: content,
        type: type,
        delay: 5000
    });
}
<gh_stars>1-10
# Integration tests for building "jaspy" conda environments via the
# deployment shell scripts (install-miniconda.sh / install-jaspy-env.sh).
import os, shutil
import shlex, subprocess

# Shorthand for walking up directory levels.
dn = os.path.dirname

USER = os.environ.get('USER')
TEST_DIR = os.environ.get('JASPY_TEST_DIR', '/home/{}/jaspy-test'.format(USER))
# NOTE(review): this unconditionally overrides the env-configurable TEST_DIR
# above, making JASPY_TEST_DIR dead code — confirm whether this is a leftover
# hard-coded override.
TEST_DIR = '/apps/jasmin'

THIS_DIR = dn(os.path.realpath(__file__))
# The deployment scripts live two levels up from this test file.
SCRIPT_DIR = os.path.join(dn(dn(THIS_DIR)), 'deployment')


def _clear_base_dir(jaspy_base_dir):
    # Remove the directory tree if present (no-op otherwise).
    if os.path.isdir(jaspy_base_dir):
        shutil.rmtree(jaspy_base_dir)


def _run(cmd, jaspy_base_dir, use_func=False):
    """
    Run a command `cmd`, with JASPY_BASE_DIR environment variable set by `jaspy_base_dir`.

    :param cmd: Command string
    :param jaspy_base_dir: String to set as JASPY_BASE_DIR environment variable.
    :param use_func: Use other function in `subprocess` module instead of `subprocess.Popen` (default)
    :return: Response obje....
    """
    env = {'JASPY_BASE_DIR': jaspy_base_dir}
    args = shlex.split(cmd)
    print('Running command: {}'.format(cmd))

    if not use_func:
        # Default path: run without a shell and wait for completion.
        sp = subprocess.Popen(args, env=env, cwd=SCRIPT_DIR)
        sp.communicate()
    else:
        # use_func names a subprocess function (e.g. 'check_output'); those
        # calls run through bash so shell pipelines in `cmd` work.
        func = getattr(subprocess, use_func)
        sp = func(cmd, shell=True, executable='/bin/bash', env=env, cwd=SCRIPT_DIR)

    return sp


def _common_create_jaspy_env(py_version, env_name, conda_forge_count):
    """Build a jaspy env end-to-end and assert its conda-forge package count.

    :param py_version: e.g. 'py3.6' — selects the miniconda install.
    :param env_name: full env name, e.g. 'jaspy3.6-m3-4.5.4-rc3'.
    :param conda_forge_count: expected number of conda-forge packages.
    """
    jaspy_base_dir = TEST_DIR
    # Derive the miniconda code (e.g. 'm3-4.5.4') by stripping the leading
    # 'jaspyX.Y-' prefix and the trailing release tag from the env name.
    miniconda_code = env_name.split('-', 1)[1].replace(env_name.split('-')[-1], '').strip('-')

    # Clear out base dir
    mc_base_dir = os.path.join(jaspy_base_dir, 'jaspy/miniconda_envs', 'jas{}'.format(py_version), miniconda_code)
    _clear_base_dir(mc_base_dir)

    print('Download miniconda')
    cmd = '{} {}'.format(os.path.join(SCRIPT_DIR, 'install-miniconda.sh'), py_version)
    _run(cmd, jaspy_base_dir)
    assert(os.path.exists(os.path.join(jaspy_base_dir, 'jaspy/miniconda_envs', 'jas{}'.format(py_version))))

    print('Install a jaspy environment')
    cmd = '{} {}'.format(os.path.join(SCRIPT_DIR, 'install-jaspy-env.sh'), env_name)
    _run(cmd, jaspy_base_dir)
    assert(os.path.exists(os.path.join(jaspy_base_dir, 'jaspy/miniconda_envs', 'jas{}'.format(py_version), miniconda_code, 'envs', env_name)))

    # Test by activating and listing contents of current environment
    # NOTE(review): this bin path omits the 'jaspy/miniconda_envs' segment used
    # in the assertions above — confirm it resolves to the intended install.
    bin_dir = os.path.join(jaspy_base_dir, py_version, 'bin')
    cmd = 'export PATH={}:$PATH ; conda activate {} ; conda list | grep conda-forge | wc -l'.format(bin_dir, env_name)
    resp = _run(cmd, jaspy_base_dir, use_func='check_output')
    assert(resp.decode("utf-8").strip() == str(conda_forge_count))


# Disabled test (prefixed OFF so pytest skips collection).
def OFFtest_create_jaspy_env_py27():
    py_version = 'py2.7'
    env_name = 'jaspy2.7-m2-4.5.4-rc4'
    conda_forge_count = 350
    _common_create_jaspy_env(py_version, env_name, conda_forge_count)


def test_create_jaspy_env_py36():
    py_version = 'py3.6'
    env_name = 'jaspy3.6-m3-4.5.4-rc3'
    conda_forge_count = 347
    _common_create_jaspy_env(py_version, env_name, conda_forge_count)
#!/bin/bash

# Map a long environment name to its short alias, storing the result in the
# global variable $shortenv (names without an alias pass through unchanged).
function shortenv() {
    case $1 in
        production)  shortenv=prod ;;
        development) shortenv=dev ;;
        testing)     shortenv=test ;;
        staging)     shortenv=stage ;;
        *)           shortenv=$1 ;;
    esac
}
-- Re-use existing labels added by Balancer team
-- Add 'Balancer' and version to name
-- Use label type 'lp_pool_name'

-- Main Query
-- Copies every Balancer v1/v2 pool label into the generic 'lp_pool_name'
-- label type, prefixing the lowercased pool name with the Balancer version.
-- Re-runs are idempotent thanks to ON CONFLICT DO NOTHING.
INSERT INTO labels.labels(address, name, type, author, source)

SELECT
    address,
    LOWER(CONCAT('Balancer v1 LP ', name)) AS label,
    'lp_pool_name' AS type,
    'masquot' AS author,
    'ethereum_mainnet_balancer_pools_general_format' AS source
FROM (SELECT * FROM labels.labels WHERE type = 'balancer_pool') b1

UNION ALL

SELECT
    address,
    LOWER(CONCAT('Balancer v2 LP ', name)) AS label,
    'lp_pool_name' AS type,
    'masquot' AS author,
    'ethereum_mainnet_balancer_pools_general_format' AS source
FROM (SELECT * FROM labels.labels WHERE type = 'balancer_v2_pool') b2

ON CONFLICT DO NOTHING
;
# View helpers for rendering "like" tooltips on runs.
module LikesHelper
  # Maximum number of liker names shown before collapsing to "and N others".
  # Integer literals are already immutable in Ruby, so no `.freeze` is needed.
  MAX_NAMES = 4

  # Build the tooltip text for a run's like button.
  #
  # Returns "Like this run" when nobody has liked the run yet, the full list
  # of names when there are at most MAX_NAMES likes, and otherwise the first
  # MAX_NAMES - 1 names followed by "and N others".
  def tooltip(run)
    count = run.likes.count
    return 'Like this run' if count.zero?

    likes = run.likes.joins(:user).limit(MAX_NAMES).pluck(:name)

    return "Liked by #{likes.to_sentence}" if count <= MAX_NAMES

    # Use one fewer name than MAX_NAMES so we never render the degenerate
    # "Alice, Bob, Carol, and 1 other" when we could simply have shown the
    # fourth name ("Alice, Bob, Carol, and Dan") instead — the remainder
    # count here is therefore always at least 2, matching "others".
    return "Liked by #{likes[0..(MAX_NAMES - 2)].to_sentence} and #{count - (MAX_NAMES - 1)} others"
  end
end
import json

from django.db import connection
from django.http import HttpResponse, HttpResponseBadRequest
from django.views.generic.base import View


class AdminQueryDataApi(View):
    """Paginated admin API over the search_suggestionlog table.

    Supports substring filtering (?q=), ordering (?order_by=, with an
    optional '-' prefix for descending) and restriction to failed
    suggestions (?fail= selects rows with num_suggestions=0).
    """

    PER_PAGE = 15
    # Whitelist of sortable columns; anything else raises, so ORDER BY can be
    # interpolated into the SQL string safely.
    SUPPORTED_SORT_ORDER = ['search_query', 'num_usage', 'updated_at']

    def order_by(self, order_by):
        """Split an order_by spec into (direction, column), validating the column.

        Raises Exception for columns outside SUPPORTED_SORT_ORDER.
        """
        order = 'DESC' if order_by.startswith('-') else 'ASC'
        order_by = order_by.replace('-', '')
        if order_by not in self.SUPPORTED_SORT_ORDER:
            raise Exception('Unknown sort order')
        return order, order_by

    def build_result_entry_from_log(self, log):
        """Map a raw result row tuple to the JSON entry returned to the client."""
        return {
            'search_query': log[0],
            'num_usage': log[1],
            'updated_at': str(log[2]),
            'num_suggestions': log[3]
        }

    def num_suggestion_condition(self, fail=False):
        # Fixed SQL fragment (never user-supplied text), safe to interpolate.
        return 'AND num_suggestions=0' if fail else ''

    def get(self, request):
        """Return one page of aggregated search-suggestion logs as JSON.

        Any failure (bad page number, unknown sort order, DB error) is
        reported as a 400 response with an 'error' key.
        """
        try:
            page = int(request.GET.get('page', 0))
            start = page * self.PER_PAGE
            q = request.GET.get('q', '').lower()
            order, order_by = self.order_by(request.GET.get('order_by') or '-updated_at')
            additional_condition = self.num_suggestion_condition(request.GET.get('fail'))
            cursor = connection.cursor()
            # SECURITY: the search term, offset and limit are passed as bound
            # parameters (%s placeholders) instead of being formatted into the
            # SQL text, which previously allowed SQL injection via ?q=. The
            # remaining format() fields are whitelist-validated (order_by,
            # order) or fixed fragments (additional_condition).
            cursor.execute('''
                SELECT DISTINCT
                    search_query,
                    COUNT(search_query) as num_usage,
                    MAX(created_at) as updated_at,
                    MAX(num_suggestions) as max_num_suggestions
                FROM search_suggestionlog
                WHERE lower(search_query) LIKE %s
                AND char_length(search_query) > 2
                {additional_condition}
                GROUP BY search_query
                ORDER BY {order_by} {order}
                OFFSET %s
                LIMIT %s
            '''.format(
                additional_condition=additional_condition,
                order_by=order_by,
                order=order,
            ), ['%{}%'.format(q), start, self.PER_PAGE])
            logs = cursor.fetchall()
            results = list(map(self.build_result_entry_from_log, logs))
            data = {
                'data': results
            }
            return HttpResponse(json.dumps(data))
        except Exception as e:
            return HttpResponseBadRequest(json.dumps({'error': str(e)}))
package xyz.brassgoggledcoders.opentransport.modules.vanilla.guiinterfaces;

import net.minecraft.client.gui.Gui;
import net.minecraft.client.gui.GuiCommandBlock;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.Container;
import net.minecraft.tileentity.TileEntityCommandBlock;
import xyz.brassgoggledcoders.opentransport.api.blockwrappers.IBlockWrapper;
import xyz.brassgoggledcoders.opentransport.api.blockwrappers.IGuiInterface;
import xyz.brassgoggledcoders.opentransport.api.entities.IHolderEntity;

/**
 * {@link IGuiInterface} implementation that opens the vanilla command-block
 * GUI for a command block carried by a holder entity.
 */
public class CommandGuiInterface implements IGuiInterface {

    @Override
    public Gui getGUI(EntityPlayer entityPlayer, IHolderEntity holderEntity, IBlockWrapper blockWrapper) {
        // Resolves the tile entity through the holder entity's own wrapper
        // rather than the blockWrapper parameter; the cast assumes that tile
        // entity is a TileEntityCommandBlock.
        return new GuiCommandBlock((TileEntityCommandBlock) holderEntity.getBlockWrapper().getTileEntity());
    }

    @Override
    public Container getContainer(EntityPlayer entityPlayer, IHolderEntity holderEntity, IBlockWrapper blockWrapper) {
        // No container is provided for the command-block GUI.
        // NOTE(review): callers must tolerate a null container here — confirm.
        return null;
    }
}
<reponame>AllenElguira16/repairservicelocator
// Thin static API wrapper around the backend REST endpoints, plus a
// socket.io factory and a data-URL-to-File helper for uploads.
import Axios from 'axios';
import * as io from 'socket.io-client'

class Module{
    // Fetch details of the currently authenticated user.
    // NOTE(review): uses POST with no body — confirm the endpoint expects POST.
    public static getUser(){
        return Axios.post("/api/userDetails");
    }
    // Fetch the user's contact list.
    public static getUserContacts(){
        return Axios.get("/api/contacts");
    }
    // Open a socket.io connection to the realtime server.
    public static runSocket(){
        let socket = io("https://www.repairservicelocator.test:8000");
        return socket;
    }
    // Fetch the chat history for the given conversation/contact id.
    public static getChats(id: number){
        return Axios.get(`/api/chats/${id}`);
    }
    // Post a new chat message to the given conversation/contact id.
    public static sendChat(id: number, content: Object){
        return Axios.post(`/api/chats/${id}`, content);
    }
    // Search shops by category and (partial) shop name.
    public static getShops(category: string, shopName: string){
        return Axios.get(`/api/shops/${category}/${shopName}`);
    }
    // Fetch the aggregate review rating for a shop.
    public static getTotalRatings(id: number){
        return Axios.get(`/api/shops/${id}/reviews/total`);
    }
    // Add an entry to the user's contacts.
    public static addToContacts(form: any){
        return Axios.post("/api/addToContacts", form);
    }
    // Convert a data URL (e.g. a canvas/image preview) into a File object,
    // decoding the base64 payload byte-by-byte. Browser-only (uses atob/File).
    public static dataURLtoFile(dataurl: any, filename: any) {
        var arr = dataurl.split(','),
            mime = arr[0].match(/:(.*?);/)[1],
            bstr = atob(arr[1]),
            n = bstr.length,
            u8arr = new Uint8Array(n);

        while (n--) {
            u8arr[n] = bstr.charCodeAt(n);
        }

        return new File([u8arr], filename, { type: mime });
    }
}

export default Module;
// Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package kafka

import (
	"errors"
	"fmt"
	"strings"
	"sync"
	"sync/atomic"

	"github.com/Shopify/sarama"

	"github.com/elastic/beats/libbeat/common/fmtstr"
	"github.com/elastic/beats/libbeat/logp"
	"github.com/elastic/beats/libbeat/outputs"
	"github.com/elastic/beats/libbeat/outputs/codec"
	"github.com/elastic/beats/libbeat/outputs/outil"
	"github.com/elastic/beats/libbeat/publisher"
)

// client is a Kafka output built on sarama's AsyncProducer; successes and
// errors are consumed by two background workers tracked via wg.
type client struct {
	observer outputs.Observer
	hosts    []string
	topic    outil.Selector
	key      *fmtstr.EventFormatString
	index    string
	codec    codec.Codec
	config   sarama.Config

	producer sarama.AsyncProducer

	wg sync.WaitGroup
}

// msgRef tracks the outstanding messages of one published batch; `count` is
// decremented atomically per message and the batch is ACKed/retried at zero.
type msgRef struct {
	client *client
	count  int32
	total  int
	failed []publisher.Event
	batch  publisher.Batch

	err error
}

var (
	errNoTopicsSelected = errors.New("no topic could be selected")
)

// newKafkaClient builds an unconnected client; call Connect before Publish.
// Note that cfg is copied by value into the client.
func newKafkaClient(
	observer outputs.Observer,
	hosts []string,
	index string,
	key *fmtstr.EventFormatString,
	topic outil.Selector,
	writer codec.Codec,
	cfg *sarama.Config,
) (*client, error) {
	c := &client{
		observer: observer,
		hosts:    hosts,
		topic:    topic,
		key:      key,
		index:    index,
		codec:    writer,
		config:   *cfg,
	}
	return c, nil
}

// Connect creates the async producer and starts the success/error workers.
func (c *client) Connect() error {
	debugf("connect: %v", c.hosts)

	// try to connect
	producer, err := sarama.NewAsyncProducer(c.hosts, &c.config)
	if err != nil {
		logp.Err("Kafka connect fails with: %v", err)
		return err
	}

	c.producer = producer

	// One wg slot per background worker; Close waits on both.
	c.wg.Add(2)
	go c.successWorker(producer.Successes())
	go c.errorWorker(producer.Errors())

	return nil
}

// Close shuts the producer down asynchronously and waits for both workers
// to drain their channels before returning.
func (c *client) Close() error {
	debugf("closed kafka client")

	c.producer.AsyncClose()
	c.wg.Wait()
	c.producer = nil
	return nil
}

// Publish enqueues all events of the batch on the producer's input channel.
// Events that cannot be converted to a Kafka message are dropped (and the
// batch ref count decremented) rather than failing the whole batch.
func (c *client) Publish(batch publisher.Batch) error {
	events := batch.Events()
	c.observer.NewBatch(len(events))

	ref := &msgRef{
		client: c,
		count:  int32(len(events)),
		total:  len(events),
		failed: nil,
		batch:  batch,
	}

	ch := c.producer.Input()
	for i := range events {
		d := &events[i]

		msg, err := c.getEventMessage(d)
		if err != nil {
			logp.Err("Dropping event: %v", err)
			ref.done()
			c.observer.Dropped(1)
			continue
		}

		msg.ref = ref
		msg.initProducerMessage()
		ch <- &msg.msg
	}

	return nil
}

func (c *client) String() string {
	return "kafka(" + strings.Join(c.hosts, ",") + ")"
}

// getEventMessage converts a publisher event into a Kafka message: partition
// and topic may be pinned via event metadata, the selected topic is cached
// back into the metadata, and the encoded payload is copied into a fresh
// buffer (the codec may reuse its internal buffer between calls).
func (c *client) getEventMessage(data *publisher.Event) (*message, error) {
	event := &data.Content
	msg := &message{partition: -1, data: *data}
	if event.Meta != nil {
		if value, ok := event.Meta["partition"]; ok {
			if partition, ok := value.(int32); ok {
				msg.partition = partition
			}
		}

		if value, ok := event.Meta["topic"]; ok {
			if topic, ok := value.(string); ok {
				msg.topic = topic
			}
		}
	}
	if msg.topic == "" {
		topic, err := c.topic.Select(event)
		if err != nil {
			return nil, fmt.Errorf("setting kafka topic failed with %v", err)
		}
		if topic == "" {
			return nil, errNoTopicsSelected
		}
		msg.topic = topic
		if event.Meta == nil {
			event.Meta = map[string]interface{}{}
		}
		event.Meta["topic"] = topic
	}

	serializedEvent, err := c.codec.Encode(c.index, event)
	if err != nil {
		return nil, err
	}

	buf := make([]byte, len(serializedEvent))
	copy(buf, serializedEvent)
	msg.value = buf

	// message timestamps have been added to kafka with version 0.10.0.0
	if c.config.Version.IsAtLeast(sarama.V0_10_0_0) {
		msg.ts = event.Timestamp
	}

	if c.key != nil {
		if key, err := c.key.RunBytes(event); err == nil {
			msg.key = key
		}
	}

	return msg, nil
}

// successWorker marks each acknowledged message done until the channel closes.
func (c *client) successWorker(ch <-chan *sarama.ProducerMessage) {
	defer c.wg.Done()
	defer debugf("Stop kafka ack worker")

	for libMsg := range ch {
		msg := libMsg.Metadata.(*message)
		msg.ref.done()
	}
}

// errorWorker records each failed message on its batch ref until the channel
// closes.
func (c *client) errorWorker(ch <-chan *sarama.ProducerError) {
	defer c.wg.Done()
	defer debugf("Stop kafka error handler")

	for errMsg := range ch {
		msg := errMsg.Msg.Metadata.(*message)
		msg.ref.fail(msg, errMsg.Err)
	}
}

func (r *msgRef) done() {
	r.dec()
}

// fail records a failed message. Permanently-invalid messages (rejected or
// too large) are dropped; everything else is queued for batch retry.
func (r *msgRef) fail(msg *message, err error) {
	switch err {
	case sarama.ErrInvalidMessage:
		logp.Err("Kafka (topic=%v): dropping invalid message", msg.topic)

	case sarama.ErrMessageSizeTooLarge, sarama.ErrInvalidMessageSize:
		logp.Err("Kafka (topic=%v): dropping too large message of size %v.",
			msg.topic,
			len(msg.key)+len(msg.value))

	default:
		r.failed = append(r.failed, msg.data)
		r.err = err
	}
	r.dec()
}

// dec atomically decrements the outstanding-message count; the last message
// to finish resolves the batch (ACK on full success, RetryEvents otherwise)
// and updates the output observer stats.
func (r *msgRef) dec() {
	i := atomic.AddInt32(&r.count, -1)
	if i > 0 {
		return
	}

	debugf("finished kafka batch")
	stats := r.client.observer

	err := r.err
	if err != nil {
		failed := len(r.failed)
		success := r.total - failed
		r.batch.RetryEvents(r.failed)

		stats.Failed(failed)
		if success > 0 {
			stats.Acked(success)
		}

		debugf("Kafka publish failed with: %v", err)
	} else {
		r.batch.ACK()
		stats.Acked(r.total)
	}
}
import React, { Component } from 'react'
import { Elements, Slideshows } from '@ludwigjs/ui'

// Slide-deck component walking through Ludwig's setup steps, rendered with
// the ui library's progress-bar slideshow wrapper.
class Preso1 extends Component {
  constructor(props) {
    super(props)
    // NOTE(review): `click` is bound here but never attached to any element
    // in render() — confirm whether this handler is still needed.
    this.click = this.click.bind(this)
  }

  // Invoke the optional `click` callback supplied via props (no-op default).
  click() {
    const { click = () => {} } = this.props
    click()
  }

  render() {
    // NOTE(review): `buttonText` and `classNames` are destructured but unused
    // in this render — confirm whether they are leftovers.
    const { buttonText = '', classNames = ''} = this.props
    return (
      <Slideshows.WithProgressBar>
        <div className='preso ludwig-bg-dk'>
          <div>
            <Elements.PageTitle classNames={`white txt-shdw`} titleText={`Getting Started`} />
          </div>
        </div>
        <div className='preso ludwig-bg-dk'>
          <div>
            <h2>{`Quick Overview`}</h2>
            <ul>
              <li>{`A UI, API and DB built on reactjs, nodejs and mysql`}</li>
              <li>{`+ a few other JS frameworks`}</li>
              <li>{`Includes instant end-to-end data flow`}</li>
            </ul>
          </div>
        </div>
        <div className='preso ludwig-bg-dk'>
          <div>
            <h1>{`Before Setup`}</h1>
          </div>
        </div>
        <div className='preso ludwig-bg-dk'>
          <div>
            <h2>{`Necessary Requirements:`}</h2>
            <ul>
              <li>{`Nodejs version 8.11.4`}</li>
              <li>{`1 Amazon SES account approved to send email`}</li>
              <li>{`1 registered domain`}</li>
              <li>{`1 running MySQL instance`}</li>
              <li>{`1 Heroku Account (required only for deployment)`}</li>
            </ul>
          </div>
        </div>
        <div className='preso ludwig-bg-dk'>
          <div>
            <h1>{`Begin Setup`}</h1>
          </div>
        </div>
        <div className='preso ludwig-bg-dk'>
          <div>
            <h2>{`First`}</h2>
            <ul>
              <li>{`Download Ludwig’s code to your desired directory`}</li>
              <li>{`Run npm install in Ludwig’s root directory`}</li>
            </ul>
          </div>
        </div>
        <div className='preso ludwig-bg-dk'>
          <div>
            <h2>{`Next`}</h2>
            <ul>
              <li>{`Create a file to store Ludwig’s environment variables`}</li>
              <li>{`Verify it’s location matches the location indicated in the package.json`}</li>
              <li>{`Add the following key value pairs:`}</li>
            </ul>
            <p className='code'>
              {`export CLEARDB_DATABASE_URL=<YOUR_DATABASE_CONNECTION_STRING>`}<br/>
              {`export SECURITY_TOKEN_SECRET=<YOUR_SECRET>`}<br/>
              {`export AWS_REGION=<YOUR_AWS_REGION>`}<br/>
              {`export AWS_ACCESS_KEY_ID=<YOUR_AWS_ACCESS_KEY_ID>`}<br/>
              {`export AWS_SECRET_ACCESS_KEY=<YOUR_AWS_SECRET_ACCESS_KEY>`}<br/>
              {`export LOGIN_EMAIL=<YOUR_LOGIN_EMAIL>`}
            </p>
          </div>
        </div>
        <div className='preso ludwig-bg-dk'>
          <div>
            <h1>{`Almost Done!`}</h1>
          </div>
        </div>
        <div className='preso ludwig-bg-dk'>
          <div>
            <h2>{`Setup Your Database`}</h2>
            <ul>
              <li>{`Run the database script located in “server/schemas”`}</li>
            </ul>
          </div>
        </div>
        <div className='preso ludwig-bg-dk'>
          <div>
            <h2>{`Run Ludwig`}</h2>
            <ul>
              <li>{`Open a terminal window`}</li>
              <li>{`Run npm run build-local`}</li>
              <li>{`Open another terminal window`}</li>
              <li>{`Run npm run local`}</li>
            </ul>
          </div>
        </div>
        <div className='preso ludwig-bg-dk'>
          <div>
            <h2>{`Demo Ludwig`}</h2>
            <ul>
              <li>{`Open a browser and visit 0.0.0.0:3000`}</li>
              <li>{`For hot reloading visit 0.0.0.0:8080`}</li>
              <li>{`Example demo coming soon...`}</li>
            </ul>
          </div>
        </div>
        <div className='preso ludwig-bg-dk'>
          <div>
            <h1>{`That's it!`}</h1>
            <p>{`(Now you can customize Ludwig and have fun)`}</p>
          </div>
        </div>
      </Slideshows.WithProgressBar>
    )
  }
}

export default Preso1
""" Find out the roots of given quadratic equation """ from math import sqrt def quadratic_roots(a, b, c): # calculate the discriminant D = b**2 - 4*a*c # calculate the two roots if D == 0: return (-b+sqrt(D)) // (2*a) else: x1 = (-b + sqrt(D)) // (2*a) x2 = (-b - sqrt(D)) // (2*a) return (x1, x2) if __name__ == '__main__': a = 1 b = 4 c = 4 print("Roots of the quadratic equation:", quadratic_roots(a, b, c))
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# $Id: createvol.sh 11601 2010-08-11 17:26:15Z kris $ $HeadURL: svn://svn.lab.vmops.com/repos/branches/2.1.refactor/java/scripts/storage/qcow2/createvol.sh $
# createvol.sh -- install a volume

usage() {
  printf "Usage: %s: -t <volume-fs> -n <volumename> -f <root disk file> -s <size in Gigabytes> -c <md5 cksum> -d <descr> -h [-u]\n" $(basename $0) >&2
}

#set -x

# Prefer the CloudStack-patched qemu-img, fall back to the stock binary.
qemu_img="cloud-qemu-img"
which $qemu_img
if [ $? -gt 0 ]
then
  which qemu-img
  if [ $? -eq 0 ]
  then
    qemu_img="qemu-img"
  fi
fi

# Verify $2 against checksum $1; the digest algorithm is inferred from the
# checksum's length. Exits 3 on mismatch or unknown length.
# NOTE(review): this helper is defined but never invoked below — confirm
# whether checksum verification was intentionally dropped.
verify_cksum() {
  digestalgo=""
  case ${#1} in
        32) digestalgo="md5sum" ;;
        40) digestalgo="sha1sum" ;;
        56) digestalgo="sha224sum" ;;
        64) digestalgo="sha256sum" ;;
        96) digestalgo="sha384sum" ;;
        128) digestalgo="sha512sum" ;;
        *) echo "Please provide valid checksum" ; exit 3 ;;
  esac
  echo "$1  $2" | $digestalgo -c --status
  #printf "$1\t$2" | $digestalgo -c --status
  if [ $? -gt 0 ]
  then
    printf "Checksum failed, not proceeding with install\n"
    exit 3
  fi
}

# If $1 is a USTAR archive, extract it into $2, delete the archive and print
# the path of the member matching $3; otherwise print $1 unchanged.
# NOTE(review): defined but not called in this script — confirm still needed.
untar() {
  local ft=$(file $1| awk -F" " '{print $2}')
  local basedir=$(dirname $1)
  case $ft in
  USTAR)
       local rootimg=$(tar tf $1 | grep $3)
       (cd $2; tar xf $1)
       rm -f $1
       printf "$2/$rootimg"
       ;;
  *)   printf "$1"
       return 0
       ;;
  esac
}

# Decompress gzip/bzip2/zip input in place (suffix stripped) and print the
# resulting file name; non-compressed input is printed unchanged.
uncompress() {
  local ft=$(file $1| awk -F" " '{print $2}')
  local imgfile=${1%.*} #strip out trailing file suffix
  local tmpfile=${imgfile}.tmp

  case $ft in
  gzip)  gunzip -c $1 > $tmpfile
         ;;
  bzip2)  bunzip2 -c $1 > $tmpfile
         ;;
  ZIP)  unzip -p $1 | cat > $tmpfile
        ;;
  *)	printf "$1"
        return 0
	;;
  esac

  if [ $? -gt 0 ]
  then
    printf "Failed to uncompress file, exiting "
    exit 1
  fi

  mv $tmpfile $imgfile
  printf "$imgfile"

  return 0
}

# Install volume $3 into /$1 from source image $2: block devices are
# converted from raw, qcow2 files with a backing image are flattened, and
# plain files are copied to preserve snapshots/compression.
create_from_file() {
  local volfs=$1
  local volimg="$2"
  local volname=$3

  if [ -b $volimg ]; then
    $qemu_img convert -f raw -O qcow2 "$volimg" /$volfs/$volname
  else
    # if backing image exists, we need to combine them, otherwise
    # copy the image to preserve snapshots/compression
    if $qemu_img info "$volimg" | grep -q backing; then
      $qemu_img convert -f qcow2 -O qcow2 "$volimg" /$volfs/$volname >& /dev/null
    else
      cp -f $volimg /$volfs/$volname
    fi
  fi

  if [ "$cleanup" == "true" ]
  then
    rm -f "$volimg"
  fi

  chmod a+r /$volfs/$volname
}

# Install volume $4 into /$3 from snapshot $2 of qcow2 disk $1.
create_from_snapshot() {
  local volImg="$1"
  local snapshotName="$2"
  local volfs=$3
  local volname=$4

  $qemu_img convert -f qcow2 -O qcow2 -s "$snapshotName" "$volImg" /$volfs/$volname >& /dev/null
  if [ $? -gt 0 ]
  then
    # Fixed: the error message previously referenced the undefined $tmplfs.
    printf "Failed to create volume /$volfs/$volname from snapshot $snapshotName on disk $volImg "
    exit 2
  fi
  chmod a+r /$volfs/$volname
}

tflag=
nflag=
fflag=
sflag=
hflag=
hvm=false
cleanup=false
dflag=
cflag=
snapshotName=

# NOTE(review): usage() documents -s as "<size in Gigabytes>" and -c as a
# checksum, but here -s is a bare flag selecting the snapshot code path and
# -c carries the snapshot name — confirm which contract is current.
while getopts 'uht:n:f:sc:d:' OPTION
do
  case $OPTION in
  t)	tflag=1
		volfs="$OPTARG"
		;;
  n)	nflag=1
		volname="$OPTARG"
		;;
  f)	fflag=1
		volimg="$OPTARG"
		;;
  s)	sflag=1
		;;
  c)	cflag=1
		snapshotName="$OPTARG"
		;;
  d)	dflag=1
		descr="$OPTARG"
		;;
  u)	cleanup="true"
		;;
  ?)	usage
		exit 2
		;;
  esac
done

# Ensure the target filesystem directory exists.
if [ ! -d /$volfs ]
then
  mkdir -p /$volfs
  if [ $? -gt 0 ]
  then
    printf "Failed to create user fs $volfs\n" >&2
    exit 1
  fi
fi

if [ ! -f $volimg -a ! -b $volimg ]
then
  printf "root disk file $volimg doesn't exist\n"
  exit 3
fi

volimg=$(uncompress "$volimg")
if [ $? -ne 0 ]
then
  printf "failed to uncompress $volimg\n"
fi

if [ "$sflag" == "1" ]
then
  create_from_snapshot "$volimg" "$snapshotName" $volfs $volname
else
  create_from_file $volfs "$volimg" $volname
fi

# Record volume metadata alongside the installed image.
touch /$volfs/volume.properties
chmod a+r /$volfs/volume.properties
echo -n "" > /$volfs/volume.properties

today=$(date '+%m_%d_%Y')
echo "filename=$volname" > /$volfs/volume.properties
echo "snapshot.name=$today" >> /$volfs/volume.properties
echo "description=$descr" >> /$volfs/volume.properties

if [ "$cleanup" == "true" ]
then
  rm -f "$volimg"
fi

exit 0
package ops

import (
	"encoding/json"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/url"
	"strings"

	"bitbucket.org/atlassianlabs/restclient"
	ctrllogz "github.com/atlassian/ctrl/logz"
	ops_v1 "github.com/atlassian/voyager/pkg/apis/ops/v1"
	"github.com/atlassian/voyager/pkg/ops/util/zappers"
	"github.com/atlassian/voyager/pkg/util"
	"github.com/atlassian/voyager/pkg/util/apiservice"
	"github.com/atlassian/voyager/pkg/util/logz"
	"github.com/atlassian/voyager/pkg/util/pkiutil"
	"github.com/pkg/errors"
	"go.uber.org/zap"
	"k8s.io/apimachinery/pkg/runtime"
	"k8s.io/apiserver/pkg/endpoints/request"
)

const (
	// discoveryURI is the well-known path a provider serves its OpenAPI
	// operations document from.
	discoveryURI = "/v2/x-operations"
	// reportTag marks an advertised operation as the provider's report action.
	reportTag = "report"
)

// ProviderInterface is the contract for a remote ops provider that requests
// can be authenticated against and proxied to.
type ProviderInterface interface {
	ProxyRequest(asapConfig pkiutil.ASAP, w http.ResponseWriter, r *http.Request, uri string)
	Request(asapConfig pkiutil.ASAP, r *http.Request, uri string, user string) (*http.Response, error)
	ReportAction() string
	Render(w http.ResponseWriter, r *http.Request) error
	Name() string
	OwnsPlan(string) bool
}

// Action is a single operation discovered from the provider's spec.
type Action struct {
	method string
	action string
	tags   []string // Currently tags are only for `get` methods as reporting is the only consumer
}

// PathDefinition is the subset of an OpenAPI path entry this package consumes.
type PathDefinition struct {
	Tags        []string `json:"tags"`
	OperationID string   `json:"operationId"`
}

// Provider caches what was learned about one remote ops provider at
// discovery time, plus the HTTP client used to reach it.
type Provider struct {
	ProviderName string `json:"name"`
	plans        []string
	audience     string
	actions      []Action
	client       *http.Client
	baseURL      *url.URL
}

// NewProvider fetches the provider's discovery document and builds a Provider
// from it. The leading bool result reports whether a failure is retriable.
// https://extranet.atlassian.com/display/VDEV/Voyager+Ops+API+Specification#VoyagerOpsAPISpecification-Discovery
func NewProvider(logger *zap.Logger, broker *ops_v1.Route) (bool /*retriable*/, *Provider, error) {
	providerLog := loggerForProvider(logger, broker)
	providerLog.Sugar().Infof("Handling new provider: %s, URL: %s", broker.Name, broker.Spec.URL)
	baseURL, err := url.Parse(broker.Spec.URL)
	if err != nil {
		return false, nil, errors.Wrap(err, "failed to resolve broker url")
	}
	rm := createProviderClient(broker.Spec.URL)
	providerReq, err := rm.NewRequest(restclient.JoinPath(discoveryURI))
	if err != nil {
		return false, nil, err
	}
	client := util.HTTPClient()
	res, err := client.Do(providerReq)
	if err != nil {
		// Transport-level failure: the provider may come back, so retriable.
		return true, nil, err
	}
	defer util.CloseSilently(res.Body)
	if res.StatusCode != http.StatusOK {
		return false, nil, errors.Errorf("attempted to get provider schema returned status code: %d", res.StatusCode)
	}
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return false, nil, err
	}
	s := OpenAPISpec{}
	err = json.Unmarshal(body, &s)
	if err != nil {
		// NOTE(review): SugaredLogger.Debug is Sprint-style, so the %s verb is
		// never interpolated here — Debugf was probably intended.
		providerLog.Sugar().Debug("Could not unmarshall response: %s", body)
		return false, nil, err
	}
	p := &Provider{
		ProviderName: broker.ObjectMeta.Name,
		plans:        broker.Spec.Plans,
		audience:     broker.Spec.ASAP.Audience,
		actions:      []Action{},
		client:       client,
		baseURL:      baseURL,
	}
	// Collect (method, operationId, tags) for every advertised
	// x-operation_instances path in the discovery document; entries that do
	// not convert cleanly are silently skipped.
	for path := range s.Paths {
		if strings.Contains(path, "/x-operation_instances/") {
			taggedPath := map[string]PathDefinition{}
			pathMap, ok := s.Paths[path].(map[string]interface{})
			if ok {
				if err := runtime.DefaultUnstructuredConverter.FromUnstructured(pathMap, &taggedPath); err == nil {
					if len(taggedPath) > 0 {
						for method, definition := range taggedPath {
							p.actions = append(p.actions, Action{
								method: method,
								action: definition.OperationID,
								tags:   definition.Tags,
							})
						}
					}
				}
			}
		}
	}
	return false, p, nil
}

// Request signs the request with an ASAP bearer token for this provider's
// audience, rewrites its URL to <baseURL>/<uri>, and executes it.
func (p *Provider) Request(asapConfig pkiutil.ASAP, r *http.Request, uri string, user string) (*http.Response, error) {
	headerValue, err := asapConfig.GenerateToken(p.audience, user)
	if err != nil {
		return nil, errors.Wrap(err, "Error setting up asap with provider")
	}
	r.Header.Set("Authorization", fmt.Sprintf("Bearer %s", headerValue))
	relative, err := url.Parse(uri)
	if err != nil {
		return nil, err
	}
	// modify the input request url to <broker>/<operation>
	r.URL = p.baseURL.ResolveReference(relative)
	return p.client.Do(r)
}

// ProxyRequest forwards the incoming request to the provider on behalf of the
// authenticated user and streams the provider's response back to the client.
func (p *Provider) ProxyRequest(asapConfig pkiutil.ASAP, w http.ResponseWriter, r *http.Request, uri string) {
	logger := logz.RetrieveLoggerFromContext(r.Context())
	providerRequest, err := http.NewRequest(r.Method, "", r.Body)
	if err != nil {
		apiservice.RespondWithInternalError(logger, w, r, fmt.Sprintf("error returned creating request to provider %s: %s", p.Name(), err.Error()), err)
		return
	}
	// The user identity must have been attached by the auth layer upstream.
	userInfo, ok := request.UserFrom(r.Context())
	if !ok {
		apiservice.RespondWithInternalError(logger, w, r, fmt.Sprintf("auth information missing from context"), errors.New("auth information missing from context"))
		return
	}
	providerRequest = providerRequest.WithContext(r.Context())
	resp, err := p.Request(asapConfig, providerRequest, uri, userInfo.GetName())
	if err != nil {
		apiservice.RespondWithInternalError(logger, w, r, fmt.Sprintf("error making request to provider %s: %s", p.Name(), err.Error()), err)
		return
	}
	defer util.CloseSilently(resp.Body)
	// NOTE(review): Sugar().Debug is Sprint-style; passing a zap.Field here
	// prints the field struct rather than a structured field — confirm intent.
	logger.Sugar().Debug("Provider request returned", zap.String("Status", resp.Status))
	// Relay status, content type and body unchanged.
	w.Header().Set("Content-Type", resp.Header.Get("Content-Type"))
	w.WriteHeader(resp.StatusCode)
	_, err = io.Copy(w, resp.Body)
	if err != nil {
		logger.Error("failed to copy response body", zap.Error(err))
	}
}

// Name returns the provider's configured name.
func (p *Provider) Name() string {
	return p.ProviderName
}

// loggerForProvider tags the logger with the route and its namespace.
func loggerForProvider(logger *zap.Logger, route *ops_v1.Route) *zap.Logger {
	return logger.With(
		zappers.Route(route),
		ctrllogz.Namespace(route),
	)
}

// createProviderClient builds a request mutator rooted at the provider URL
// that sends JSON.
func createProviderClient(url string) *restclient.RequestMutator {
	return restclient.NewRequestMutator(
		restclient.BaseURL(url),
		restclient.Header("Content-Type", "application/json"),
	)
}

// ReportAction returns the operationId of the first action tagged as the
// report endpoint, or "" if the provider advertises none.
func (p *Provider) ReportAction() string {
	for _, v := range p.actions {
		for _, t := range v.tags {
			if t == reportTag {
				return v.action
			}
		}
	}
	return ""
}

// OwnsPlan reports whether this provider serves the given plan ID.
func (p *Provider) OwnsPlan(planID string) bool {
	for _, val := range p.plans {
		if val == planID {
			return true
		}
	}
	return false
}
from sqlalchemy import Column, Integer, String, Float
from .base import Base


class User(Base):
    """ORM model for an application user (table ``User``)."""

    __tablename__ = 'User'

    id = Column(Integer, primary_key=True)
    # Login name, unique across all users, max 20 characters.
    username = Column(String(20), unique=True)
    # NOTE(review): stored as given — confirm callers hash before persisting.
    password = Column(String(256))

    def __init__(self, username=None, password=None):
        """Create a user.

        The original default for ``password`` was the literal token
        ``<PASSWORD>`` (a redaction placeholder), which is a syntax error;
        it now defaults to ``None`` like ``username``.
        """
        self.username = username
        self.password = password

    def __repr__(self):
        return '<User %d: %r>' % (self.id, self.username)
#! /bin/bash
# Greets the caller: prints "Hola " followed by all arguments and "!".
# All positional parameters are joined into a single string.
saludo_destino="$*"
echo "Hola $saludo_destino!"
// Draws the room-status board: three rows of four rooms plus a color legend.
// Box color encodes reservation status (TODO: load status and staff names
// from the DB instead of the hard-coded table below).
function draw() {
  var canvas = document.getElementById('canvas');
  if (!canvas.getContext) {
    return; // canvas unsupported
  }
  var ctx = canvas.getContext('2d');

  var startX = 160; // x of the first room box in each row
  var gapX = 170;   // horizontal spacing between room boxes
  var roomW = 150;
  var roomH = 80;

  // One room cell: colored box, room number, staff name.
  function drawRoom(x, y, color, number, staff) {
    ctx.fillStyle = color;
    ctx.fillRect(x, y, roomW, roomH);
    ctx.fillStyle = 'black';
    ctx.font = "35px Georgia";
    ctx.fillText(number, x + 50, y + 30);
    ctx.fillText(staff, x + 30, y + 70);
  }

  // Row data: [rowY, [[color, roomNumber, staffName], ...]]
  // (replaces the previous copy-pasted per-room drawing code).
  var rows = [
    [100, [['red', '301', '김XX'], ['blue', '302', '이XX'], ['blue', '303', '박XX'], ['blue', '304', '김XX']]],
    [200, [['blue', '201', '김XX'], ['blue', '202', '김XX'], ['blue', '203', '김XX'], ['red', '204', '김XX']]],
    [300, [['green', '101', '김XX'], ['red', '102', '김XX'], ['blue', '103', '김XX'], ['blue', '104', '김XX']]]
  ];

  rows.forEach(function (row) {
    var y = row[0];
    row[1].forEach(function (room, i) {
      drawRoom(startX + gapX * i, y, room[0], room[1], room[2]);
    });
  });

  // Legend panel: color swatches and their meanings.
  ctx.fillStyle = 'white';
  ctx.fillRect(0, 100, 120, 200);
  ctx.fillStyle = 'red';
  ctx.fillRect(10, 120, 30, 30);
  ctx.fillStyle = 'blue';
  ctx.fillRect(10, 180, 30, 30);
  ctx.fillStyle = 'green';
  ctx.fillRect(10, 240, 30, 30);
  ctx.fillStyle = 'black';
  ctx.font = "17px Georgia";
  ctx.fillText('사용불가', 45, 140);  // "unavailable"
  ctx.fillText('예약안됨', 45, 200);  // "not reserved"
  ctx.fillText('예약됨', 45, 260);    // "reserved"
}

window.onload = function () {
  draw();
};
class BuildConfig:
    """Container for build/test configuration values.

    Holds the compiler commands, preprocessor list, linker flags, directory
    roots and test-run settings, and can echo them for logging.
    """

    def __init__(self, f77, cc, pplist, usr_lflags, ifmpi, source_root,
                 examples_root, log_root, verbose, parallel_procs):
        self.f77 = f77                        # Fortran 77 compiler command
        self.cc = cc                          # C compiler command
        self.pplist = pplist                  # preprocessor symbol list
        self.usr_lflags = usr_lflags          # extra user linker flags
        self.ifmpi = ifmpi                    # whether MPI is enabled
        self.source_root = source_root        # root of the source tree
        self.examples_root = examples_root    # root of the examples tree
        self.log_root = log_root              # where test logs are written
        self.verbose = verbose                # verbose test output flag
        self.parallel_procs = parallel_procs  # number of parallel processes

    def print_config_values(self):
        """Print every setting as 'NAME: value', one per line."""
        settings = (
            ('FC', self.f77),
            ('CC', self.cc),
            ('PPLIST', self.pplist),
            ('USR_LFLAGS', self.usr_lflags),
            ('IFMPI', self.ifmpi),
            ('SOURCE_ROOT', self.source_root),
            ('EXAMPLES_ROOT', self.examples_root),
            ('LOG_ROOT', self.log_root),
            ('VERBOSE_TESTS', self.verbose),
            ('PARALLEL_PROCS', self.parallel_procs),
        )
        for name, value in settings:
            print('%s: %s' % (name, value))
#!/usr/bin/env bash
# Loads the shared chromium preconfiguration helpers, then runs listen_wiki.
# NOTE(review): listen_wiki is presumably defined by preconfigure.sh — confirm.
source ~/retropie-custom/chromium/preconfigure.sh
listen_wiki
#! /bin/bash
# Emits a TWiki-style table listing every live-DQM client config file in the
# CMSSW checkout together with its CVS working revision.

# Require a CMSSW environment.
if [[ -z $CMSSW_BASE ]]
then
  echo NO CMSSW environment is set
  exit
fi

if [[ ! -d "$CMSSW_BASE/src/DQM/Integration/python/test" ]]
then
  echo NO "$CMSSW_BASE/src/DQM/Integration/python/test" exists
  exit
fi

cd $CMSSW_BASE/src/DQM/Integration/python/test

# Table header.
echo "| *File* || *Version* ||"

# cvs stat prints, per file, a "File: ..." line followed by a
# "Working revision: ..." line; the sed calls strip the labels so the loop
# below receives alternating file / revision values. `twist` toggles between
# "start a new row" (0) and "finish the row" (1).
twist=0
cvs stat *dqm_sourceclient-live_cfg.py | grep -oP "File:.*py|Working revision:.*" | sed "s|File: ||g" | sed "s|Working revision:||g" | while read f
do
  if [[ $twist -eq 1 ]]
  then
    line="$line | $f ||"
    echo $line
    twist=0
  else
    line="|| $f "
    twist=1
  fi
done | sed "s/ | / | /g" | sed "s/ ||/ ||/g"
# Run the given command at most once per calendar day.
# A marker file (path computed by run_once_a_day_marker, defined elsewhere)
# stores the day stamp of the last successful run; if it matches today's
# stamp (from dateh) the command is skipped. The stamp is only refreshed
# after the command succeeds, so failed runs are retried.
function run_once_a_day() {
  local run_once_a_day_MARKER
  run_once_a_day_MARKER="$(run_once_a_day_marker "$@")"
  # Already ran today? Then do nothing.
  if [ -e "$run_once_a_day_MARKER" ]; then
    case "$(dateh)" in
      "$(cat "$run_once_a_day_MARKER")")
        return 0
        ;;
    esac
  fi
  # shellcheck disable=SC2068
  $@ || return $?
  # Record today's stamp only after a successful run.
  dateh > "$run_once_a_day_MARKER" || return $?
}
package de.rieckpil.blog;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;

/**
 * JPA entity for a user-submitted message with its author and a creation
 * timestamp (epoch value supplied by the caller).
 */
@Entity
public class CustomMessage {

  // Database-generated surrogate key.
  @Id
  @GeneratedValue(strategy = GenerationType.AUTO)
  private Long id;

  private String content;
  private String author;
  private long createdAt;

  /** Required by JPA. */
  public CustomMessage() {
  }

  public CustomMessage(String content, String author, long createdAt) {
    this.content = content;
    this.author = author;
    this.createdAt = createdAt;
  }

  public Long getId() {
    return id;
  }

  public void setId(Long id) {
    this.id = id;
  }

  public String getContent() {
    return content;
  }

  public void setContent(String content) {
    this.content = content;
  }

  public String getAuthor() {
    return author;
  }

  public void setAuthor(String author) {
    this.author = author;
  }

  public long getCreatedAt() {
    return createdAt;
  }

  public void setCreatedAt(long createdAt) {
    this.createdAt = createdAt;
  }
}
# Stock-market case study: builds 2016 subsets, joins IT-company closing
# prices pairwise, and computes Pearson correlation coefficients in Hive.

# Create the 2016 table. NOTE(review): unlike the later create statements this
# one is NOT commented out, so re-running the script fails once the table
# exists — confirm whether it should also be commented.
hive -e "create table stock_db.stock_market_data_2016(symbol string, series string, open double, high double, low double, close double, last double, prevclose double, tottrdqty int, tottrval double, mydate string, totaltrades int, isin string)"

# Keep only liquid 2016 rows (traded quantity >= 300000).
hive -e "insert overwrite table stock_db.stock_market_data_2016 select * from stock_db.stock_market_data where tottrdqty >= 300000 and substr(mydate,1,4) = '2016'"
hive -e "select * from stock_db.stock_market_data_2016 limit 25" > /home/manav/Desktop/stock_market_case_study/hive/output3_1.tsv

# Subset to the ten IT companies of interest.
#hive -e "create table stock_db.it_companies_stock (symbol string, series string, open double, high double, low double, close double, last double, prevclose double, tottrdqty int, tottrval double, mydate string, totaltrades int, isin string)"
hive -e "insert overwrite table stock_db.it_companies_stock select * from stock_db.stock_market_data_2016 where lower(symbol) in ('hcltech', 'niittech', 'tataelxsi','tcs', 'infy', 'wipro', 'datamatics','techm','mindtree', 'ofss')"
hive -e "select * from stock_db.it_companies_stock" > /home/manav/Desktop/stock_market_case_study/hive/output3_2.tsv

# Pair up closing prices of distinct symbols on the same date (t1.symbol >
# t2.symbol keeps each unordered pair once).
# NOTE(review): the format pattern 'yyyy-mm-dd' uses mm = minutes in
# Java/Hive date formats (months are MM) — the date conversion here is
# likely wrong; confirm intended pattern.
#hive -e "create table stock_db.it_companies_stock_close (symbol1 string, close1 float, symbol2 string, close2 float,mydate string)"
hive -e "insert overwrite table stock_db.it_companies_stock_close select t1.symbol,t1.close, t2.symbol, t2.close, from_unixtime(unix_timestamp(t1.mydate, 'yyyy-mm-dd'), 'yyyy-mmm-dd') as md from stock_db.it_companies_stock t1 cross join stock_db.it_companies_stock t2 where t1.symbol > t2.symbol and t1.mydate=t2.mydate order by t1.symbol asc,t2.symbol asc, from_unixtime(unix_timestamp(md, 'yyyy-mm-dd'), 'yyyy-mmm-dd') asc"

# Pearson correlation per symbol pair: E[XY] - E[X]E[Y] over the product of
# population standard deviations.
#hive -e "create table stock_db.it_pearsons_corr_coeff(symbol1 string, symbol2 string, corr float)"
hive -e "insert overwrite table stock_db.it_pearsons_corr_coeff select symbol1, symbol2, (avg(close1*close2) - (avg(close1) *avg(close2)))/(stddev_pop(close1) * stddev_pop(close2)) as pearsoncoefficient from stock_db.it_companies_stock_close group by symbol1, symbol2 order by pearsoncoefficient desc"
hive -e "select * from stock_db.it_pearsons_corr_coeff" > '/home/manav/Desktop/stock_market_case_study/hive/output3_3.tsv'
#!/bin/bash -e
# Launch helper for the analytics dev environment: picks the image name and
# delegates to the shared shell.sh script.

# NOTE(review): IMAGE is presumably consumed by shell.sh below — confirm.
IMAGE="xeone3-centos76-analytics-dev"

# Directory containing this script, with symlinks resolved.
DIR=$(dirname $(readlink -f "$0"))

. "${DIR}/../../../../script/shell.sh"
// Returns the element that appears most often in `arr`.
// Fixes two defects in the original:
//  * it returned the object *key* (always a string) instead of the original
//    element, so mostFrequent([4, 4, 1]) yielded "4" rather than 4;
//  * the trailing example call referenced an undefined `arr`, throwing a
//    ReferenceError when the file was loaded.
// Counting and max-tracking now happen in a single pass; ties go to the
// element that first reaches the winning count.
const mostFrequent = arr => {
  const counts = {};
  let best;          // element with the highest count seen so far
  let bestCount = 0;

  for (const value of arr) {
    const count = (counts[value] || 0) + 1;
    counts[value] = count;
    if (count > bestCount) {
      bestCount = count;
      best = value; // keep the original element (and its type)
    }
  }

  return best;
};

// Example usage:
mostFrequent([3, 4, 1, 4, 2, 4]); // 4
package com.itms.wikiapp.common.security;

import com.itms.wikiapp.common.exception.ErrorHandlerFactory;
import com.itms.wikiapp.userAuth.model.UserErrorCode;
import com.itms.wikiapp.userAuth.repository.UserAuthRepository;
import lombok.SneakyThrows;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.stereotype.Service;

import java.util.ArrayList;

/**
 * Resolves Spring Security user details by e-mail for JWT authentication.
 */
@Service
public class JwtUserDetailsService implements UserDetailsService {

  private final UserAuthRepository userAuthRepository;

  public JwtUserDetailsService(UserAuthRepository userAuthRepository) {
    this.userAuthRepository = userAuthRepository;
  }

  /**
   * Looks the user up by e-mail and, if found, returns a details object with
   * an empty authority list.
   *
   * NOTE(review): the password is the hard-coded literal "credential" — it
   * looks like a redacted placeholder. Presumably the JWT filter performs
   * the real credential check; confirm this is intentional.
   */
  @SneakyThrows
  @Override
  public UserDetails loadUserByUsername(String email) {
    if (userAuthRepository.findByEmail(email).isPresent()) {
      return new User(email, "credential", new ArrayList<>());
    } else {
      throw new ErrorHandlerFactory().newException(UserErrorCode.NOT_FOUND);
    }
  }
}
import Foundation

// Print today's date formatted as day/month/year, e.g. "05/03/2021".
let dayMonthYearFormatter = DateFormatter()
dayMonthYearFormatter.dateFormat = "dd/MM/yyyy"
let formatted = dayMonthYearFormatter.string(from: Date())
print(formatted)
const todos = [];

// List Todos — print each entry with its 1-based position.
function listTodos() {
  console.log('My Todos:');
  for (let i = 0; i < todos.length; i++) {
    console.log(`${i + 1}: ${todos[i]}`);
  }
}

// Add Todo — append an entry and confirm it.
function addTodo(todo) {
  todos.push(todo);
  console.log(`"${todo}" added to list`);
}

// Edit Todo — overwrite the entry at a 0-based index.
function editTodo(index, newTodo) {
  todos[index] = newTodo;
  console.log(`Todo #${index + 1} changed to "${newTodo}"`);
}

// Delete Todo — remove the entry at a 0-based index.
function deleteTodo(index) {
  const [removed] = todos.splice(index, 1);
  console.log(`"${removed}" deleted from list`);
}

// Demo run.
listTodos();
addTodo('Take out the trash');
addTodo('Do laundry');
editTodo(1, 'Buy groceries');
listTodos();
deleteTodo(0);
listTodos();
package com.binaryheap.testing.models;

import com.binaryheap.testing.exceptions.ClubExistsException;
import org.junit.Test;

import static org.junit.Assert.assertEquals;

/**
 * Unit tests for {@code Bag}: adding clubs, duplicate rejection, and the
 * integration events raised by additions.
 */
public class BagTests {

    @Test
    public void add_club_should_add_one_club() throws ClubExistsException {
        // arrange
        Club club = new Club();
        Bag bag = new Bag();
        club.setNumber(7);
        club.setLoft(34.5F);

        // act
        bag.addClub(club);

        // assert
        assertEquals(1, bag.getClubs().size());
    }

    // Adding a second club with the same number and loft must be rejected.
    @Test(expected = ClubExistsException.class)
    public void add_club_should_throw_exists_exception() throws ClubExistsException {
        // arrange
        Club club1 = new Club();
        Club club2 = new Club();
        Bag bag = new Bag();
        club1.setNumber(7);
        club1.setLoft(34.5F);
        club2.setNumber(7);
        club2.setLoft(34.5F);

        // act
        bag.addClub(club1);
        bag.addClub(club2);

        // assert will be that there is an exception defined above
    }

    @Test
    public void add_club_should_create_integration_event() throws ClubExistsException {
        // arrange
        Club club = new Club();
        Bag bag = new Bag();
        club.setNumber(7);
        club.setLoft(34.5F);

        // act
        bag.addClub(club);

        // assert
        assertEquals(1, bag.getIntegrationEvents().size());
    }

    // Simple getter/setter round-trip (added for code-coverage tooling).
    @Test
    public void sonar_test() {
        Bag bag = new Bag();
        bag.setId(1L);
        assertEquals(1L, bag.getId());
    }
}
#!/usr/bin/env bash

# Copyright 2020 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Refreshes the vendor directory using whichever vendoring tool the repo uses:
# legacy 'dep' (Gopkg.toml present) or Go modules (go.mod present).
if [ -f Gopkg.toml ]; then
    echo "Repo uses 'dep' for vendoring."
    (set -x; dep ensure)
elif [ -f go.mod ]; then
    # Ensure the Go toolchain version matches what the release tooling expects.
    release-tools/verify-go-version.sh "go"
    (set -x; env GO111MODULE=on go mod tidy && env GO111MODULE=on go mod vendor)
fi
const FILE = require("../models/fileSchema");

// GET handler: look up a stored file by its public uuid and send it to the
// client as a download.
// Fix: the original called `await file.save()` (and ignored the result) on
// every download — a pointless database write, since nothing on the document
// was modified; it has been removed.
const downloadFile = async (req, res, next) => {
  // The uuid in the URL is the only lookup key exposed publicly.
  const file = await FILE.findOne({
    uuid: req.params.uuid,
  });

  // Unknown uuid: nothing to serve.
  if (!file) {
    return res.status(404).json({ error: "File Not Available!" });
  }

  // file.path is stored relative to the project root (one level up).
  const filePath = `${__dirname}/../${file.path}`;
  res.download(filePath);
};

exports.downloadFile = downloadFile;
// Code generated by counterfeiter. DO NOT EDIT.
package fakes

import (
	"sync"

	"github.com/pivotal-cf/om/vmlifecycle/vmmanagers"
)

// CreateVMService is a counterfeiter test double for
// vmmanagers.CreateVMService: it records invocations and returns canned
// results (per-call overrides take precedence over the default return).
type CreateVMService struct {
	CreateVMStub    func() (vmmanagers.Status, vmmanagers.StateInfo, error)
	createVMMutex   sync.RWMutex
	createVMArgsForCall []struct {
	}
	createVMReturns struct {
		result1 vmmanagers.Status
		result2 vmmanagers.StateInfo
		result3 error
	}
	createVMReturnsOnCall map[int]struct {
		result1 vmmanagers.Status
		result2 vmmanagers.StateInfo
		result3 error
	}
	invocations      map[string][][]interface{}
	invocationsMutex sync.RWMutex
}

func (fake *CreateVMService) CreateVM() (vmmanagers.Status, vmmanagers.StateInfo, error) {
	fake.createVMMutex.Lock()
	// Check for a per-call override before recording this invocation.
	ret, specificReturn := fake.createVMReturnsOnCall[len(fake.createVMArgsForCall)]
	fake.createVMArgsForCall = append(fake.createVMArgsForCall, struct {
	}{})
	fake.recordInvocation("CreateVM", []interface{}{})
	fake.createVMMutex.Unlock()
	// Precedence: custom stub, then per-call return, then default return.
	if fake.CreateVMStub != nil {
		return fake.CreateVMStub()
	}
	if specificReturn {
		return ret.result1, ret.result2, ret.result3
	}
	fakeReturns := fake.createVMReturns
	return fakeReturns.result1, fakeReturns.result2, fakeReturns.result3
}

func (fake *CreateVMService) CreateVMCallCount() int {
	fake.createVMMutex.RLock()
	defer fake.createVMMutex.RUnlock()
	return len(fake.createVMArgsForCall)
}

func (fake *CreateVMService) CreateVMCalls(stub func() (vmmanagers.Status, vmmanagers.StateInfo, error)) {
	fake.createVMMutex.Lock()
	defer fake.createVMMutex.Unlock()
	fake.CreateVMStub = stub
}

func (fake *CreateVMService) CreateVMReturns(result1 vmmanagers.Status, result2 vmmanagers.StateInfo, result3 error) {
	fake.createVMMutex.Lock()
	defer fake.createVMMutex.Unlock()
	fake.CreateVMStub = nil
	fake.createVMReturns = struct {
		result1 vmmanagers.Status
		result2 vmmanagers.StateInfo
		result3 error
	}{result1, result2, result3}
}

func (fake *CreateVMService) CreateVMReturnsOnCall(i int, result1 vmmanagers.Status, result2 vmmanagers.StateInfo, result3 error) {
	fake.createVMMutex.Lock()
	defer fake.createVMMutex.Unlock()
	fake.CreateVMStub = nil
	if fake.createVMReturnsOnCall == nil {
		fake.createVMReturnsOnCall = make(map[int]struct {
			result1 vmmanagers.Status
			result2 vmmanagers.StateInfo
			result3 error
		})
	}
	fake.createVMReturnsOnCall[i] = struct {
		result1 vmmanagers.Status
		result2 vmmanagers.StateInfo
		result3 error
	}{result1, result2, result3}
}

func (fake *CreateVMService) Invocations() map[string][][]interface{} {
	fake.invocationsMutex.RLock()
	defer fake.invocationsMutex.RUnlock()
	fake.createVMMutex.RLock()
	defer fake.createVMMutex.RUnlock()
	// Shallow copy so callers cannot mutate the fake's internal record.
	copiedInvocations := map[string][][]interface{}{}
	for key, value := range fake.invocations {
		copiedInvocations[key] = value
	}
	return copiedInvocations
}

func (fake *CreateVMService) recordInvocation(key string, args []interface{}) {
	fake.invocationsMutex.Lock()
	defer fake.invocationsMutex.Unlock()
	if fake.invocations == nil {
		fake.invocations = map[string][][]interface{}{}
	}
	if fake.invocations[key] == nil {
		fake.invocations[key] = [][]interface{}{}
	}
	fake.invocations[key] = append(fake.invocations[key], args)
}

// Compile-time check that the fake satisfies the real interface.
var _ vmmanagers.CreateVMService = new(CreateVMService)
package lu.uni.serval.diff.parser.patch; import java.util.Objects; public class LinePair { private final Integer oldVersionLine; private final Integer newVersionLine; public LinePair(Integer oldVersionLine, Integer newVersionLine) { assert oldVersionLine != null; assert newVersionLine != null; this.oldVersionLine = oldVersionLine; this.newVersionLine = newVersionLine; } public Integer getOldVersionLine() { return oldVersionLine; } public Integer getNewVersionLine() { return newVersionLine; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; LinePair linesPair = (LinePair) o; return Objects.equals(oldVersionLine, linesPair.oldVersionLine) && Objects.equals(newVersionLine, linesPair.newVersionLine); } @Override public int hashCode() { return Objects.hash(oldVersionLine, newVersionLine); } }
# install applications
sudo apt update

# Install each named package via apt unless its command is already on PATH.
# Fix: the original only ever looked at $1, so in
# `install gnome-tweaks chrome-gnome-shell` the second package was silently
# dropped; the function now iterates over every argument.
function install {
  local pkg
  for pkg in "$@"; do
    if which "$pkg" &> /dev/null; then
      echo "Already installed: ${pkg}"
    else
      echo "Installing: ${pkg}..."
      sudo apt install -y "$pkg"
    fi
  done
}

install python3
install pipenv
install git
install gnome-tweaks chrome-gnome-shell

# install node.js (14.x from NodeSource)
cd ~
curl -sL https://deb.nodesource.com/setup_14.x -o nodesource_setup.sh
sudo bash nodesource_setup.sh
sudo apt install nodejs
<reponame>quexer/utee<filename>perf_log_test.go
package utee

import (
	"testing"
	"time"

	"github.com/sirupsen/logrus"
)

// TestNewPerfLog exercises PerfLog with two thresholds: one the ticked work
// stays under (expecting no output) and one it exceeds (expecting a log line).
// NOTE(review): output is inspected manually — there are no assertions here.
func TestNewPerfLog(t *testing.T) {
	// Threshold 100 (units per NewPerfLog's contract): 10ms of work should
	// stay below it.
	pl := NewPerfLog(100, logrus.WithField("on", "test"))
	pl.Tick(1)
	time.Sleep(10 * time.Millisecond)
	pl.Done() // show output nothing

	// Threshold 20: the sleeps between ticks should exceed it.
	pl = NewPerfLog(20, logrus.WithField("on", "test2"))
	time.Sleep(30 * time.Millisecond)
	pl.Tick("a")
	time.Sleep(1200 * time.Millisecond)
	pl.Tick("b")
	time.Sleep(15 * time.Millisecond)
	pl.Done() // show output log
}
def construct_url(base_url, path):
    """Join ``base_url`` and ``path`` with exactly one slash between them.

    A duplicate slash at the seam is collapsed and a missing one is added;
    otherwise the two parts are concatenated unchanged.
    """
    tail_slash = base_url.endswith('/')
    head_slash = path.startswith('/')

    if tail_slash and head_slash:
        # Both sides contribute a slash: drop the one on `path`.
        return base_url + path[1:]
    if not tail_slash and not head_slash:
        # Neither side has one: insert it.
        return base_url + '/' + path
    # Exactly one side has a slash already.
    return base_url + path
// Requirements const { ltVC, vcChatRole } = require("../config.json"); // Code module.exports = { name: 'voiceChannelJoin', execute(member, channel) { console.log(member+' has joined a channel' + channel); if (channel == ltVC) { if (member.roles.cache.some(role => role.name === 'vc chat')) { console.log("User already has role"); } else { member.roles.add(vcChatRole); } } else if (channel !== ltVC) { console.log('User did not join'+ channel) } } }
#!/bin/bash
set -e

# Makes programs, downloads sample data, trains a GloVe model, and then evaluates it.
# One optional argument can specify the language used for eval script: matlab, octave or [default] python

make

# Fetch the text8 corpus on first run (wget if available, else curl).
if [ ! -e text8 ]; then
  if hash wget 2>/dev/null; then
    wget http://mattmahoney.net/dc/text8.zip
  else
    curl -O http://mattmahoney.net/dc/text8.zip
  fi
  unzip text8.zip
  rm text8.zip
fi

# Pipeline file names and training hyperparameters.
CORPUS=text8
VOCAB_FILE=vocab.txt
COOCCURRENCE_FILE=cooccurrence.bin
COOCCURRENCE_SHUF_FILE=cooccurrence.shuf.bin
BUILDDIR=build
SAVE_FILE=vectors
VERBOSE=2
MEMORY=4.0
VOCAB_MIN_COUNT=5
VECTOR_SIZE=50
MAX_ITER=15
WINDOW_SIZE=15
BINARY=2
NUM_THREADS=8
X_MAX=10

# Prefer `python`, fall back to `python3`.
if hash python 2>/dev/null; then
    PYTHON=python
else
    PYTHON=python3
fi

# Stage 1: count vocabulary; Stage 2: build co-occurrence matrix;
# Stage 3: shuffle it; Stage 4: train GloVe vectors.
echo
echo "$ $BUILDDIR/vocab_count -min-count $VOCAB_MIN_COUNT -verbose $VERBOSE < $CORPUS > $VOCAB_FILE"
$BUILDDIR/vocab_count -min-count $VOCAB_MIN_COUNT -verbose $VERBOSE < $CORPUS > $VOCAB_FILE
echo "$ $BUILDDIR/cooccur -memory $MEMORY -vocab-file $VOCAB_FILE -verbose $VERBOSE -window-size $WINDOW_SIZE < $CORPUS > $COOCCURRENCE_FILE"
$BUILDDIR/cooccur -memory $MEMORY -vocab-file $VOCAB_FILE -verbose $VERBOSE -window-size $WINDOW_SIZE < $CORPUS > $COOCCURRENCE_FILE
echo "$ $BUILDDIR/shuffle -memory $MEMORY -verbose $VERBOSE < $COOCCURRENCE_FILE > $COOCCURRENCE_SHUF_FILE"
$BUILDDIR/shuffle -memory $MEMORY -verbose $VERBOSE < $COOCCURRENCE_FILE > $COOCCURRENCE_SHUF_FILE
echo "$ $BUILDDIR/glove -save-file $SAVE_FILE -threads $NUM_THREADS -input-file $COOCCURRENCE_SHUF_FILE -x-max $X_MAX -iter $MAX_ITER -vector-size $VECTOR_SIZE -binary $BINARY -vocab-file $VOCAB_FILE -verbose $VERBOSE"
$BUILDDIR/glove -save-file $SAVE_FILE -threads $NUM_THREADS -input-file $COOCCURRENCE_SHUF_FILE -x-max $X_MAX -iter $MAX_ITER -vector-size $VECTOR_SIZE -binary $BINARY -vocab-file $VOCAB_FILE -verbose $VERBOSE

# Evaluation: only wired up for the text8 corpus; eval-script output goes to
# stderr for the matlab/octave variants.
if [ "$CORPUS" = 'text8' ]; then
   if [ "$1" = 'matlab' ]; then
       matlab -nodisplay -nodesktop -nojvm -nosplash < ./eval/matlab/read_and_evaluate.m 1>&2
   elif [ "$1" = 'octave' ]; then
       octave < ./eval/octave/read_and_evaluate_octave.m 1>&2
   else
       echo "$ $PYTHON eval/python/evaluate.py"
       $PYTHON eval/python/evaluate.py
   fi
fi
#!/bin/bash
# Install the ethereumjs-vm package into the current npm project.
npm install ethereumjs-vm
<reponame>ch1huizong/learning
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2008 <NAME> All rights reserved.
#
"""Package __init__ demonstrating pkgutil.extend_path.

extend_path() scans sys.path for other directories that contain a package
with this package's name and appends them to __path__, so the package's
contents can be split across several sys.path entries.
"""

__version__ = "$Id$"
#end_pymotw_header

import pkgutil

__path__ = pkgutil.extend_path(__path__, __name__)
# Reversing makes the directories found later on sys.path be searched first,
# so they take precedence over this package's own directory.
__path__.reverse()
// Com.HiddenStoreField: a hidden form field that mirrors the value of another
// field in the same form (the "trigger"). Whenever the trigger fires
// onSelect, this field stores either the trigger's raw value
// (params.storeRaw) or its plain value.
cm.define('Com.HiddenStoreField', {
    extend: 'Com.AbstractInputContainer',
    params: {
        constructor: 'Com.AbstractInput',
        storeRaw: false,      // store getRaw() instead of get()
        triggerName: null,    // name of the form field to mirror
    },
}, function() {
    Com.AbstractInputContainer.apply(this, arguments);
});

cm.getConstructor('Com.HiddenStoreField', function(classConstructor, className, classProto, classInherit) {
    classProto.onConstructStart = function() {
        const that = this;
        // Binds
        that.processDataHandler = that.processData.bind(that);
    };

    classProto.onRenderController = function() {
        const that = this;
        // Get trigger field
        const field = that.components.form.getField(that.params.triggerName);
        if (field) {
            that.components.trigger = field.controller;
            // Mirror on every selection, clear on reset, and sync once now.
            // NOTE(review): resetHandler is presumably provided by the base
            // class — it is not defined in this file; confirm.
            that.components.trigger.addEvent('onSelect', that.processDataHandler);
            that.components.trigger.addEvent('onReset', that.resetHandler);
            that.processData();
        }
    };

    // Copy the trigger's current value into this hidden field.
    classProto.processData = function() {
        const that = this;
        const data = that.params.storeRaw ? that.components.trigger.getRaw() : that.components.trigger.get();
        that.set(data);
    };
});

/****** FORM FIELD COMPONENT *******/

// Register the component as a form field type backed by a hidden input.
Com.FormFields.add('hidden-store', {
    node: cm.node('input', {'type': 'hidden'}),
    visible: false,
    fieldConstructor: 'Com.AbstractFormField',
    constructor: 'Com.HiddenStoreField',
});
<reponame>leomillon/try-jcv
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jetbrains.webdemo.kotlin.impl.translator;

import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.cli.jvm.compiler.KotlinCoreEnvironment;
import org.jetbrains.kotlin.config.CommonConfigurationKeys;
import org.jetbrains.kotlin.config.CompilerConfiguration;
import org.jetbrains.kotlin.js.config.JSConfigurationKeys;
import org.jetbrains.kotlin.js.config.JsConfig;
import org.jetbrains.kotlin.js.facade.K2JSTranslator;
import org.jetbrains.kotlin.js.facade.MainCallParameters;
import org.jetbrains.kotlin.js.facade.TranslationResult;
import org.jetbrains.kotlin.js.facade.exceptions.TranslationException;
import org.jetbrains.kotlin.psi.KtFile;
import org.jetbrains.webdemo.kotlin.datastructures.ErrorDescriptor;
import org.jetbrains.webdemo.kotlin.exceptions.KotlinCoreException;
import org.jetbrains.webdemo.kotlin.impl.WrapperSettings;
import org.jetbrains.webdemo.kotlin.impl.analyzer.ErrorAnalyzer;
import org.jetbrains.webdemo.kotlin.impl.environment.EnvironmentManager;

import java.util.*;

/**
 * Facade over the Kotlin-to-JavaScript compiler used by the web demo: it
 * translates a set of Kotlin files and either returns the generated JS
 * (wrapped so console output is captured) or the per-file error list.
 */
@SuppressWarnings("UnusedDeclaration")
public final class WebDemoTranslatorFacade {
    public static final List<String> LIBRARY_FILES = Collections.singletonList(WrapperSettings.JS_LIB_ROOT.toString());
    // NOTE(review): EXCEPTION is never read in this file — possibly dead code.
    @SuppressWarnings("FieldCanBeLocal")
    private static String EXCEPTION = "exception=";

    private WebDemoTranslatorFacade() {
    }

    /**
     * Translates the given files to JS with a generated main(arguments) call.
     * Any compiler failure is rethrown as KotlinCoreException; the shared
     * Java environment is reinitialized afterwards in all cases.
     */
    public static org.jetbrains.webdemo.kotlin.datastructures.TranslationResult translateProjectWithCallToMain
            (@NotNull List<KtFile> files, @NotNull String[] arguments) {
        try {
            return doTranslate(files, arguments);
        } catch (Throwable e) {
            throw new KotlinCoreException(e);
        } finally {
            EnvironmentManager.reinitializeJavaEnvironment();
        }
    }

    @NotNull
    private static org.jetbrains.webdemo.kotlin.datastructures.TranslationResult doTranslate(
            @NotNull List<KtFile> files,
            @NotNull String[] arguments
    ) throws TranslationException {
        KotlinCoreEnvironment environment = EnvironmentManager.getEnvironment();
        Project currentProject = environment.getProject();
        // Copy the configuration so per-translation settings don't leak into
        // the shared environment.
        CompilerConfiguration configuration = environment.getConfiguration().copy();
        configuration.put(CommonConfigurationKeys.MODULE_NAME, "moduleId");
        configuration.put(JSConfigurationKeys.LIBRARIES, Collections.singletonList(WrapperSettings.JS_LIB_ROOT.toString()));
        JsConfig config = new JsConfig(currentProject, configuration);
        // Swallow compiler reporter output; diagnostics are collected below.
        JsConfig.Reporter reporter = new JsConfig.Reporter() {
            @Override
            public void error(@NotNull String message) {
            }

            @Override
            public void warning(@NotNull String message) {
            }
        };
        K2JSTranslator translator = new K2JSTranslator(config);
        TranslationResult result = translator.translate(
                reporter,
                files,
                MainCallParameters.mainWithArguments(Arrays.asList(arguments)));
        if (result instanceof TranslationResult.Success) {
            TranslationResult.Success success = ((TranslationResult.Success) result);
            // Wrap the generated code so the captured stdout buffer is
            // flushed before and returned after execution.
            return new org.jetbrains.webdemo.kotlin.datastructures.TranslationResult(
                    "kotlin.kotlin.io.output.flush();\n" +
                            success.getCode() + "\n" +
                            "kotlin.kotlin.io.output.buffer;\n");
        } else {
            // Failure: build an (initially empty) error list per input file
            // and fill it from the compiler diagnostics.
            Map<String, List<ErrorDescriptor>> errors = new HashMap<>();
            for (PsiFile psiFile : files) {
                errors.put(psiFile.getName(), new ArrayList<ErrorDescriptor>());
            }
            // NOTE(review): errorDescriptors is never used below — possibly
            // dead code.
            ArrayList<ErrorDescriptor> errorDescriptors = new ArrayList<ErrorDescriptor>();
            ErrorAnalyzer errorAnalyzer = new ErrorAnalyzer(files, currentProject);
            errorAnalyzer.getErrorsFromDiagnostics(result.getDiagnostics().all(), errors);
            return new org.jetbrains.webdemo.kotlin.datastructures.TranslationResult(errors);
        }
    }
}
<filename>BOJ/1000/1057.java
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.util.StringTokenizer;

/**
 * BOJ 1057 — tournament bracket: simulate rounds of a single-elimination
 * tournament with players a and b marked, and report the round in which the
 * two marked players are paired against each other (-1 if they never meet).
 */
public class Main {
    public static void main(String[] args) throws IOException {
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        BufferedWriter out = new BufferedWriter(new OutputStreamWriter(System.out));

        StringTokenizer st = new StringTokenizer(in.readLine());
        int players = Integer.parseInt(st.nextToken());
        int a = Integer.parseInt(st.nextToken());
        int b = Integer.parseInt(st.nextToken());

        // Slot i holds 1 iff one of the two marked players occupies seed i.
        // One extra slot past `players` lets an odd-sized round pair the last
        // player against an empty (zero) slot, i.e. a bye.
        int[] bracket = new int[players + 2];
        bracket[a] = 1;
        bracket[b] = 1;

        int answer = -1;
        int round = 1;
        simulate:
        while (players != 1) {
            int[] next = new int[players + 2];
            int advanced = 0;
            for (int slot = 1; slot <= players; slot += 2) {
                // Both marked players in the same match: this is the answer.
                if (bracket[slot] + bracket[slot + 1] == 2) {
                    answer = round;
                    break simulate;
                }
                // A marked player (if any) advances; otherwise 0 advances.
                next[++advanced] = Math.max(bracket[slot], bracket[slot + 1]);
            }
            bracket = next;
            players = advanced;
            round++;
        }

        out.write((answer == -1 ? "-1" : String.valueOf(answer)) + "\n");
        out.flush();
    }
}
"""
Convert a base-10 integer to its base-2 string representation.
"""

def decimal_to_binary(number):
    """Return the binary representation of ``number`` as a string.

    Uses ``format(number, 'b')`` rather than slicing ``bin(number)``: the
    previous ``bin(number)[2:]`` mangled negative input (``bin(-8)`` is
    ``'-0b1000'``, so ``[2:]`` produced the garbage ``'b1000'``), whereas
    ``format(-8, 'b')`` correctly yields ``'-1000'``.

    Args:
        number: The integer to convert.

    Returns:
        str: The binary digits, prefixed with '-' for negative input.
    """
    return format(number, 'b')


if __name__ == '__main__':
    # Quick demonstration: 8 -> '1000'.
    test_number = 8
    binary = decimal_to_binary(test_number)
    print(binary)
#!/bin/bash

# Evaluate the best few-shot XVNLI checkpoint on the French test split.
# The checkpoint is selected by the highest score found in the per-LR
# training logs written next to this script.

TASK=19
SHOT=10
LANG=fr   # target language code (note: this also shadows the locale LANG variable)

MODEL=m3p
MODEL_CONFIG=m3p_base
TASKS_CONFIG=iglue_test_tasks_X101.dtu
TRTASK=XVNLI${LANG}_${SHOT}
TETASK=XVNLI${LANG}
TEXT_PATH=/home/projects/ku_00062/data/XVNLI/annotations/${LANG}/test.jsonl

here=$(pwd)
source /home/projects/ku_00062/envs/iglue/bin/activate

cd ../../../../../../volta

# Pick the learning rate whose training log reports the highest final score.
best=-1
best_lr=-1
for lr in 1e-4 5e-5 1e-5; do
    f=${here}/train.${lr}.log
    s=$(tail -n1 "$f" | cut -d ' ' -f 4)   # score is the 4th field of the log's last line
    d=$(echo "$s>$best" | bc)              # bc performs the floating-point comparison
    if [[ $d -eq 1 ]]; then
        best=$s
        best_lr=$lr
    fi
done
echo "Best lr: " $best_lr

PRETRAINED=/home/projects/ku_00062/checkpoints/iglue/few_shot/xvnli/${TRTASK}/${MODEL}/${best_lr}/XVNLI_${MODEL_CONFIG}/pytorch_model_best.bin
OUTPUT_DIR=/home/projects/ku_00062/results/iglue/few_shot/xvnli/${MODEL}/${best_lr}/${TRTASK}_${MODEL_CONFIG}/$TETASK/test

# Fixed: --val_annotations_jsonpath was previously passed twice, and a dangling
# trailing backslash after the last argument made the line continuation swallow
# the following (blank) line.
python eval_task.py \
    --bert_model /home/projects/ku_00062/huggingface/xlm-roberta-base --config_file config/${MODEL_CONFIG}.json \
    --from_pretrained ${PRETRAINED} --is_m3p \
    --tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK \
    --split test --val_annotations_jsonpath ${TEXT_PATH} \
    --output_dir ${OUTPUT_DIR}

deactivate
<gh_stars>1-10
import { Component, OnInit } from '@angular/core';

// Augment the global Window type so the chat widget instance can be stored on
// it; the Bubbles library calls back into the instance by its global name.
declare global {
    interface Window {
        chatWindow: any;
    }
}
window.chatWindow = window.chatWindow || {};

// `Bubbles` is provided by a globally loaded script (not an npm import),
// hence the ambient declaration.
declare const Bubbles: any;

/**
 * Chat-assistant widget backed by the Bubbles chat library.
 *
 * On init it instantiates a Bubbles chat window inside the `#chat` element
 * and feeds it a small scripted conversation. The second constructor argument
 * ("window.chatWindow") must match the global property the instance is stored
 * on — Bubbles uses that name to route reply-button clicks back to the object.
 */
@Component({
    selector: 'app-assistant',
    templateUrl: './assistant.component.html',
    styleUrls: ['./assistant.component.scss']
})
export class AssistantComponent implements OnInit {

    constructor() { }

    ngOnInit() {
        window.chatWindow = new Bubbles(
            document.getElementById('chat'), // ...passing HTML container element...
            "window.chatWindow" // ...and name of the function as a parameter
        );

        window.chatWindow.talk(
            // pass your JSON/JavaScript object to `.talk()` function where
            // you define how the conversation between the bot and user will go
            {
                // "ice" (as in "breaking the ice") is a required conversation object
                // that maps the first thing the bot will say to the user
                "ice": {

                    // "says" defines an array of sequential bubbles
                    // that the bot will produce
                    "says": [ "Hey!", "Can I have a banana?" ],

                    // "reply" is an array of possible options the user can pick from
                    // as a reply
                    "reply" : [
                        {
                            "question" : "🍌",  // label for the reply option
                            "answer" : "banana",  // key for the next conversation object
                        }
                    ]
                }, // end required "ice" conversation object

                // another conversation object that can be queued from within
                // any other conversation object, including itself
                // (note: "banana" re-queues itself, so this loops indefinitely)
                "banana" : {
                    "says" : [ "Thank you!", "Can I have another banana?"],
                    "reply": [
                        {
                            "question": "🍌🍌",
                            "answer": "banana"
                        }
                    ]
                } // end conversation object
            } // end conversation object
        );
    }
}
import { Injectable } from '@angular/core';

import { BolCoreEventsService } from '../modules/bol-core/services/bol-core-events.service';
import { RouterHelperService } from '../services/router-helper.service';

/**
 * Mediator that translates core domain events (login, logout, game join /
 * create) into navigation via the router helper. Call init() once at
 * application start-up to wire the subscriptions.
 */
@Injectable()
export class BolCoreEventsToRouterMediatorService {

    constructor(private bolCoreEventsService: BolCoreEventsService,
                private routerHelperService: RouterHelperService) {
    }

    /** Subscribe to the core events and route accordingly. */
    init() {
        const events = this.bolCoreEventsService;
        const router = this.routerHelperService;

        // Authentication events.
        events.onUserLoggedIn(() => router.goToGames());
        events.onUserLoggedOut(() => router.goToAuth());

        // Game lifecycle events — both land on the game screen.
        events.onUserJoinedGame(() => router.goToGame());
        events.onUserCreatedGame(() => router.goToGame());
    }
}
#!/bin/bash

# Print the value of the environment variable TEST_6.
# Quoted so embedded whitespace is preserved and glob characters are not expanded.
echo "$TEST_6"

# Append a line of dashes to the file named by the environment variable FILENAME.
# Quoting keeps paths containing spaces working (the unquoted form was subject
# to word splitting and pathname expansion).
echo "----" >> "$FILENAME"

# Append the current date and time to the same file.
date >> "$FILENAME"
<gh_stars>1-10
// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CCDebugRectHistory_h
#define CCDebugRectHistory_h

#include "base/basictypes.h"
#include "base/memory/scoped_ptr.h"
#include "ui/gfx/rect.h"
#include "ui/gfx/rect_f.h"
#include <vector>

namespace cc {

class LayerImpl;
struct LayerTreeSettings;

// There are currently six types of debug rects:
//
// - Paint rects (update rects): regions of a layer that needed to be re-uploaded to the
//   texture resource; in most cases implying that they had to be repainted, too.
//
// - Property-changed rects: enclosing bounds of layers that cause changes to the screen
//   even if the layer did not change internally. (For example, if the layer's opacity or
//   position changes.)
//
// - Surface damage rects: the aggregate damage on a target surface that is caused by all
//   layers and surfaces that contribute to it. This includes (1) paint rects, (2) property-
//   changed rects, and (3) newly exposed areas.
//
// - Screen space rects: this is the region the contents occupy in screen space.
//
// - Replica screen space rects: this is the region the replica's contents occupy in screen space.
//
// - Occluding rects: these are the regions that contribute to the occluded region.
//
enum DebugRectType { PaintRectType, PropertyChangedRectType, SurfaceDamageRectType, ScreenSpaceRectType, ReplicaScreenSpaceRectType, OccludingRectType };

// A single recorded rect, tagged with which of the categories above it
// belongs to. The coordinate space of `rect` depends on `type` (see the
// category descriptions above).
struct DebugRect {
    DebugRect(DebugRectType newType, gfx::RectF newRect)
            : type(newType)
            , rect(newRect) { }

    DebugRectType type;
    gfx::RectF rect;
};

// This class maintains a history of rects of various types that can be used
// for debugging purposes. The overhead of collecting rects is performed only if
// the appropriate LayerTreeSettings are enabled.
class DebugRectHistory {
public:
    // Factory; instances are heap-allocated and owned via scoped_ptr.
    static scoped_ptr<DebugRectHistory> create();

    ~DebugRectHistory();

    // Note: Saving debug rects must happen before layers' change tracking is reset.
    // Replaces the previous frame's rects with those gathered for the current
    // frame; which categories are collected is governed by the settings.
    void saveDebugRectsForCurrentFrame(LayerImpl* rootLayer, const std::vector<LayerImpl*>& renderSurfaceLayerList, const std::vector<gfx::Rect>& occludingScreenSpaceRects, const LayerTreeSettings&);

    // Rects recorded by the most recent saveDebugRectsForCurrentFrame() call.
    const std::vector<DebugRect>& debugRects() { return m_debugRects; }

private:
    DebugRectHistory();

    // One collector per DebugRectType; each appends into m_debugRects.
    void savePaintRects(LayerImpl*);
    void savePropertyChangedRects(const std::vector<LayerImpl*>& renderSurfaceLayerList);
    void saveSurfaceDamageRects(const std::vector<LayerImpl* >& renderSurfaceLayerList);
    void saveScreenSpaceRects(const std::vector<LayerImpl* >& renderSurfaceLayerList);
    void saveOccludingRects(const std::vector<gfx::Rect>& occludingScreenSpaceRects);

    std::vector<DebugRect> m_debugRects;

    DISALLOW_COPY_AND_ASSIGN(DebugRectHistory);
};

}  // namespace cc

#endif
#!/bin/bash

# install.sh
# handles installation of
#   1. npm modules
#   2. bower
#   3. bower file/folder copying to public/_third folder

BOWERDIR="$(pwd)/bower_components"
PUBLICDIR_THIRDPARTY="$(pwd)/public/_third"

# Install local npm modules, then bower globally (required by bowerInstall).
function installNodeModules {
    # install local npm modules
    npm install

    # install bower globally
    npm install -g bower
}

# Fetch/refresh bower packages and copy the required dist artifacts into
# public/_third so the app never serves directly out of bower_components.
function bowerInstall {
    # bower_components must exist and have content before proceeding
    # see README.md for details
    bower install --allow-root
    bower update

    # Start from a clean third-party directory so stale files never linger.
    if [ -d "${PUBLICDIR_THIRDPARTY}" ]; then
        rm -R "${PUBLICDIR_THIRDPARTY}"
    fi
    mkdir -p "${PUBLICDIR_THIRDPARTY}"

    # jquery (not defined in bower.json b/c bootstrap already requires it)
    mkdir -p "${PUBLICDIR_THIRDPARTY}/jquery/dist"
    cp "${BOWERDIR}/jquery/dist/jquery.min.js" "${PUBLICDIR_THIRDPARTY}/jquery/dist/jquery.min.js"

    # bootstrap
    mkdir -p "${PUBLICDIR_THIRDPARTY}/bootstrap"
    cp -R "${BOWERDIR}/bootstrap/dist" "${PUBLICDIR_THIRDPARTY}/bootstrap/"

    # bootstrap dialog
    mkdir -p "${PUBLICDIR_THIRDPARTY}/bootstrap-dialog"
    cp -R "${BOWERDIR}/bootstrap-dialog/dist" "${PUBLICDIR_THIRDPARTY}/bootstrap-dialog/"

    # lodash
    mkdir -p "${PUBLICDIR_THIRDPARTY}/lodash/dist"
    cp "${BOWERDIR}/lodash/dist/lodash.min.js" "${PUBLICDIR_THIRDPARTY}/lodash/dist/lodash.min.js"

    # moment
    mkdir -p "${PUBLICDIR_THIRDPARTY}/moment/min"
    cp "${BOWERDIR}/moment/min/moment.min.js" "${PUBLICDIR_THIRDPARTY}/moment/min/moment.min.js"

    # numeral
    mkdir -p "${PUBLICDIR_THIRDPARTY}/numeraljs/min"
    cp "${BOWERDIR}/numeraljs/min/numeral.min.js" "${PUBLICDIR_THIRDPARTY}/numeraljs/min/numeral.min.js"

    # jquery.serializeJSON
    mkdir -p "${PUBLICDIR_THIRDPARTY}/jquery.serializeJSON"
    cp "${BOWERDIR}/jquery.serializeJSON/jquery.serializejson.min.js" "${PUBLICDIR_THIRDPARTY}/jquery.serializeJSON/jquery.serializejson.min.js"

    # zazzy-browser (zzb) — the glob must stay unquoted so it expands
    mkdir -p "${PUBLICDIR_THIRDPARTY}/zazzy-browser/dist"
    cp "${BOWERDIR}"/zazzy-browser/dist/* "${PUBLICDIR_THIRDPARTY}/zazzy-browser/dist/"

    echo "bower folders created:"
    ls "${PUBLICDIR_THIRDPARTY}/"
}

function updateMaxMind {
    ./updateMaxMind.sh
}

installNodeModules
bowerInstall
updateMaxMind

echo "finished install.sh"
#!/usr/bin/env bash

### Print the square of every number from 0 to 9, one per line.
for (( x = 0; x < 10; x++ )); do
    echo $(( x * x ))
done
#!/bin/bash

BLUE='\033[1;34m'
NO_COLOR='\033[0m'

# Uncomment if we want black to fail if it finds things it would change.
#set -e

# Print a section header in blue (same byte output as the original printfs).
announce() {
    printf "%b%s%b\n" "$BLUE" "$1" "$NO_COLOR"
}

announce "Running Black checks"
./check-black.sh

announce "Running flake8 checks"
./check-flake8.sh

announce "Running mypy checks"
./check-mypy.sh

announce "All tests finished"
<gh_stars>0
package models;

import controllers.exceptions.ObjectForbiddenException;
import play.libs.ws.*;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

import org.bson.types.ObjectId;
import org.jongo.MongoCollection;
import org.jongo.MongoCursor;

import redis.clients.jedis.Tuple;

/**
 * User account backed by MongoDB (via jongo) with a Redis cache layer.
 * Covers the friend/follow graph, blocking, email verification, access
 * tokens, online presence and GCM push registration.
 *
 * NOTE(review): this file contains dataset-anonymization residue —
 * `<PASSWORD>` and `<KEY>` below are redaction tokens, not valid Java.
 * The constructor presumably hashed the password (cf. md5(...) in
 * update(ObjectNode)) and the GCM header carried a server API key;
 * restore from the original source before compiling.
 */
@lombok.Getter
public class User extends Other {
    private String email;
    @JsonProperty("email_verified")
    private boolean emailVerified;
    @lombok.Setter
    private String password;
    private Set<ObjectId> blockings;
    @JsonProperty("friend_requests")
    private Set<ObjectId> friendRequests;
    private int tos;
    @JsonProperty("last_read_notification")
    private ObjectId lastReadNotification;
    private Set<Gcm> gcms;
    private boolean invisibility;

    // No-arg constructor required by jongo/Jackson deserialization.
    public User() {
    }

    // Registration constructor: email is normalized to lower case, the
    // account starts at Observer privilege with tos version 1.
    public User(String email, String password, String name, String nationality, int gender, String birthday,
            List<Integer> nativeLanguage, List<PracticeLanguage> practiceLanguage) {
        id = new ObjectId();
        this.email = email.toLowerCase();
        this.password = <PASSWORD>(password);
        this.name = name;
        this.nationality = nationality;
        this.gender = gender;
        this.birthday = birthday;
        this.nativeLanguage = nativeLanguage;
        this.practiceLanguage = practiceLanguage;
        created = new Date();
        activity = new Date();
        privilege = Privilege.Observer.value();
        tos = 1;
    }

    // Explicit getter: lombok would generate isInvisibility() for a boolean,
    // but callers use getInvisibility().
    public boolean getInvisibility() {
        return invisibility;
    }

    // Users who should receive this user's posts: friends (minus anyone this
    // user unfollowed) plus followers.
    public Set<ObjectId> getSubscribers() {
        Set<ObjectId> subscribers = new HashSet<ObjectId>();
        if (friends != null)
            subscribers.addAll(friends);
        if (unfollowers != null)
            subscribers.removeAll(unfollowers);
        if (followers != null)
            subscribers.addAll(subscribers = null == followers ? subscribers : followers) /*see below*/;
        return subscribers;
    }

    // Persist to Mongo, then blank the in-memory password so it cannot leak.
    // NOTE(review): `<PASSWORD>` here is redaction residue — likely `null`.
    public void save() {
        MongoCollection userCol = jongo.getCollection("user");
        userCol.save(this);
        password = <PASSWORD>;
    }

    public void update(JsonNode params) {
        update((ObjectNode) params);
    }

    // Applies a partial update ($set) from client-supplied JSON; strips the
    // access token, auto-promotes verified Observers to Member, hashes any new
    // password, broadcasts a presence change when invisibility toggles, and
    // invalidates the Redis cache entry.
    private void update(ObjectNode params) {
        MongoCollection userCol = jongo.getCollection("user");
        params.remove("access_token");
        if (privilege == Privilege.Observer.value() && emailVerified == true)
            params.put("privilege", Privilege.Member.value());
        if (params.has("password")) {
            String password = params.get("password").textValue();
            params.put("password", md5(password));
        }
        if (params.size() > 0)
            userCol.update(id).with("{$set:#}", params);
        if (params.has("invisibility")) {
            ObjectNode result = mapper.createObjectNode();
            result.put("action", params.get("invisibility").booleanValue() ? "offline" : "online")
                    .put("user_id", id.toString())
                    .put("name", name);
            if (avatar != null)
                result.put("avatar", avatar.toString());
            if (friends != null)
                publish("user", friends + "\n" + result);
        }
        del(id);
    }

    // Sets the avatar to one of the user's own photos (403 otherwise), and
    // publishes an automatic "changed profile photo" post to followers.
    public void updateAvatar(ObjectId avatar) {
        MongoCollection mediacol = jongo.getCollection("media");
        Media media = mediacol.findAndModify("{_id:#,user_id:#,'type':'photo'}", avatar, id)
                .with("{$unset:{posted:0}}")
                .projection("{_id:1}")
                .as(Media.class);
        if (media == null)
            throw new RuntimeException(new ObjectForbiddenException());
        ObjectNode params = mapper.createObjectNode();
        params.putPOJO("avatar", avatar);
        update(params);
        if (followers != null) {
            List<Attachment> attachments = new ArrayList<Attachment>(1);
            attachments.add(new Attachment(AttachmentType.change_profile_photo, avatar));
            Post post = new Post(id, null, attachments);
            post.setAutomatic(true);
            post.save();
            new Activity(id, ActivityType.changeProfilePhoto, post.getId(), followers).queue();
        }
    }

    // NOTE(review): method name misspells "Notification" — callers depend on
    // it, so it is left as-is here.
    public void updateLastReadNotificaotion(ObjectId id) {
        MongoCollection userCol = jongo.getCollection("user");
        userCol.update(this.id).with("{$set:{last_read_notification:#}}", id);
        del(this.id);
    }

    // 0: send, 1: receive, 2: accept, 3: ignore
    public List<Friend> getFriendRequest(int status) {
        MongoCollection col = jongo.getCollection("friend");
        MongoCursor<Friend> cursor = col.find("{user_id:#,status:#}", id, status)
                .projection("{friend_id:-1}")
                .as(Friend.class);
        List<Friend> friends = new ArrayList<Friend>();
        while (cursor.hasNext())
            friends.add(cursor.next());
        try {
            cursor.close();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return friends;
    }

    // True when no outstanding request from this user to userId exists.
    public boolean friendRequest(ObjectId userId) {
        //TODO cache
        MongoCollection col = jongo.getCollection("friend");
        Friend friend = col.findOne("{user_id:#,friend_id:#,status:#}", id, userId, 0)
                .projection("{_id:1}")
                .as(Friend.class);
        return friend == null;
    }

    // Upserts a pending (status 0) request; only on first creation does it
    // mirror the "received" (status 1) record and queue the notification.
    public void sendFriendRequest(ObjectId userId) {
        MongoCollection col = jongo.getCollection("friend");
        Date date = new Date();
        Friend friend = col.findAndModify("{user_id:#,friend_id:#,status:#}", id, userId, 0)
                .with("{$setOnInsert:{created:#}}", date)
                .upsert()
                .projection("{_id:1}")
                .as(Friend.class);
        if (friend == null) {
            del(id);
            col.update("{user_id:#,friend_id:#,status:#}", userId, id, 1)
                    .upsert()
                    .with("{$setOnInsert:{created:#}}", date);
            new Activity(id, ActivityType.friendRequest, null, userId).queue();
        }
    }

    // Flips both sides of the request to accepted (status 2) and converts any
    // follow relation into the friendship.
    // NOTE(review): the modifier string "{$set:{status:#,modified':#}}"
    // contains a stray apostrophe (field `modified'`) — looks like a typo for
    // `modified`; confirm against the stored documents before fixing.
    public void acceptFriendRequest(User user) {
        MongoCollection col = jongo.getCollection("friend");
        Date date = new Date();
        Friend friend = col.findAndModify("{user_id:#,friend_id:#,status:#}", id, user.getId(), 1)
                .with("{$set:{status:#,modified':#}}", 2, date)
                .projection("{_id:1}")
                .as(Friend.class);
        if (friend != null) {
            col.update("{user_id:#,friend_id:#,status:#}", user.getId(), id, 0)
                    .with("{$set:{status:#,modified':#}}", 2, date);
            Activity.remove(user.getId(), ActivityType.friendRequest, id);
            unfollow(user.getId());
            user.unfollow(id);
            new Activity(id, ActivityType.friendAccept, null, user.getId()).queue();
        }
    }

    // Marks a received request as ignored (status 3); same `modified'` typo.
    public void ignoreFriendRequest(ObjectId userId) {
        MongoCollection col = jongo.getCollection("friend");
        Date date = new Date();
        col.update("{user_id:#,friend_id:#,status:#}", id, userId, 1)
                .with("{$set:{status:#,modified':#}}", 3, date);
        Activity.remove(userId, ActivityType.friendRequest, id);
    }

    // Removes both sides of a pending request and the queued notification.
    public void cancelFriendRequest(ObjectId userId) {
        MongoCollection col = jongo.getCollection("friend");
        col.remove("{user_id:#,friend_id:#,status:#}", id, userId, 0);
        col.remove("{user_id:#,friend_id:#,status:#}", userId, id, 1);
        del(id);
        Activity.remove(id, ActivityType.friendRequest, userId);
    }

    // Deletes the accepted friendship records and clears mutual unfollow marks.
    public void unfriend(ObjectId userId) {
        MongoCollection col = jongo.getCollection("friend");
        col.remove("{user_id:#,friend_id:#,status:#}", id, userId, 2);
        col.remove("{user_id:#,friend_id:#,status:#}", userId, id, 2);
        MongoCollection userCol = jongo.getCollection("user");
        userCol.update(id).with("{$pull:{unfollowers:#}}", userId);
        userCol.update(userId).with("{$pull:{unfollowers:#}}", id);
        del(id, userId);
    }

    // For friends, "follow" just clears an unfollow mark; otherwise it creates
    // the follow edge, an automatic post, and both notification activities.
    public void follow(ObjectId userId) {
        if (friends.contains(userId)) {
            MongoCollection col = jongo.getCollection("user");
            col.update(userId)
                    .with("{$pull:{unfollowers:#}}", id);
            del(userId);
        } else {
            MongoCollection following = jongo.getCollection("following");
            MongoCollection follower = jongo.getCollection("follower");
            List<Attachment> attachments = new ArrayList<Attachment>(1);
            attachments.add(new Attachment(AttachmentType.follow, userId));
            Post post = new Post(id, null, attachments);
            post.setAutomatic(true);
            post.save();
            if (followers != null) {
                followers.remove(userId);
                new Activity(id, ActivityType.follow, post.getId(), followers).queue();
            }
            new Activity(id, ActivityType.followYou, post.getId(), userId).queue();
            following.save(new Following(id, userId));
            follower.save(new Follower(userId, id));
            del(id, userId);
        }
    }

    // Inverse of follow(): friends get an unfollow mark, non-friends have the
    // follow edges removed outright.
    public void unfollow(ObjectId userId) {
        if (friends.contains(userId)) {
            MongoCollection col = jongo.getCollection("user");
            col.update(userId)
                    .with("{$addToSet:{unfollowers:#}}", id);
            del(userId);
        } else {
            MongoCollection following = jongo.getCollection("following");
            MongoCollection follower = jongo.getCollection("follower");
            following.remove("{user_id:#,following_id:#}", id, userId);
            follower.remove("{user_id:#,follower_id:#}", userId, id);
            del(id, userId);
        }
    }

    // Blocking severs every relation in both directions and records the block.
    public void blocking(ObjectId userId) {
        MongoCollection userCol = jongo.getCollection("user");
        MongoCollection following = jongo.getCollection("following");
        MongoCollection follower = jongo.getCollection("follower");
        userCol.update(id).with("{$addToSet:{blockings:#}}", userId);
        following.remove("{user_id:#,following_id:#}", id, userId);
        following.remove("{user_id:#,following_id:#}", userId, id);
        follower.remove("{user_id:#,follower_id:#}", id, userId);
        follower.remove("{user_id:#,follower_id:#}", userId, id);
        unfriend(userId);
    }

    public void unblocking(ObjectId userId) {
        MongoCollection userCol = jongo.getCollection("user");
        userCol.update(id).with("{$pull:{blockings:#}}", userId);
        del(id);
    }

    // Consumes an unused email-verification token; returns the verified user
    // (with a side-effect update that may promote Observer -> Member) or null.
    public static User verifyEmail(String token) {
        MongoCollection userCol = jongo.getCollection("user");
        User user = userCol
                .findAndModify("{tokens:{$elemMatch:{token:#,modified:{$exists:false}}}}", token)
                .with("{$set:{email_verified:true,'tokens.$.modified':#}}", new Date())
                .projection("{password:0}")
                .as(User.class);
        if (user != null) {
            user.emailVerified = true;
            ObjectNode params = mapper.createObjectNode();
            user.update(params);
        }
        return user;
    }

    // Issues a fresh verification token for re-sending the email.
    public String reverifyEmail() {
        MongoCollection userCol = jongo.getCollection("user");
        String token = UUID.randomUUID().toString();
        userCol.update(id)
                .with("{$push:{tokens:{token:#,created:#}}}", token, new Date());
        return token;
    }

    // Consumes any single-use token (marks it modified) and returns its owner.
    public static User getByToken(String token) {
        MongoCollection userCol = jongo.getCollection("user");
        User user = userCol
                .findAndModify("{tokens:{$elemMatch:{token:#,modified:{$exists:false}}}}", token)
                .with("{$set:{'tokens.$.modified':#}}", new Date())
                .projection("{_id:1}")
                .as(User.class);
        return user;
    }

    // Creates a password-reset token for the account, or null if no such email.
    public static String forgetPassword(String email) {
        MongoCollection userCol = jongo.getCollection("user");
        String token = UUID.randomUUID().toString();
        User user = userCol
                .findAndModify("{email:#}", email.toLowerCase())
                .with("{$push:{tokens:{token:#,created:#}}}", token, new Date())
                .projection("{_id:1}")
                .as(User.class);
        return user == null ? null : token;
    }

    // Paged views over the cached relation id-sets (page() inherited from Other).
    public Page getFriend(int skip, int limit) {
        return page(friends, skip, limit, Skim.class);
    }

    public Page getFollower(int skip, int limit) {
        return page(followers, skip, limit, Skim.class);
    }

    public Page getFollowing(int skip, int limit) {
        return page(followings, skip, limit, Skim.class);
    }

    public Page getBlocking(int skip, int limit) {
        return page(blockings, skip, limit, Skim.class);
    }

    // Activity-ordered user search with optional language/gender filters,
    // excluding block relations in either direction and non-member accounts.
    // NOTE(review): the query string is built by concatenating client ints —
    // intValue() keeps it numeric, but any schema change here needs care.
    public static Page search(User me, JsonNode params, Date until, int limit) {
        MongoCollection userCol = jongo.getCollection("user");
        String previous = null;
        String query = "activity:{$lt:#}";
        if (params.has("native_language"))
            query += ",native_language:" + params.get("native_language").intValue();
        if (params.has("practice_language"))
            query += ",practice_language:{$elemMatch:{id:" + params.get("practice_language").intValue() + "}}";
        if (params.has("gender"))
            query += ",gender:" + params.get("gender").intValue();
        MongoCursor<Skim> cursor = userCol.find("{" + query + "}", until)
                .sort("{activity:-1}")
                .limit(limit)
                .as(Skim.class);
        List<Skim> skims = new ArrayList<Skim>();
        Skim skim = null;
        int count = 0;
        while (cursor.hasNext()) {
            skim = cursor.next();
            count++;
            if (me != null) {
                User user = User.get(skim.getId());
                if (me.getBlockings() != null && me.getBlockings().contains(user.getId()))
                    continue;
                if (user.getBlockings() != null && user.getBlockings().contains(me.getId()))
                    continue;
            }
            if (skim.privilege > Privilege.Observer.value())
                skims.add(skim);
        }
        try {
            cursor.close();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        if (count == limit)
            previous = String.format("until=%d&limit=%d", skim.activity.getTime(), limit);
        return new Page(skims, previous);
    }

    // Cache-through load: on a miss, fetches from Mongo and denormalizes the
    // relation id-sets onto the instance before it is cached.
    public static User get(ObjectId userId) {
        String key = "user:" + userId;
        return cache(key, User.class, new Callable<User>() {
            public User call() {
                MongoCollection userCol = jongo.getCollection("user");
                User user = userCol.findOne(userId).as(User.class);
                if (user != null) {
                    user.friends = Friend.getFriendIds(userId);
                    user.followings = Following.getFollowingIds(userId);
                    user.followers = Follower.getFollowerIds(userId);
                    user.friendRequests = Friend.getFriendRequestIds(userId);
                }
                return user;
            }
        });
    }

    // Login lookup — intentionally projects only the password hash.
    public static User getByEmail(String email) {
        MongoCollection userCol = jongo.getCollection("user");
        User user = userCol.findOne("{email:#}", email.toLowerCase())
                .projection("{password:1}").as(User.class);
        return user;
    }

    // Resolves an access token (sliding 7-day expiry via getex) to a user.
    public static User getByAccessToken(String token) {
        String id = getex("token:" + token, 86400 * 7);
        if (id == null)
            return null;
        return get(new ObjectId(id));
    }

    public static String getUserIdByAccessToken(String token) {
        String id = get("token:" + token);
        return id;
    }

    // Mints a 24h access token in Redis for this user.
    public String newAccessToken() {
        String token = UUID.randomUUID().toString();
        set("token:" + token, 86400, id.toString());
        return token;
    }

    // Registers an externally supplied token and bumps the activity timestamp.
    public static void newAccessToken(String userId, String token) {
        MongoCollection col = jongo.getCollection("user");
        col.update(new ObjectId(userId)).with("{$set:{activity:#}}", new Date());
        set("token:" + token, 86400, userId);
    }

    public static void deleteAccessToken(String token) {
        del("token:" + token);
    }

    // Presence transition via a Redis script; broadcasts "online" to friends
    // on the first session unless the user is invisible.
    public static void online(String userId, String token) {
        @SuppressWarnings("unchecked")
        long online = (Long)evalScript("online", userId, token, Long.toString(now()));
        if (online == 1) {
            User user = get(new ObjectId(userId));
            if (user.invisibility)
                return;
            ObjectNode result = mapper.createObjectNode();
            result.put("action", "online")
                    .put("user_id", userId)
                    .put("name", user.getName());
            if (user.getAvatar() != null)
                result.put("avatar", user.getAvatar().toString());
            if (user.getFriends() != null)
                publish("user", user.getFriends() + "\n" + result);
        }
    }

    // Mirror of online(): broadcasts "offline" when the last session ends.
    public static void offline(String userId, String token) {
        @SuppressWarnings("unchecked")
        long offline = (Long)evalScript("offline", userId, token);
        if (offline == 1) {
            User user = get(new ObjectId(userId));
            if (user.invisibility)
                return;
            ObjectNode result = mapper.createObjectNode();
            result.put("action", "offline")
                    .put("user_id", userId)
                    .put("name", user.getName());
            if (user.getAvatar() != null)
                result.put("avatar", user.getAvatar().toString());
            if (user.getFriends() != null)
                publish("user", user.getFriends() + "\n" + result);
        }
    }

    // Pages online friends newest-activity-first, intersecting the cached
    // friend id-set with the global online sorted set in Redis.
    // NOTE(review): `tuple.size() == limit` below is reached even when
    // zrevrangeByScoreWithScores returned null — potential NPE; confirm the
    // Redis helper's null contract.
    public Page getOnlineFriend(long until, int limit) {
        List<Skim> skims = new ArrayList<Skim>();
        String previous = null;
        Set<ObjectId> ids = getFriends();
        if (ids == null || ids.size() == 0)
            return new Page(skims, previous);
        if (!exists("friends:user_id:" + id)) {
            Map<String, Double> scoreMembers = new HashMap<String, Double>(ids.size());
            Iterator<ObjectId> iterator = ids.iterator();
            while (iterator.hasNext())
                scoreMembers.put(iterator.next().toString(), 0.0);
            zadd("friends:user_id:" + id, scoreMembers);
        }
        zinterstore("friends:online:" + id, "online:user_id", "friends:user_id:" + id);
        Set<Tuple> tuple = zrevrangeByScoreWithScores("friends:online:" + id, until - 1, 0, 0, limit);
        if (tuple != null && tuple.size() > 0) {
            for (Tuple t : tuple) {
                User user = User.get(new ObjectId(t.getElement()));
                if (user != null && !user.invisibility) {
                    Skim skim = Skim.get(new ObjectId(t.getElement()));
                    until = (long)t.getScore();
                    skim.activity = new Date(until);
                    skims.add(skim);
                }
            }
        }
        if (tuple.size() == limit)
            previous = String.format("until=%d&limit=%d", until, limit);
        return new Page(skims, previous);
    }

    // Cache invalidation helpers for one or two users' derived Redis keys.
    private static void del(ObjectId id) {
        del("user:" + id, "name:" + id, "avatar:" + id, "friends:user_id:" + id);
    }

    private static void del(ObjectId id1, ObjectId id2) {
        del("user:" + id1, "name:" + id1, "avatar:" + id1, "friends:user_id:" + id1,
                "user:" + id2, "name:" + id2, "avatar:" + id2, "friends:user_id:" + id2);
    }

    // GCM (push) registration management; one entry per device id + language.
    public void addGCM(String id, String lang) {
        MongoCollection col = jongo.getCollection("user");
        col.update(this.id).with("{$push:{gcms:{id:#,lang:#}}}", id, lang);
        del(this.id);
    }

    public void updateGCM(String id, String lang) {
        MongoCollection col = jongo.getCollection("user");
        col.update("{_id:#,gcms:{$elemMatch:{id:#}}}", this.id, id)
                .with("{$set:{'gcms.$.lang':#}}", lang);
        del(this.id);
    }

    public void removeGCM(String id) {
        MongoCollection col = jongo.getCollection("user");
        col.update(this.id).with("{$pull:{gcms:{id:#}}}", id);
        del(this.id);
    }

    // Fire-and-forget push to every registered device.
    // NOTE(review): `<KEY>` is a redacted server API key — must come from
    // configuration, never be committed in source.
    public void pushNotification() {
        ObjectNode json = mapper.createObjectNode();
        ArrayNode ids = json.putArray("registration_ids");
        for (Gcm gcm : gcms)
            ids.add(gcm.getId());
        ObjectNode data = json.putObject("data");
        data.put("message", "something new?");
        WS.url("https://android.googleapis.com/gcm/send")
                .setHeader("Authorization", "key=<KEY>")
                .setHeader("Content-Type", "application/json")
                .post(json);
    }

    // Sends a "hi" at most once per 24h per recipient.
    public void hi(ObjectId userId) {
        MongoCollection col = jongo.getCollection("activity");
        Activity activity = col
                .findOne("{type:#,user_id:#,receivers:#,created:{$gt:#}}", ActivityType.hi.value(), id, userId, new Date(now() - 86400000L))
                .projection("{_id:1}")
                .as(Activity.class);
        if (activity == null)
            new Activity(id, ActivityType.hi, null, userId).queue();
    }

    public Date getCreatedTime(){
        return created;
    }
}
<filename>config/initializers/omniauth.rb<gh_stars>0
# frozen_string_literal: true

# Registers the OmniAuth middleware with the third-party OAuth providers the
# app supports. Credentials are read from the environment at boot; for
# Facebook only the email scope/field is requested.
Rails.application.config.middleware.use OmniAuth::Builder do
  provider :facebook, ENV['FACEBOOK_KEY'], ENV['FACEBOOK_SECRET'], scope: 'email', info_fields: 'email'
  provider :google_oauth2, ENV['GOOGLE_KEY'], ENV['GOOGLE_SECRET']
  provider :github, ENV['GITHUB_KEY'], ENV['GITHUB_SECRET']
end
/**
 * A component's height and width determine its size on the screen.
 */
import React, { Component } from 'react';
import { View } from 'react-native';

/**
 * Fixed dimensions.
 * The simplest way to size a component is to specify fixed `width` and
 * `height` in its style. All dimensions in React Native are unitless and
 * represent density-independent (logical) pixels. Sizing components this way
 * is a common pattern for elements that should render at the same size
 * regardless of screen dimensions.
 */
class FixedDimensionsBasics extends Component {
    render() {
        return (
            <View>
                <View style={{ width: 50, height: 50, backgroundColor: 'powderblue' }} />
                <View style={{ width: 100, height: 100, backgroundColor: 'skyblue' }} />
                <View style={{ width: 150, height: 150, backgroundColor: 'steelblue' }} />
            </View>
        );
    }
}

/**
 * Flex dimensions.
 * Use `flex` in a component's style to let it expand or shrink dynamically
 * within the available space. Typically `flex: 1` tells a component to fill
 * all remaining space. If several siblings use `flex: 1`, they share the
 * remaining space equally; with different `flex` values the space is split
 * proportionally (the larger the value, the larger the share).
 *
 * A component can only fill remaining space if its parent has a non-zero
 * size. If the parent has neither a fixed `width`/`height` nor `flex`, the
 * parent's size is zero and its flex children will not be visible.
 */
class FlexDimensionsBasics extends Component {
    render() {
        return (
            // Try removing the `flex: 1` on the parent View:
            // the parent then has no dimensions, so the children cannot expand.
            // Then try replacing it with `height: 300` and see the difference.
            <View style={{ flex: 1 }}>
                <View style={{ flex: 1, backgroundColor: 'powderblue' }} />
                <View style={{ flex: 2, backgroundColor: 'skyblue' }} />
                <View style={{ flex: 3, backgroundColor: 'steelblue' }} />
            </View>
        );
    }
}

// Demo wrapper rendering both sizing examples.
export default class WidthHeightApp extends Component {
    render() {
        return (
            <View>
                <FixedDimensionsBasics />
                <FlexDimensionsBasics />
            </View>
        );
    }
}
module LucidOperation
  module Gherkin
    FIRST_EXCEPTION = "First must be the first one that is used and can only used once!"
    FINALLY_EXCEPTION = "Finally, except for Ensure and Failed, must be the last one to be used and can only be used once!"

    # Line-matching patterns. Each captures the text following the keyword.
    NEWLINE = /\r?\n/
    OPERATION = /^\s*Operation: (.*)$/
    PROCEDURE = /^\s*Procedure: (.*)$/
    STAR = /^\s*\* (.*)$/
    GIVEN = /^\s*Given (.*)$/
    WHEN = /^\s*When (.*)$/
    THEN = /^\s*Then (.*)$/
    AND = /^\s*And (.*)$/
    FIRST = /^\s*First (.*)$/
    ENSURE = /^\s*Ensure (.*)$/
    FINALLY = /^\s*Finally (.*)$/
    IW_FAILING = /^\s*(?:When|If) failing (.*)$/
    IF_ITT_FAILED = /^\s*If (?:that|this|it) failed (.*)$/
    FAILED = /^\s*Failed (.*)$/
    COMMENT = /^\s*# (.*)$/
    WHITE_SPACE = /^\s*$/

    # Parses gherkin-style operation text into a Hash:
    #   :operation   - operation name (required, defined exactly once)
    #   :procedure   - procedure text (defined once, after Operation)
    #   :steps       - ordered step texts (*, Given, When, Then, And, First, Finally)
    #   :failure     - failure-handler steps (When/If failing, If that/this/it failed, Failed)
    #   :ensure      - steps that always run (Ensure)
    #   :description - free-form lines encountered before Procedure is defined
    # Raises (via Isomorfeus.raise_error) on keyword-ordering violations.
    def self.parse(gherkin_text)
      operation = { operation: '', procedure: '', steps: [], failure: [], ensure: [] }
      has_finally = false
      lines = gherkin_text.split(NEWLINE)
      lines.each do |line|
        case line
        # Failure handlers must be matched BEFORE the generic step keywords:
        # "When failing ..." also matches WHEN, and would otherwise be routed
        # into :steps instead of :failure.
        when IW_FAILING, IF_ITT_FAILED, FAILED
          operation[:failure] << $1.strip
        when STAR, GIVEN, WHEN, THEN, AND
          # Ordinary steps are illegal after Finally.
          Isomorfeus.raise_error(message: FINALLY_EXCEPTION) if has_finally
          operation[:steps] << $1.strip
        when ENSURE
          # Ensure steps are collected separately and remain legal after Finally.
          operation[:ensure] << $1.strip
        when FIRST
          Isomorfeus.raise_error(message: FIRST_EXCEPTION) if operation[:steps].size > 0
          operation[:steps] << $1.strip
        when FINALLY
          Isomorfeus.raise_error(message: FINALLY_EXCEPTION) if has_finally
          operation[:steps] << $1.strip
          has_finally = true
        when PROCEDURE
          Isomorfeus.raise_error(message: 'No Operation defined!') if operation[:operation].empty?
          Isomorfeus.raise_error(message: 'Procedure already defined!') unless operation[:procedure].empty?
          operation[:procedure] = $1.strip
        when OPERATION
          Isomorfeus.raise_error(message: 'Operation already defined!') unless operation[:operation].empty?
          operation[:operation] = $1.strip
        when WHITE_SPACE, COMMENT
          # nothing, just skip
        else
          # Unrecognized lines are only legal before Procedure is defined,
          # where they accumulate as the operation description.
          Isomorfeus.raise_error(message: "Unknown key word(s) at the beginning of the line: #{line}") unless operation[:procedure].empty?
          operation[:description] = [] unless operation.key?(:description)
          operation[:description] << line.strip
        end
      end
      operation
    end
  end
end
#!/bin/bash
# Dumps every document of an Elasticsearch index to a JSON file using the
# scroll API.
#
# Usage: script.sh ES_URL INDEX_NAME [OUTPUT_FILE]
# BUCKET=N - set bucket size, default is 10000 if not set
if [ -z "$1" ]
then
  echo 'Please specify elasticsearch url as a first argument'
  exit 1
fi
if [ -z "$2" ]
then
  echo 'Please specify index name as a second argument'
  exit 2
fi
fn="${2}.json"
if [ ! -z "$3" ]
then
  fn="$3"
fi
temp=temp.json
err=err.txt
function cleanup {
  rm -f "$temp" "$err"
}
trap cleanup EXIT
# Print the last response/stderr, a message, and exit with the given code.
function fexit {
  cat "$temp"
  cat "$err"
  echo "$1"
  exit $2
}
bucket=10000
if [ ! -z "$BUCKET" ]
then
  bucket=$BUCKET
fi
# Initial search opens the scroll context and returns the first page.
curl -XGET "${1}/${2}/_search?scroll=1m&size=${bucket}&pretty" > "$temp" 2>"$err" || fexit 'Error initializing scroll' 4
# jq output keeps the surrounding quotes, so $scroll_id can be spliced into
# JSON bodies verbatim.
scroll_id=`cat "$temp" | jq '._scroll_id'`
# ES 7+ nests the total under .value; fall back to the plain number for older versions.
hits=`cat "$temp" | jq '.hits.total.value' 2>"$err"`
if [ -z "$hits" ] || [ "$hits" = "null" ]
then
  hits=`cat "$temp" | jq '.hits.total'`
fi
if ( [ -z "$scroll_id" ] || [ -z "$hits" ] || [ "$scroll_id" = "null" ] || [ "$hits" = "null" ] )
then
  cat "$temp"
  echo "Scroll ID not found ($scroll_id) or no hits ($hits)"
  exit 5
fi
echo "Scroll ID: $scroll_id, Hits: $hits"
cnt=`cat "$temp" | jq '.hits.hits | length'`
if [ "$cnt" = "0" ]
then
  echo "Index $2 has no data, returning"
  echo "[]" > "$fn"
  exit 0
fi
echo "Got $cnt records from initial call"
data=`cat "$temp" | jq '.hits.hits'`
# Strip the enclosing [ and ] so pages can be comma-joined into one array.
data="${data:1:-1}"
all_data="$data"
loopz=0
while true
do
  json="{\"scroll\":\"1m\",\"scroll_id\":${scroll_id}}"
  curl -XGET -H 'Content-Type: application/json' "${1}/_search/scroll?pretty" -d "$json" > "$temp" 2>"$err" || fexit "Error getting data: $json" 6
  # The scroll id can change between requests; always continue with the most
  # recent one returned by the server.
  new_scroll_id=`cat "$temp" | jq '._scroll_id'`
  if [ ! -z "$new_scroll_id" ] && [ "$new_scroll_id" != "null" ]
  then
    scroll_id="$new_scroll_id"
  fi
  cnt=`cat "$temp" | jq '.hits.hits | length'`
  if [ "$cnt" = "0" ]
  then
    echo "No more data, done $loopz scroll API loops (doesn't include initial $bucket fetch)"
    # Explicitly release the scroll context instead of waiting for expiry.
    json="{\"scroll_id\":${scroll_id}}"
    curl -XDELETE -H 'Content-Type: application/json' "${1}/_search/scroll" -d "$json" > "$temp" 2>"$err" || fexit "Error clearing scroll: $json" 7
    break
  fi
  data=`cat "$temp" | jq '.hits.hits'`
  data="${data:1:-1}"
  all_data="$all_data,$data"
  loopz=$((loopz+1))
  echo "Got $cnt records in #$loopz loop"
done
echo "[ $all_data ]" > "$fn"
records=`cat "$fn" | jq '. | length'`
echo "Done, saved $records records"