text stringlengths 1 1.05M |
|---|
/**
 * Demonstrates computing the arithmetic mean of an int array.
 */
public class Average {

    /**
     * Returns the arithmetic mean of the given values.
     *
     * @param numbers values to average; must not be null
     * @return the mean, or NaN for an empty array (0.0 / 0 division)
     */
    public static double mean(int[] numbers) {
        double sum = 0;
        // Enhanced for-loop: no index bookkeeping, and a braced body
        // (the original un-braced loop is a classic maintenance trap).
        for (int number : numbers) {
            sum += number;
        }
        return sum / numbers.length;
    }

    public static void main(String[] args) { // String[] args, not C-style args[]
        int[] numbers = {1, 2, 3, 4, 5};
        System.out.println("Mean = " + mean(numbers));
    }
}
import socket
import threading

# Create a TCP/IPv4 listening socket.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Allow quick restarts without "address already in use" errors.
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

# Bind socket to port
server_address = ('127.0.0.1', 10000)
s.bind(server_address)

# Wait for incoming connections
s.listen()


def client_thread(conn):
    """Echo every chunk received on `conn` back to the peer.

    Exits (and closes the socket) when the peer disconnects, i.e. when
    recv() returns an empty bytes object. The original looped forever on
    a closed connection, busy-spinning on empty recv()/send() calls.
    """
    try:
        while True:
            data = conn.recv(4096)
            if not data:  # peer closed the connection -> stop echoing
                break
            # sendall(): plain send() may transmit only part of the buffer.
            conn.sendall(data)
    finally:
        conn.close()


# Main accept loop: one handler thread per client.
while True:
    conn, addr = s.accept()
    print("Connected to:", addr)
    thread = threading.Thread(target=client_thread, args=(conn,))
    thread.start()
<gh_stars>0
// Generated Angular type declarations (ngcc) for the "head" pipe package.
// NOTE(review): the ɵ-prefixed members are compiler-emitted internals --
// do not edit by hand; regenerate instead.
import { PipeTransform } from '@angular/core';
import * as ɵngcc0 from '@angular/core';

// Pipe registered under the template name "head" (see ɵpipe declaration);
// the transform's semantics live in the implementation, not visible here.
export declare class HeadPipe implements PipeTransform {
    transform(input: any): any;
    static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<HeadPipe, never>;
    static ɵpipe: ɵngcc0.ɵɵPipeDeclaration<HeadPipe, "head">;
}

// NgModule that declares and exports HeadPipe (per the NgModuleDeclaration
// type arguments).
export declare class NgHeadPipeModule {
    static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<NgHeadPipeModule, never>;
    static ɵmod: ɵngcc0.ɵɵNgModuleDeclaration<NgHeadPipeModule, [typeof HeadPipe], never, [typeof HeadPipe]>;
    static ɵinj: ɵngcc0.ɵɵInjectorDeclaration<NgHeadPipeModule>;
}

//# sourceMappingURL=head.pipe.d.ts.map
#!/bin/bash
# Runs the SST grouping step in R. Everything below except the final R
# invocation is historical scratch (data download URL, R date arithmetic
# output, a file-iteration sketch) kept for reference.
#wget http://atlas.nmfs.hawaii.edu/cgi-bin/reynolds_extract.py?lon1=150\&lon2=180\&lat1=0\&lat2=30\&year1=2003\&day1=2\&year2=2004\&day2=57 -O test.zip
#R --vanilla < get.sst.from.server.R
#for
#> mdy.date(1,1,2003)
#[1] 1Jan2003
#> mdy.date(1:12,1,2003)-mdy.date(1,1,2003)
# [1] 0 31 59 90 120 151 181 212 243 273 304 334
#> mdy.date(1:12,1,2004)-mdy.date(1,1,2004)
# [1] 0 31 60 91 121 152 182 213 244 274 305 335
#declare -r FILES=`ls sst/*`
#for f in $FILES
#do
# echo $f ;
#done
# Run R non-interactively; the script determines its own inputs/outputs.
R --vanilla < groupSSTByYearMonthLatitudeLongitude.R
<reponame>sonasingh46/maya
/*
Copyright 2019 The OpenEBS Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1alpha1
import (
stringer "github.com/openebs/maya/pkg/apis/stringer/v1alpha1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
func init() {
	// Register adds UpgradeResult and UpgradeResultList objects to
	// SchemeBuilder so they can be added to a Scheme
	SchemeBuilder.Register(&UpgradeResult{}, &UpgradeResultList{})
}

// +genclient
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +resource:path=upgrade

// UpgradeResult contains the desired specifications of an
// upgrade result
type UpgradeResult struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	// Config holds the resource details and runtime configuration
	// for this unit of upgrade.
	Config UpgradeResultConfig `json:"config"`
	// Tasks are the runtasks that need to be
	// executed to perform this upgrade
	Tasks []UpgradeResultTask `json:"tasks"`
	// Status captures the observed progress of the upgrade.
	Status UpgradeResultStatus `json:"status"`
}
// UpgradeResultConfig represents the config of UpgradeResult i.e.
// it contains resource details of a single unit of upgrade and
// all runtime configuration.
type UpgradeResultConfig struct {
	ResourceDetails
	// Data is used to provide some runtime configurations to the
	// castemplate engine. The task executor will directly copy these
	// configurations to the castemplate engine.
	Data []DataItem `json:"data"`
}

// UpgradeResultStatus represents the current state of UpgradeResult
type UpgradeResultStatus struct {
	// DesiredCount is the total number of resources that
	// need to be upgraded to a desired version
	DesiredCount int `json:"desiredCount"`
	// ActualCount represents the number of resources
	// that have been successfully upgraded
	ActualCount int `json:"actualCount"`
	// FailedCount represents the number of resources
	// that have failed to upgrade
	FailedCount int `json:"failedCount"`
	// Resource is the resource that needs to
	// be upgraded
	Resource UpgradeResource `json:"resource"`
}

// UpgradeResource represents a resource that needs to
// be upgraded to a desired version
type UpgradeResource struct {
	ResourceDetails
	// PreState represents the state of the resource
	// before upgrade
	PreState ResourceState `json:"preState"`
	// PostState represents the state of the resource
	// after upgrade
	PostState ResourceState `json:"postState"`
	// SubResources are the resources related to
	// this resource which need to be upgraded
	SubResources []UpgradeSubResource `json:"subResources"`
}
// UpgradeResultTask represents details of a task (runtask) required
// to be executed for upgrading a particular resource
type UpgradeResultTask struct {
	// Name of the task
	Name string `json:"name"`
	// Status is the status of the task, which
	// could be successful or failed
	Status string `json:"status"`
	// LastTransitionTime is the last time the status
	// transitioned from one status to another.
	LastTransitionTime metav1.Time `json:"lastTransitionTime"`
	// Message is a human readable message
	// indicating details about the task
	Message string `json:"message"`
	// LastError is the last error that occurred
	// while executing this task
	LastError string `json:"lastError"`
	// StartTime of the task
	StartTime *metav1.Time `json:"startTime"`
	// EndTime of the task
	EndTime *metav1.Time `json:"endTime"`
	// Retries is the number of times this task
	// has tried to execute
	Retries int `json:"retries"`
}

// UpgradeSubResource represents the details of
// a subresource which needs to be upgraded
type UpgradeSubResource struct {
	ResourceDetails
	// PreState represents the state of the
	// subresource before upgrade
	PreState ResourceState `json:"preState"`
	// PostState represents the state of the
	// subresource after upgrade
	PostState ResourceState `json:"postState"`
}

// ResourceDetails represents the basic details
// of a particular resource
type ResourceDetails struct {
	// Name of the resource
	Name string `json:"name"`
	// Kind is the type of resource i.e.
	// cvr, deployment, ..
	Kind string `json:"kind"`
	// APIVersion of the resource
	APIVersion string `json:"apiVersion"`
	// Namespace of the resource
	Namespace string `json:"namespace"`
	// Generation of a resource represents the last successful Generation
	// observed by the resource controller (e.g. the deployment controller).
	// Every time we patch a resource it is assigned a new Generation.
	// This is helpful at the time of rollback.
	Generation string `json:"generation"`
}
// String implements the Stringer interface
func (rd ResourceDetails) String() string {
	// Render as YAML via the shared stringer helper for readable logs.
	return stringer.Yaml("resource details", rd)
}

// GoString implements the GoStringer interface (used by %#v formatting)
func (rd ResourceDetails) GoString() string {
	return rd.String()
}

// ResourceState represents the state of a resource
type ResourceState struct {
	// Status of the resource
	Status string `json:"status"`
	// LastTransitionTime is the last time the status
	// transitioned from one status to another.
	LastTransitionTime metav1.Time `json:"lastTransitionTime"`
	// Message is a human readable message indicating details about the transition.
	Message string `json:"message"`
}

// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +resource:path=upgrades

// UpgradeResultList is a list of UpgradeResults
type UpgradeResultList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata"`
	Items           []UpgradeResult `json:"items"`
}

// String implements the Stringer interface
func (urList UpgradeResultList) String() string {
	return stringer.Yaml("upgraderesult list", urList)
}

// GoString implements the GoStringer interface (used by %#v formatting)
func (urList UpgradeResultList) GoString() string {
	return urList.String()
}
|
<reponame>thirdkindgames/PlayFabSdk
#pragma once
#include <PlayFabComboSdk/PlayFabError.h>
#include <PlayFabComboSdk/PlayFabMatchmakerDataModels.h>
#include <PlayFabComboSdk/PlayFabHttp.h>
namespace PlayFabComboSdk
{
    // All-static facade over the PlayFab Matchmaker API (declarations only;
    // behavior lives in the implementation file). Each generated call takes
    // a request model plus optional success/error callbacks; customData is
    // passed through to the callbacks. NOTE(review): the int return value's
    // meaning is defined by PlayFabHttp -- confirm in the implementation.
    class PlayFabMatchmakerApi
    {
    public:
        // ------------ Error callback
        static void OnError(const PlayFabRequest& request, const PlayFabError& error);

        // ------------ Generated Api calls
        static int AuthUser(MatchmakerModels::AuthUserRequest& request, ProcessApiCallback<MatchmakerModels::AuthUserResponse> callback = nullptr, ErrorCallback errorCallback = nullptr, void* customData = nullptr);
        static int PlayerJoined(MatchmakerModels::PlayerJoinedRequest& request, ProcessApiCallback<MatchmakerModels::PlayerJoinedResponse> callback = nullptr, ErrorCallback errorCallback = nullptr, void* customData = nullptr);
        static int PlayerLeft(MatchmakerModels::PlayerLeftRequest& request, ProcessApiCallback<MatchmakerModels::PlayerLeftResponse> callback = nullptr, ErrorCallback errorCallback = nullptr, void* customData = nullptr);
        static int StartGame(MatchmakerModels::StartGameRequest& request, ProcessApiCallback<MatchmakerModels::StartGameResponse> callback = nullptr, ErrorCallback errorCallback = nullptr, void* customData = nullptr);
        static int UserInfo(MatchmakerModels::UserInfoRequest& request, ProcessApiCallback<MatchmakerModels::UserInfoResponse> callback = nullptr, ErrorCallback errorCallback = nullptr, void* customData = nullptr);

    private:
        // ------------ Private constructor, to enforce all-static class
        PlayFabMatchmakerApi();

        // ------------ Generated result handlers
        static void OnAuthUserResult(PlayFabRequest* request);
        static void OnPlayerJoinedResult(PlayFabRequest* request);
        static void OnPlayerLeftResult(PlayFabRequest* request);
        static void OnStartGameResult(PlayFabRequest* request);
        static void OnUserInfoResult(PlayFabRequest* request);
    };
};
|
<filename>src/shared/rest/index.ts<gh_stars>1-10
import { RESTDataSource } from 'apollo-datasource-rest';
import { SuitablePlanetsResponse } from 'modules/planets/types/suitablePlanets.type';
// eslint-disable-next-line import/no-extraneous-dependencies
import { DataSourceConfig } from 'apollo-datasource';
import { Service } from 'typedi';
type SuitablePlanetsRemoteResponse = Omit<
SuitablePlanetsResponse,
'hasStation'
>;
@Service()
export class NasaAPI extends RESTDataSource {
  constructor() {
    super();
    this.baseURL =
      'https://exoplanetarchive.ipac.caltech.edu/cgi-bin/nstedAPI/nph-nstedAPI';
    this.initialize({} as DataSourceConfig<any>);
  }

  /**
   * Fetches all exoplanets from the NASA exoplanet archive.
   *
   * The archive response contains a non-JSON preamble before the JSON
   * array, so the payload is cut at the first '[' before parsing.
   *
   * @returns planets mapped to { name, mass } from the `pl_name` and
   *          `pl_bmassj` columns
   * @throws Error when the response contains no JSON array (previously
   *         a -1 from `search` silently sliced the last character and
   *         produced a confusing JSON.parse failure)
   */
  public async getPlanets(): Promise<SuitablePlanetsRemoteResponse[]> {
    const response: string = await this.get('/', {
      table: 'exoplanets',
      format: 'json'
    });

    // indexOf: a plain character search, no regex machinery needed.
    const startIndex = response.indexOf('[');
    if (startIndex === -1) {
      throw new Error('NasaAPI: response did not contain a JSON array');
    }

    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const objectData: any[] = JSON.parse(response.slice(startIndex));

    // map() instead of forEach + push: same order, same elements.
    return objectData.map(planet => ({
      name: planet.pl_name,
      mass: planet.pl_bmassj
    }));
  }
}
|
import Employee from './Employee';
import Patient from './Patient';
/**
 * In-memory registry of employees and patients.
 * Exported as a singleton so every importer shares one instance.
 */
class Database {
  // Field initializers replace the explicit constructor: behavior is
  // identical (both run at construction time).
  private employees: Employee[] = [];
  private patients: Patient[] = [];

  /** Returns the live (mutable) employee list. */
  getAllEmployees() {
    return this.employees;
  }

  /** Returns the live (mutable) patient list. */
  getAllPatients() {
    return this.patients;
  }
}

export default new Database();
|
#!/bin/bash -xe
# Build a Debian package for indy-plenum with fpm.
#
# Usage: $0 INPUT_PATH VERSION [OUTPUT_PATH] [PACKAGE_VERSION]
#
# All parameter expansions are quoted: the original script broke on any
# path containing spaces or glob characters.

INPUT_PATH="$1"
VERSION="$2"
OUTPUT_PATH="${3:-.}"
PACKAGE_VERSION="${4:-$VERSION}"
PACKAGE_NAME=indy-plenum

# copy the sources to a temporary folder (dotglob so dotfiles are included)
TMP_DIR="$(mktemp -d)"
shopt -s dotglob
cp -r "${INPUT_PATH}/." "${TMP_DIR}"

# prepare the sources
cd "${TMP_DIR}/build-scripts/ubuntu-1604"
./prepare-package.sh "${TMP_DIR}" "${VERSION}"

# substitute the package name into the maintainer scripts
sed -i 's/{package_name}/'"${PACKAGE_NAME}"'/' "postinst"
sed -i 's/{package_name}/'"${PACKAGE_NAME}"'/' "prerm"

fpm --input-type "python" \
    --output-type "deb" \
    --architecture "amd64" \
    --depends "python3-pyzmq (= 18.1.0)" \
    --verbose \
    --python-package-name-prefix "python3" \
    --python-bin "/usr/bin/python3" \
    --exclude "usr/local/lib/python3.5/dist-packages/data" \
    --exclude "usr/local/bin" \
    --exclude "*.pyc" \
    --exclude "*.pyo" \
    --maintainer "Hyperledger <hyperledger-indy@lists.hyperledger.org>" \
    --after-install "postinst" \
    --before-remove "prerm" \
    --name "${PACKAGE_NAME}" \
    --version "${PACKAGE_VERSION}" \
    --package "${OUTPUT_PATH}" \
    "${TMP_DIR}"

rm -rf "${TMP_DIR}"
|
<reponame>mvaliev/gp2s-pncc
/*
* Copyright 2018 Genentech Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import ModelView from '@/components/App/Model/View'
import ModelNew from '@/components/App/Model/New'
import ModelEdit from '@/components/App/Model/Edit'
import ModelCopy from '@/components/App/Model/Copy'
import ModelList from '@/components/App/Model/List'
// Route table for the Model section. Every route forwards URL params to
// its component as props (`props: true`). The static 'new' path is
// declared before the ':id' pattern so it is matched first rather than
// being captured as an id.
export default [
  {
    // Section root: list all models.
    path: '',
    name: 'model',
    component: ModelList,
    props: true
  },
  {
    // Create a new model.
    path: 'new',
    name: 'model-new',
    component: ModelNew,
    props: true,
  },
  {
    // View a single model by id.
    path: ':id',
    name: 'model-view',
    component: ModelView,
    props: true,
  },
  {
    // Edit an existing model.
    path: ':id/edit',
    name: 'model-edit',
    component: ModelEdit,
    props: true
  },
  {
    // Duplicate an existing model.
    path: 'copy/:id',
    name: 'model-copy',
    component: ModelCopy,
    props: true
  }
]
|
<reponame>OhFinance/oh-app
import BigNumber from "bignumber.js";
// Primitive argument types accepted for contract method calls.
export type MethodArg = string | number | BigNumber;
// Arguments may be flat or nested one level (arrays of args).
export type MethodArgs = Array<MethodArg | MethodArg[]>;
// Inputs that may be wholly or partially absent.
export type OptionalMethodInputs =
  | Array<MethodArg | MethodArg[] | undefined>
  | undefined;

// A single encoded call: target address plus encoded call data.
export interface Call {
  address: string;
  callData: string;
}

// Lifecycle/result state of a call as tracked by the caller.
export interface CallState {
  readonly valid: boolean;
  // the result, or undefined if loading or errored/no data
  readonly result: Result | undefined;
  // true if the result has never been fetched
  readonly loading: boolean;
  // true if the result is not for the latest block
  readonly syncing: boolean;
  // true if the call was made and is synced, but the return data is invalid
  readonly error: boolean;
}

// Raw outcome of a call before decoding into a Result.
export interface CallResult {
  readonly valid: boolean;
  readonly data: string | undefined;
  readonly blockNumber: number | undefined;
}

export interface ListenerOptions {
  // how often this data should be fetched, by default 1
  readonly blocksPerFetch?: number;
}

// Decoded call output: positional array access plus named keys.
export interface Result extends ReadonlyArray<any> {
  readonly [key: string]: any;
}
|
<reponame>Damian070/pimp-my-pr<filename>libs/server/repository/core/domain/src/lib/entities/reviewer.entity.ts
import { ContributorEntity } from './contributor.entity';

// A reviewer is currently structurally identical to a contributor; the
// distinct class exists so reviewer-specific fields/behavior can be
// added later without touching ContributorEntity.
export class ReviewerEntity extends ContributorEntity {}
|
#=======
# Author: <NAME> (<EMAIL>)
#=======
require 'logger'
require 'pp'
require 'yaml'

# UICov gathers UI screen/transition coverage from application logs and
# emits it as PlantUML (see the entry point below).
module UICov
  GEM_HOME = File.expand_path("#{File.dirname(__FILE__)}/..")
  $LOAD_PATH.unshift GEM_HOME
  require 'lib/uicov/consts'

  # Convenience entry point delegating to UICoverage#gather_coverage.
  # NOTE(review): UICoverage is expected to be defined by the required
  # library files -- confirm the load order provides it.
  def self.gather_coverage(opts={})
    UICoverage.new.gather_coverage(opts)
  end
end

###########################
# E N T R Y  P O I N T
############################
if __FILE__ == $0
  # Example invocation: parse logPM.txt against model1.puml, using the
  # regexes to recognise "current screen" and "transition" log lines.
  opts = {
    :log => 'logPM.txt',
    :model => "#{UICov::GEM_TESTS_DATA_DIR}/model1.puml",
    :current_screen => /\s+<==\s+([^ ]+)\s+is set as current screen/,
    :transition => /Transition '([^ ]+)'.*from '([^ ]+)'.*to '([^ ]+)'/
  }
  cov = UICov.gather_coverage(opts)
  cov.to_puml('log.puml')
  pp cov
end
|
<filename>src/components/mobileheader.js
import React, { useState } from 'react'
import '../styles/mobileheader.scss'
import {Link} from 'gatsby'
import * as FaIcons from 'react-icons/fa'
import * as AiIcons from 'react-icons/ai'
import * as HiIcons from "react-icons/hi";
export default () => {
const [sidebar, setSidebar] = useState(false);
const [search, setSearch] = useState(false);
const showSidebar = () => setSidebar(!sidebar);
const showSearch = () => setSearch(!search);
return(
<div className="containernav">
<div className="sidenav">
<div className="promo">
<span>Dapatkan voucher cashback s/d Rp50.000</span>
</div>
<div className="navbar1">
<Link to="/">
<div className="mebo__logo"/>
</Link>
<div className="searchIcon">
<FaIcons.FaSearch onClick={showSearch}/>
</div>
<div className="shopIcon">
<Link to="/whatsapp">
<HiIcons.HiOutlineShoppingCart />
</Link>
</div>
<div className="burgerIcon">
<FaIcons.FaBars onClick={showSidebar}/>
</div>
</div>
</div>
<nav className={sidebar ? 'navMenu active' : 'navMenu'}>
<div className="topitems">
<div className="mebo__logo"/>
<AiIcons.AiOutlineClose onClick={showSidebar}/>
</div>
<ul className="navitems">
<Link to="/products"><li className="navcontent">Produk</li></Link>
<Link to="/products/aksesoris" onClick={showSidebar}><li className="navcontent">Aksesoris</li></Link>
<Link to="/products/kasur-busa" onClick={showSidebar}><li className="navcontent">Kasur busa</li></Link>
<Link to="/products/kasur-lipat" onClick={showSidebar}><li className="navcontent">Kasur lipat</li></Link>
<Link to="/products/sofa-bed" style={{borderBottom: "0.1rem solid black"}} onClick={showSidebar}><li className="navcontent">Sofa bed</li></Link>
</ul>
</nav>
<nav className={search ? 'navSearch active' : 'navSearch'}>
<div className="topitems">
<form action="/searchlist/barang">
<input type="text" name="item" placeholder="bantal...." required/>
{/* <button type="submit"><FaIcons.FaSearch /></button> */}
</form>
<AiIcons.AiOutlineClose className="xbtn" onClick={showSearch}/>
</div>
</nav>
</div>
)
}
|
#!/usr/bin/env bash
oc get routes --all-namespaces | awk 'NR>1' | python check_urls_parallel.py
|
public class MainActivity extends AppCompatActivity {
    // Loaded asynchronously by GetQuestionsTask; null until the fetch completes.
    private List<Question> questions;
    private ListView listView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        //Get the data from the API
        new GetQuestionsTask().execute();
        // Setup the list view
        listView = (ListView) findViewById(R.id.lvQuestions);
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                //Open the question activity when a question is tapped.
                // NOTE(review): `questions` may still be null if this fires
                // before the async fetch finishes -- confirm the adapter is
                // only attached after onPostExecute, which would prevent that.
                Intent intent = new Intent(MainActivity.this, QuestionActivity.class);
                intent.putExtra("question", questions.get(position));
                startActivity(intent);
            }
        });
    }

    //Task to fetch data from the API.
    // NOTE(review): AsyncTask is deprecated since API 30; consider an
    // Executor/coroutine-based replacement in a follow-up.
    private class GetQuestionsTask extends AsyncTask<Void, Void, Void> {
        @Override
        protected Void doInBackground(Void... params) {
            String url = "https://leetcode.com/api/problems/all/";
            String response = NetworkUtils.getResponseFromHttpUrl(url);
            // Writes the shared field from the background thread; read back
            // on the UI thread in onPostExecute.
            questions = JsonUtils.parseQuestions(response);
            return null;
        }

        @Override
        protected void onPostExecute(Void aVoid) {
            //Set the adapter for the list view
            listView.setAdapter(new QuestionListAdapter(MainActivity.this, questions));
        }
    }
}
Implement a decision tree algorithm using a greedy approach to determine the best attribute on which to split a node at each step. The algorithm should use the Gini index as the metric for selecting the best attribute and should run in O(n^2) time. After training, the algorithm should be able to classify test data with accuracy greater than 0.8.
import numpy as np
def sigmoid(x):
    """Logistic function: 1 / (1 + e^(-x)). Works elementwise on arrays."""
    neg_exp = np.exp(-x)
    return 1 / (1 + neg_exp)
def calculate_gamma(policy_num, gamma0, gamma1, gamma2):
    """Map a policy index to a gamma value.

    Piecewise behaviour as implemented (upstream intent undocumented):
      * policy_num == 0   -> gamma0
      * 1 <= policy_num <= 6 -> sigmoid blend between gamma0 and gamma1
      * policy_num == 7   -> gamma1
      * otherwise         -> log-combination of (1 - gamma1), (1 - gamma2)

    NOTE(review): the final branch also catches negative policy_num --
    confirm that is intended and not an unchecked input.
    """
    if policy_num == 0:
        return gamma0
    elif 1 <= policy_num <= 6:
        # Maps policy 1..6 onto roughly [-6.7, 10] before the sigmoid.
        g = 10 * ((2 * policy_num - 6) / 6)
        g = gamma1 + (gamma0 - gamma1) * sigmoid(g)
        return g
    elif policy_num == 7:
        return gamma1
    else:
        g = (policy_num - 9) * np.log(1 - gamma1) + (policy_num - 8) * np.log(1 - gamma2)
        return g
<reponame>Zac-Garby/Radon
package runtime
const initialStorePoolSize = 32

// A StorePool contains a number of Stores, which can be released quickly. This avoids
// creating stores, which is slower than just reusing existing ones.
type StorePool struct {
	// stores holds the currently idle Stores available for reuse.
	stores []*Store
}
// NewStorePool makes a new store pool with initialStorePoolSize stores.
func NewStorePool() *StorePool {
	// Pre-populate the pool so early Release calls don't hit NewStore.
	stores := make([]*Store, initialStorePoolSize)
	for i := range stores {
		stores[i] = NewStore(nil)
	}
	return &StorePool{stores: stores}
}
// IsEmpty checks whether or not the pool is empty (no idle stores left).
func (s *StorePool) IsEmpty() bool {
	return len(s.stores) == 0
}
// Release releases a store from the pool, allowing for use elsewhere.
// When the pool is empty a fresh store is created instead.
func (s *StorePool) Release(enclosing *Store) *Store {
	if s.IsEmpty() {
		return NewStore(enclosing)
	}
	// Pop from the tail rather than re-slicing from the front: the front
	// re-slice kept a live reference to the handed-out store in the backing
	// array (pinning it in memory) and permanently discarded capacity that
	// Add could otherwise reuse.
	last := len(s.stores) - 1
	store := s.stores[last]
	s.stores[last] = nil // drop the pool's reference to the handed-out store
	s.stores = s.stores[:last]
	store.Enclosing = enclosing
	return store
}
// Add adds a store back into the pool.
func (s *StorePool) Add(sto *Store) {
	// Reset the store so no variables or enclosing scope leak to its next user.
	sto.Data = make(map[string]*Variable)
	sto.Enclosing = nil
	s.stores = append(s.stores, sto)
}
|
<reponame>ritaswc/wechat_app_template
// WeChat mini-program video playback page: receives a videoUrl parameter
// and wires up the standard <video> component event handlers.
Page({
  data: {
    src: '',          // video URL, filled in by onLoad
    controls: true,   // whether native player controls are shown
    loading: true,
  },

  // Toggle visibility of the native video controls.
  hideControl() {
    this.setData({
      controls: !this.data.controls,
    });
  },

  onLoad(params) {
    //console.log(params);
    this.setData({
      src: params.videoUrl,
      loading: false,
    });
  },

  // Fired when playback starts or resumes.
  bindplay() {
  },
  // Fired when playback is paused.
  bindpause() {
  },
  // Fired when playback reaches the end of the video.
  bindended() {
  },
  // Fired as playback progresses; event.detail = {currentTime: <seconds>}.
  // Triggered roughly every 250ms.
  bindtimeupdate() {
  },

  // NOTE(review): dead code below -- a half-duplicated, unbalanced
  // commented-out "share from top-right menu" handler (onShareAppMessage).
  // Kept verbatim; either restore one clean copy or delete it.
  // // Share from the top-right menu
  // onShareAppMessage: function (res) {
  // var that = this;
  // return {
  // title: 'Sports News',
  // //path: '/pa
  // // Share from the top-right menu
  // onShareAppMessage: function (res) {
  // var that = this;
  // return {
  // title: 'Sports News',
  // //path: '/pages/main-page/main-page?id=' + that.data.scratchId,
  // success: function (res) {
  // // share succeeded
  // wx.showToast({
  // title: '转发成功!',
  // })
  // that.shareClick();
  // },
  // fail: function (res) {
  // // share failed
  // wx.showToast({
  // icon: 'none',
  // title: '转发失败',
  // })
  // }
  // }
  // }
  // }
  // }
});
package generic
import (
"github.com/benthosdev/benthos/v4/internal/bundle"
"github.com/benthosdev/benthos/v4/internal/component/metrics"
"github.com/benthosdev/benthos/v4/internal/docs"
"github.com/benthosdev/benthos/v4/internal/log"
)
func init() {
	// Register the "none" metrics type: a no-op implementation that
	// disables metrics output entirely. The Add error is deliberately
	// ignored -- registering a built-in component is not expected to fail.
	_ = bundle.AllMetrics.Add(func(metrics.Config, log.Modular) (metrics.Type, error) {
		return metrics.Noop(), nil
	}, docs.ComponentSpec{
		Name:    "none",
		Type:    docs.TypeMetrics,
		Summary: `Disable metrics entirely.`,
		Config:  docs.FieldObject("", ""),
	})
}
|
#!/bin/sh
# Installs Docker, generates a ~/run helper that (re)starts the presearch
# node + auto-updater containers, and registers/starts a systemd unit.
# NOTE(review): $1 is the node REGISTRATION_CODE baked into ~/run -- the
# script assumes it is provided; confirm callers always pass it.
echo "*** Initial system setup"
apt update -y
apt upgrade -y
apt install -y docker.io
echo ""
echo "*** Setup script"
# ~/run: stop/remove any existing containers, start the auto-updater
# (15-min interval), pull and start the node, then tail its logs.
echo "#!/bin/bash" > ~/run
echo "" >> ~/run
echo "docker stop presearch-node ; docker rm presearch-node ; docker stop presearch-auto-updater ; docker rm presearch-auto-updater ; docker run -d --name presearch-auto-updater --restart=unless-stopped -v /var/run/docker.sock:/var/run/docker.sock presearch/auto-updater --cleanup --interval 900 presearch-auto-updater presearch-node ; docker pull presearch/node ; docker run -dt --name presearch-node --restart=unless-stopped -v presearch-node-storage:/app/node -e REGISTRATION_CODE=$1 presearch/node ; docker logs -f presearch-node" >> ~/run
chmod 755 ~/run
echo ""
echo "*** Setup systemd"
echo "cp presearch.service /etc/systemd/system/"
cp presearch.service /etc/systemd/system/
echo "systemctl daemon-reload"
systemctl daemon-reload
echo "systemctl enable presearch.service"
systemctl enable presearch.service
echo ""
echo "*** Running presearch for the first time"
echo "systemctl start presearch.service"
systemctl start presearch.service
|
import { assert } from 'chai'
import * as jsdocx from '../dist/jsdocx'
describe('#ES6', () => {
describe('#import', () => {
it('should import the library', () => {
let doc = new jsdocx.Document()
assert.equal(doc.hasOwnProperty('files'), true)
assert.equal(typeof doc.files, 'object')
})
})
}) |
import Vue from 'vue'
import Vuex from 'vuex'
import { firebaseMutations, firebaseAction } from 'vuexfire'
import { db } from '@/store/utils/firestore'
import auth from './auth'
import user from './user'
import roles from './roles'
import attendance from './attendance'
import markattendance from './attendance/mark'
import course from './course'
import courseregistration from './course/registration'
Vue.use(Vuex);

// Root Vuex store. Feature state lives in the modules; the root itself
// only holds the firebase-bound `departments` collection.
export const store = new Vuex.Store({
  modules: { auth, user, roles, course, courseregistration, attendance, markattendance },
  state: {
    departments: []
  },
  // firebaseMutations supplies the mutations vuexfire needs for bound refs.
  mutations: firebaseMutations,
  actions: {
    // Live-bind state.departments to the Firestore 'departments' collection.
    bindDepartments: firebaseAction(({ bindFirebaseRef }) => {
      bindFirebaseRef('departments', db().collection('departments'))
    })
  }
});
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Authors: <NAME>, <NAME>
*
*/
package org.rumbledb.expressions.postfix;
import org.rumbledb.compiler.VisitorConfig;
import org.rumbledb.exceptions.ExceptionMetadata;
import org.rumbledb.exceptions.OurBadException;
import org.rumbledb.expressions.AbstractNodeVisitor;
import org.rumbledb.expressions.ExecutionMode;
import org.rumbledb.expressions.Expression;
import org.rumbledb.expressions.Node;
import org.rumbledb.expressions.primary.IntegerLiteralExpression;
import org.rumbledb.items.ItemFactory;
import java.util.ArrayList;
import java.util.List;
/**
 * AST node for a postfix filter expression: mainExpression[predicateExpression].
 */
public class FilterExpression extends Expression {
    private static final long serialVersionUID = 1L;
    private Expression mainExpression;
    private Expression predicateExpression;

    public FilterExpression(Expression mainExpression, Expression predicateExpression, ExceptionMetadata metadata) {
        super(metadata);
        if (mainExpression == null) {
            throw new OurBadException("Main expression cannot be null in a postfix expression.");
        }
        this.mainExpression = mainExpression;
        this.predicateExpression = predicateExpression;
    }

    @Override
    public List<Node> getChildren() {
        List<Node> result = new ArrayList<>();
        result.add(this.mainExpression);
        result.add(this.predicateExpression);
        return result;
    }

    @Override
    public void serializeToJSONiq(StringBuffer sb, int indent) {
        // Renders as: <main>[<predicate>]
        indentIt(sb, indent);
        this.mainExpression.serializeToJSONiq(sb, 0);
        sb.append("[");
        this.predicateExpression.serializeToJSONiq(sb, 0);
        sb.append("]\n");
    }

    public Expression getPredicateExpression() {
        return this.predicateExpression;
    }

    @Override
    public <T> T accept(AbstractNodeVisitor<T> visitor, T argument) {
        return visitor.visitFilterExpression(this, argument);
    }

    public Expression getMainExpression() {
        return this.mainExpression;
    }

    /**
     * Special-case: a small constant integer predicate (within the result
     * size cap) can run locally; otherwise inherit the main expression's
     * mode, downgrading DATAFRAME to RDD.
     */
    @Override
    public void initHighestExecutionMode(VisitorConfig visitorConfig) {
        if (this.predicateExpression instanceof IntegerLiteralExpression) {
            String lexicalValue = ((IntegerLiteralExpression) this.predicateExpression).getLexicalValue();
            if (ItemFactory.getInstance().createIntegerItem(lexicalValue).isInt()) {
                if (
                    // NOTE(review): "getRumbleCOnfiguration" has unusual
                    // capitalization -- presumably it matches the declaration
                    // elsewhere in the project; verify and fix both together.
                    ItemFactory.getInstance().createIntegerItem(lexicalValue).getIntValue() <= this.staticContext
                        .getRumbleCOnfiguration()
                        .getResultSizeCap()
                ) {
                    this.highestExecutionMode = ExecutionMode.LOCAL;
                    return;
                }
            }
        }
        this.highestExecutionMode = this.mainExpression.getHighestExecutionMode(visitorConfig);
        if (this.highestExecutionMode.equals(ExecutionMode.DATAFRAME)) {
            this.highestExecutionMode = ExecutionMode.RDD;
        }
    }
}
|
(((1)))
(((1 * 2)))
((1 * (2 + 3)))
(( (2 + 3) / 2 ))
(( (2 + 3) / (1/2 + (3*4) / 5) ))
# Error:
(( (3) * ))
|
/**
 * Created by desaroger on 26/02/17.
 *
 * Re-exports every module in this directory keyed by filename (via
 * require-dir), so consumers can require the folder as a single object.
 */
module.exports = require('require-dir')();
|
<filename>electron/lib/errhandler.ts
import log from 'electron-log';

/**
 * Central error sink: forwards any thrown value to electron-log.
 *
 * Typed `unknown`, not `never`: `never` means "no value can exist here",
 * which made every call site passing a real error a type error. `unknown`
 * is the correct top type for a caught value and is a widening
 * (backward-compatible) change for callers.
 */
export function errHandler(err: unknown): void {
  log.error(err);
}
|
<gh_stars>1000+
// Helper shortcuts for a crawler script; `page` is supplied by the
// crawling runtime (not defined in this file).

// CSS-select from the page HTML, returned as a string.
function $(str){
    return page.getHtml().$(str).toString();
}

// XPath-select from the page HTML, returned as a string.
function xpath(str){
    return page.getHtml().xpath(str).toString();
}

// Queue every page link matching the regex as a new crawl target.
// NOTE(review): `links` is assigned without var/let and leaks as an
// implicit global -- confirm whether that is intentional.
function urls(str){
    links = page.getHtml().links().regex(str).all();
    page.addTargetRequests(links);
}
|
#!/usr/bin/env sh
# Container entrypoint for shlink: prepare the database/caches, optionally
# download GeoLite2 data and set up the hourly visit-locate cron, then
# start the swoole app (retrying until it comes up).
set -e

cd /etc/shlink

echo "Creating fresh database if needed..."
php bin/cli db:create -n -q

echo "Updating database..."
php bin/cli db:migrate -n -q

echo "Generating proxies..."
php vendor/doctrine/orm/bin/doctrine.php orm:generate-proxies -n -q

echo "Clearing entities cache..."
php vendor/doctrine/orm/bin/doctrine.php orm:clear-cache:metadata -n -q

# Try to download GeoLite2 db file only if the license key env var was defined
# (-n with quotes: equivalent to the old `! -z` but robust to odd values)
if [ -n "${GEOLITE_LICENSE_KEY}" ]; then
    echo "Downloading GeoLite2 db file..."
    php bin/cli visit:download-db -n -q
fi

# Periodically run visit:locate every hour
# https://shlink.io/documentation/long-running-tasks/#locate-visits
# set env var "ENABLE_PERIODIC_VISIT_LOCATE=true" to enable
# Compare against "true" explicitly: the old unquoted `[ $VAR ]` test was
# truthy for ANY non-empty value -- including "false" -- contradicting the
# documented contract above.
if [ "${ENABLE_PERIODIC_VISIT_LOCATE}" = "true" ]; then
    echo "Starting periodic visite locate..."
    echo "0 * * * * php /etc/shlink/bin/cli visit:locate -q" > /etc/crontabs/root
    /usr/sbin/crond &
fi

# When restarting the container, swoole might think it is already in execution
# This forces the app to be started every second until the exit code is 0
until php vendor/bin/laminas mezzio:swoole:start; do sleep 1 ; done
|
# frozen_string_literal: true

DiscourseDev::Engine.routes.draw do
  # Dev-only route: GET /<username_or_email>/become hits the impersonation
  # controller; restricted to admins via AdminConstraint.
  get ':username_or_email/become' => 'admin/impersonate#create', constraints: AdminConstraint.new
end
|
<reponame>Preeti240/Online-Proctoring
# -*- coding: utf-8 -*-
import cv2
import numpy as np
import math
from face_detector import get_face_detector, find_faces
from face_landmarks import get_landmark_model, detect_marks
def get_2d_points(img, rotation_vector, translation_vector, camera_matrix, val):
    """Return the 3D points present as 2D for making the annotation box.

    Builds the corner points of a box whose rear face uses val[0]/val[1]
    (size/depth) and front face val[2]/val[3], then projects them onto the
    image plane with cv2.projectPoints.

    `img` is unused here but kept for interface compatibility with callers.
    """
    point_3d = []
    dist_coeffs = np.zeros((4, 1))  # assume no lens distortion
    rear_size = val[0]
    rear_depth = val[1]
    point_3d.append((-rear_size, -rear_size, rear_depth))
    point_3d.append((-rear_size, rear_size, rear_depth))
    point_3d.append((rear_size, rear_size, rear_depth))
    point_3d.append((rear_size, -rear_size, rear_depth))
    point_3d.append((-rear_size, -rear_size, rear_depth))
    front_size = val[2]
    front_depth = val[3]
    point_3d.append((-front_size, -front_size, front_depth))
    point_3d.append((-front_size, front_size, front_depth))
    point_3d.append((front_size, front_size, front_depth))
    point_3d.append((front_size, -front_size, front_depth))
    point_3d.append((-front_size, -front_size, front_depth))
    # np.float was deprecated in NumPy 1.20 and removed in 1.24; use the
    # concrete float64 dtype (identical numerics, no AttributeError).
    point_3d = np.array(point_3d, dtype=np.float64).reshape(-1, 3)

    # Map to 2d img points
    (point_2d, _) = cv2.projectPoints(point_3d,
                                      rotation_vector,
                                      translation_vector,
                                      camera_matrix,
                                      dist_coeffs)
    point_2d = np.int32(point_2d.reshape(-1, 2))
    return point_2d
def draw_annotation_box(img, rotation_vector, translation_vector, camera_matrix,
                        rear_size=300, rear_depth=0, front_size=500, front_depth=400,
                        color=(255, 255, 0), line_width=2):
    """
    Draw a 3D annotation box on the face for head pose estimation.

    NOTE(review): the rear_size/rear_depth/front_size/front_depth
    parameters are accepted but immediately overwritten below (rear face
    forced to 1x1 at depth 0, front face to the image width at twice that
    depth), so the documented defaults have no effect. Confirm whether the
    overrides or the parameters reflect the intended behaviour.

    Parameters
    ----------
    img : np.uint8
        Original image; drawn on in place.
    rotation_vector : Array of float64
        Rotation vector obtained from cv2.solvePnP.
    translation_vector : Array of float64
        Translation vector obtained from cv2.solvePnP.
    camera_matrix : Array of float64
        The camera matrix.
    color : tuple, optional
        Color with which to draw the annotation box. Default (255, 255, 0).
    line_width : int, optional
        Line width of the lines drawn. Default 2.

    Returns
    -------
    None.
    """
    rear_size = 1
    rear_depth = 0
    front_size = img.shape[1]
    front_depth = front_size*2
    val = [rear_size, rear_depth, front_size, front_depth]
    point_2d = get_2d_points(img, rotation_vector, translation_vector, camera_matrix, val)
    # Draw both box faces as polylines, then connect corresponding corners.
    cv2.polylines(img, [point_2d], True, color, line_width, cv2.LINE_AA)
    cv2.line(img, tuple(point_2d[1]), tuple(
        point_2d[6]), color, line_width, cv2.LINE_AA)
    cv2.line(img, tuple(point_2d[2]), tuple(
        point_2d[7]), color, line_width, cv2.LINE_AA)
    cv2.line(img, tuple(point_2d[3]), tuple(
        point_2d[8]), color, line_width, cv2.LINE_AA)
def head_pose_points(img, rotation_vector, translation_vector, camera_matrix):
    """
    Get the two image points of the line used to estimate sideways head pose.

    Parameters
    ----------
    img : np.uint8
        Original image.
    rotation_vector : Array of float64
        Rotation vector obtained from cv2.solvePnP.
    translation_vector : Array of float64
        Translation vector obtained from cv2.solvePnP.
    camera_matrix : Array of float64
        The camera intrinsic matrix.

    Returns
    -------
    (x, y) : tuple
        Endpoints of the line used to estimate the sideways head pose.
    """
    # rear size/depth and front size/depth, derived from the frame width.
    dims = [1, 0, img.shape[1], img.shape[1] * 2]
    point_2d = get_2d_points(img, rotation_vector, translation_vector, camera_matrix, dims)
    midpoint = (point_2d[5] + point_2d[8]) // 2
    return (point_2d[2], midpoint)
face_model = get_face_detector()
landmark_model = get_landmark_model()
cap = cv2.VideoCapture(0)
# Grab one frame up front so the frame size can seed the camera matrix.
ret, img = cap.read()
size = img.shape
font = cv2.FONT_HERSHEY_SIMPLEX

# 3D reference points of a generic face, in model coordinates.
model_points = np.array([
    (0.0, 0.0, 0.0),             # Nose tip
    (0.0, -330.0, -65.0),        # Chin
    (-225.0, 170.0, -135.0),     # Left eye left corner
    (225.0, 170.0, -135.0),      # Right eye right corner
    (-150.0, -150.0, -125.0),    # Left mouth corner
    (150.0, -150.0, -125.0),     # Right mouth corner
])

# Pinhole camera intrinsics approximated from the frame size.
focal_length = size[1]
center = (size[1] / 2, size[0] / 2)
camera_matrix = np.array(
    [[focal_length, 0, center[0]],
     [0, focal_length, center[1]],
     [0, 0, 1]], dtype="double"
)

while True:
    ret, img = cap.read()
    if not ret:
        break
    faces = find_faces(img, face_model)
    for face in faces:
        marks = detect_marks(img, landmark_model, face)
        # 2D landmark positions matching model_points, in pixel coordinates.
        image_points = np.array([
            marks[30],    # Nose tip
            marks[8],     # Chin
            marks[36],    # Left eye left corner
            marks[45],    # Right eye right corner
            marks[48],    # Left mouth corner
            marks[54],    # Right mouth corner
        ], dtype="double")
        dist_coeffs = np.zeros((4, 1))  # Assuming no lens distortion
        (success, rotation_vector, translation_vector) = cv2.solvePnP(
            model_points, image_points, camera_matrix, dist_coeffs,
            flags=cv2.SOLVEPNP_UPNP)

        # Project a 3D point 1000 units in front of the nose onto the image
        # plane; used to draw a line sticking out of the nose.
        (nose_end_point2D, jacobian) = cv2.projectPoints(
            np.array([(0.0, 0.0, 1000.0)]), rotation_vector,
            translation_vector, camera_matrix, dist_coeffs)
        for p in image_points:
            cv2.circle(img, (int(p[0]), int(p[1])), 3, (0, 0, 255), -1)
        p1 = (int(image_points[0][0]), int(image_points[0][1]))
        p2 = (int(nose_end_point2D[0][0][0]), int(nose_end_point2D[0][0][1]))
        x1, x2 = head_pose_points(img, rotation_vector, translation_vector, camera_matrix)
        cv2.line(img, p1, p2, (0, 255, 255), 2)
        cv2.line(img, tuple(x1), tuple(x2), (255, 255, 0), 2)

        # Vertical angle from the nose line; a vertical line (zero run)
        # raises and is treated as 90 degrees.
        try:
            slope = (p2[1] - p1[1]) / (p2[0] - p1[0])
            ang1 = int(math.degrees(math.atan(slope)))
        except:
            ang1 = 90
        # Sideways angle from the perpendicular of the second line.
        try:
            slope = (x2[1] - x1[1]) / (x2[0] - x1[0])
            ang2 = int(math.degrees(math.atan(-1 / slope)))
        except:
            ang2 = 90

        if ang1 >= 48:
            print('Head down')
            cv2.putText(img, 'Head down', (30, 30), font, 2, (255, 255, 128), 3)
        elif ang1 <= -48:
            print('Head up')
            cv2.putText(img, 'Head up', (30, 30), font, 2, (255, 255, 128), 3)
        if ang2 >= 48:
            print('Head right')
            cv2.putText(img, 'Head right', (90, 30), font, 2, (255, 255, 128), 3)
        elif ang2 <= -48:
            print('Head left')
            cv2.putText(img, 'Head left', (90, 30), font, 2, (255, 255, 128), 3)
        cv2.putText(img, str(ang1), tuple(p1), font, 2, (128, 255, 255), 3)
        cv2.putText(img, str(ang2), tuple(x1), font, 2, (255, 255, 128), 3)
    cv2.imshow('img', img)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cv2.destroyAllWindows()
cap.release()
|
<reponame>fernandosev/Tetris---React-Native-Expo
import React, { useState, useEffect } from 'react';
import {
SafeAreaView,
View,
Text,
TouchableOpacity,
StyleSheet,
Image,
AsyncStorage
} from 'react-native';
//animacoes
import * as Animatable from 'react-native-animatable';
import logo from './assets/logo.png';
import BestScores from './Components/BestScores';
export default function GameMenu({navigation}){
const[gameOver, setGameOver] = useState(true);
const[bestScores, setBestScores] = useState(null);
useEffect(() => {
async function scores(){
try{
let data = await AsyncStorage.getItem('Best_Scores')
setBestScores(JSON.parse(data));
setGameOver(false);
}catch(e){
console.log(e);
}
}
if(gameOver == true){
scores();
}
}, [gameOver]);
return(
<SafeAreaView style={styles.container}>
<Image source={logo} style={styles.logo}/>
<Animatable.View animation="rubberBand" easing="ease-out" iterationCount="infinite">
<TouchableOpacity style={styles.btnStart} onPress={() => navigation.navigate('Game', { setGameOver })}>
<Text style={styles.txtBtnStart}>Start</Text>
</TouchableOpacity>
</Animatable.View>
<BestScores data={bestScores}></BestScores>
</SafeAreaView>
)
}
// Menu-screen styles: dark-green centred layout with a white start button.
const styles = StyleSheet.create({
    container: {
        flex: 1,
        backgroundColor: '#006400',
        alignItems: 'center',
        justifyContent: 'center',
    },
    logo: {
        width: 200,
        height: 40,
        marginBottom: 30,
    },
    btnStart: {
        backgroundColor: '#FFF',
    },
    txtBtnStart: {
        fontSize: 20,
        color: '#006400',
        paddingHorizontal: 30,
        paddingVertical: 20,
    },
});
# Ask the user which way to count; normalise the answer for comparison.
direction = input('Which direction do you want to count? (up/down)').strip().lower()

# Map each valid answer onto the sequence it should print.
sequences = {'up': range(1, 11), 'down': range(10, 0, -1)}

if direction in sequences:
    for value in sequences[direction]:
        print(value)
else:
    print("Invalid direction. Please enter 'up' or 'down'.")
# Sample Output:
# If the user inputs 'up':
# 1
# 2
# 3
# 4
# 5
# 6
# 7
# 8
# 9
# 10
# If the user inputs 'down':
# 10
# 9
# 8
# 7
# 6
# 5
# 4
# 3
# 2
# 1 |
class Solution {
public:
    // Returns true if any value appears at least twice in nums.
    //
    // Sorts nums in place (preserving the original's side effect) so any
    // duplicates become adjacent, then scans once for an equal neighbouring
    // pair. Replaces the previous flag/two-index bookkeeping with an early
    // return. O(n log n) time, O(1) extra space.
    bool containsDuplicate(vector<int>& nums) {
        sort(nums.begin(), nums.end());
        for (size_t j = 1; j < nums.size(); ++j) {
            // After sorting, a duplicate must sit next to its twin.
            if (nums[j] == nums[j - 1]) return true;
        }
        return false;
    }
};
#!/usr/bin/env bash
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script automates deployment of a federation - as documented in
# the README - to the current kubectl context. It also joins the
# hosting cluster as a member of the federation.
#
# WARNING: The service account for the kubefed namespace will be
# granted the cluster-admin role. Until more restrictive permissions
# are used, access to the kubefed namespace should be restricted to
# trusted users.
#
# If using minikube, a cluster must be started prior to invoking this
# script:
#
# $ minikube start
#
# This script depends on kubectl and kubebuilder being installed in
# the path. If you want to install Federation via helm chart, you may
# also need to install helm in the path. These and other test binaries
# can be installed via the download-binaries.sh script, which downloads
# to ./bin:
#
# $ ./scripts/download-binaries.sh
# $ export PATH=$(pwd)/bin:${PATH}
#
# To redeploy federation from scratch, prefix the deploy invocation with the deletion script:
#
# # WARNING: The deletion script will remove federation data
# $ ./scripts/delete-federation.sh [join-cluster]... && ./scripts/deploy-federation.sh <image> [join-cluster]...
#
# Fail fast: abort on command errors, unset variables, and pipeline failures.
set -o errexit
set -o nounset
set -o pipefail
# Shared shell helpers (provides util::wait-for-condition used below).
source "$(dirname "${BASH_SOURCE}")/util.sh"
# Installs (or upgrades) the kubefed chart into ${NS}, bootstrapping Tiller
# with a cluster-admin service account first if helm has no working server.
function deploy-with-helm() {
  # Don't install tiller if we already have a working install.
  if ! helm version --server 2>/dev/null; then
    # RBAC should be enabled to avoid CI fail because CI K8s uses RBAC for Tiller
    # Create the tiller service account and bind it to cluster-admin
    # before initialising helm with it.
    cat <<EOF | kubectl apply -f -
apiVersion: v1
kind: ServiceAccount
metadata:
  name: tiller
  namespace: kube-system
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: tiller
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: cluster-admin
subjects:
- kind: ServiceAccount
  name: tiller
  namespace: kube-system
EOF
    helm init --service-account tiller
    util::wait-for-condition "Tiller to become ready" "helm version --server &> /dev/null" 120
  fi
  # Split IMAGE_NAME (<registry>/<user>/<image>:<tag>) into the pieces the
  # chart expects: repository prefix, bare image name, and tag.
  local repository=${IMAGE_NAME%/*}
  local image_tag=${IMAGE_NAME##*/}
  local image=${image_tag%:*}
  local tag=${image_tag#*:}
  # Build the helm command, adding namespaced scope and pull policy when
  # requested via the NAMESPACED / IMAGE_PULL_POLICY environment variables.
  local cmd
  if [[ "${NAMESPACED}" ]]; then
    cmd="$(helm-deploy-cmd kubefed-${NS} ${NS} ${repository} ${image} ${tag})"
    cmd="${cmd} --set global.scope=Namespaced"
  else
    cmd="$(helm-deploy-cmd kubefed ${NS} ${repository} ${image} ${tag})"
  fi
  if [[ "${IMAGE_PULL_POLICY:-}" ]]; then
    cmd="${cmd} --set controllermanager.imagePullPolicy=${IMAGE_PULL_POLICY}"
  fi
  ${cmd}
}
# Prints (does not run) the base "helm install" command for the given
# release name, namespace, and controller-manager image coordinates; the
# caller captures the output with $(...) and may append extra --set flags.
function helm-deploy-cmd {
  # Required arguments
  local name="${1}"
  local ns="${2}"
  local repo="${3}"
  local image="${4}"
  local tag="${5}"
  echo "helm install charts/kubefed --name ${name} --namespace ${ns} \
     --set controllermanager.repository=${repo} --set controllermanager.image=${image} \
     --set controllermanager.tag=${tag}"
}
# Namespace to install kubefed into (override with KUBEFED_NAMESPACE).
NS="${KUBEFED_NAMESPACE:-kube-federation-system}"
IMAGE_NAME="${1:-}"
NAMESPACED="${NAMESPACED:-}"
LATEST_IMAGE_NAME=quay.io/kubernetes-multicluster/kubefed:latest
# Deploying the published "latest" image skips the local build/push steps.
if [[ "${IMAGE_NAME}" == "$LATEST_IMAGE_NAME" ]]; then
  USE_LATEST=y
else
  USE_LATEST=
fi
KF_NS_ARGS="--kubefed-namespace=${NS} "
# The image argument is mandatory; print usage and exit otherwise.
if [[ -z "${IMAGE_NAME}" ]]; then
  >&2 echo "Usage: $0 <image> [join-cluster]...
<image> should be in the form <containerregistry>/<username>/<imagename>:<tagname>
Example: docker.io/<username>/kubefed:test
If intending to use the docker hub as the container registry to push
the federation image to, make sure to login to the local docker daemon
to ensure credentials are available for push:
$ docker login --username <username>
<join-cluster> should be the kubeconfig context name for the additional cluster to join.
NOTE: The host cluster is already included in the join.
"
  exit 2
fi
shift
# Allow for no specific JOIN_CLUSTERS: they probably want to kubefedctl themselves.
JOIN_CLUSTERS="${*-}"
# Use DOCKER_PUSH= ./scripts/deploy-federation.sh <image> to skip docker
# push on container image when not using latest image.
DOCKER_PUSH="${DOCKER_PUSH:-y}"
DOCKER_PUSH_CMD="docker push ${IMAGE_NAME}"
if [[ ! "${DOCKER_PUSH}" ]]; then
  DOCKER_PUSH_CMD=
fi
# Build federation binaries and image
if [[ ! "${USE_LATEST}" ]]; then
  cd "$(dirname "$0")/.."
  make container IMAGE_NAME=${IMAGE_NAME}
  cd -
  ${DOCKER_PUSH_CMD}
fi
# Build the CLI used below to join clusters.
cd "$(dirname "$0")/.."
make kubefedctl
cd -
# Create the target namespace if it does not already exist.
if ! kubectl get ns "${NS}" > /dev/null 2>&1; then
  kubectl create ns "${NS}"
fi
# Deploy federation resources
deploy-with-helm
# Join the host cluster
CONTEXT="$(kubectl config current-context)"
./bin/kubefedctl join "${CONTEXT}" --host-cluster-context "${CONTEXT}" --v=2 ${KF_NS_ARGS}
# Join any additional clusters named on the command line.
for c in ${JOIN_CLUSTERS}; do
  ./bin/kubefedctl join "${c}" --host-cluster-context "${CONTEXT}" --v=2 ${KF_NS_ARGS}
done
def on_line(points):
    """Return True if the four (x, y) points in ``points`` are collinear.

    Parameters
    ----------
    points : sequence of four (x, y) pairs

    Returns
    -------
    bool
        True when all four points lie on one straight line.

    Bug fix: the previous version built coefficients from circumcircle-style
    determinant terms and tested ``a*x4 + b*y4 + c == 0``, which is not the
    line through the input points — e.g. it returned False for the clearly
    collinear points (0,0), (1,1), (2,2), (3,3).
    """
    x1, y1 = points[0]
    x2, y2 = points[1]
    # Direction of the line through the first two points.
    dx = x2 - x1
    dy = y2 - y1
    # A point (x, y) lies on that line iff the cross product of (dx, dy)
    # and (x - x1, y - y1) is zero (exact for integer coordinates).
    for x, y in points[2:4]:
        if dx * (y - y1) != dy * (x - x1):
            return False
    return True
#!/usr/bin/env sh
# Discover and run the unit tests, then run polyprofile.py against the
# sample server config, writing its output to the current directory.
python3 -m unittest discover tests
./polyprofile.py --config cfg/server_config.yaml --serverInfo --destination .
|
/* eslint no-unused-vars: "off" */
const Discord = require('discord.js');
const ytdl = require('ytdl-core');
const YouTube = require('simple-youtube-api');
const urlCheck = require('is-playlist');
const youtube = new YouTube(process.env.API_TOKEN);
/**
 * `play` command: resolves the argument either as a search query or as a
 * YouTube playlist URL, queues the matching video(s) for this guild, and
 * starts playback when nothing is currently playing.
 */
module.exports = {
    name: 'play',
    category: 'Music',
    usage: '<video name | video/playlist url>',
    aliases: ['search', 'music', 'p'],
    guildOnly: true,
    args: true,
    description: 'Play music from youtube by stalker bot',
    execute(message, _args, _client, options) {
        // NOTE(review): disables Node's max-listener warning process-wide;
        // presumably masks listener buildup from repeated streams — confirm.
        process.setMaxListeners(0);
        // Per-guild state: pending queue, playing flag, recently-played list.
        let Songs = options.songQ.get(message.guild.id) || {};
        let Status = options.isplay.get(message.guild.id) || {};
        let BackSongs = options.backQ.get(message.guild.id) || {};
        let isPlaylist = false;
        if (!Songs.Queue) Songs.Queue = [];
        if (!BackSongs.Queue) BackSongs.Queue = [];
        if (!Status.Playing) Status.Playing = false;
        options.isplay.set(message.guild.id, Status);
        const { voiceChannel } = message.member;
        // Plain search terms take the search branch; playlist URLs the other.
        if (urlCheck(`${_args}`.split(',').join(' ')) == false) {
            youtube.searchVideos(_args, 1)
                .then(results => {
                    // The bot needs a joinable, speakable voice channel.
                    if (!voiceChannel) {
                        return message.reply('please join a voice channel first!');
                    }
                    const permissions = voiceChannel.permissionsFor(message.client.user);
                    if (!permissions.has('CONNECT')) {
                        return message.channel.send('I cannot connect to your voice channel, make sure I have the proper permissions!');
                    }
                    if (!permissions.has('SPEAK')) {
                        return message.channel.send('I cannot speak in this voice channel, make sure I have the proper permissions!');
                    }
                    // Embed describing the (single) search result being queued.
                    const ytEmbed = new Discord.RichEmbed()
                        .setAuthor('Stalker Music', 'https://i.imgur.com/Xr28Jxy.png')
                        .setColor('#7f1515')
                        .addField('Title', `** [${results[0].title}](https://youtu.be/${results[0].id}) **`, false)
                        .addField('Channel', results[0].channel.title, true)
                        .addField('Queue Position', `**${Songs.Queue.length == 0 ? 'Now Playing' : Songs.Queue.length}**`, true)
                        .setImage(results[0].thumbnails.high.url)
                        .setTimestamp()
                        .setFooter('Powered by Stalker bot', 'https://i.imgur.com/Xr28Jxy.png');
                    // Start playback only when nothing is already playing.
                    if (Status.Playing == true) {
                        addToQueue(results);
                    }
                    else {
                        addToQueue(results);
                        play();
                    }
                    message.channel.send(ytEmbed);
                })
                .catch(console.log);
        }
        else {
            isPlaylist = true;
            youtube.getPlaylist(_args[0])
                .then(playlist => {
                    // Embed describing the playlist before its videos resolve.
                    const ytEmbed = new Discord.RichEmbed()
                        .setAuthor('Stalker Music', 'https://i.imgur.com/Xr28Jxy.png')
                        .setColor('#7f1515')
                        .addField('Requested playlist: ', `** [${playlist.title}](${playlist.url}) ** :notes:`, false)
                        .addField('Publish Date', `** ${playlist.publishedAt.getUTCFullYear()} **`, false)
                        .addField('By', `** ${playlist.channel.title} **`)
                        .setTimestamp()
                        .setFooter('Powered by Stalker bot', 'https://i.imgur.com/Xr28Jxy.png');
                    message.channel.send(ytEmbed);
                    const playtitle = playlist.title;
                    playlist.getVideos()
                        .then(videos => {
                            const playlen = videos.length;
                            if (Status.Playing == true) {
                                addToQueue(videos, playlen, playtitle);
                            }
                            else {
                                addToQueue(videos, playlen, playtitle);
                                play();
                            }
                        })
                        .catch(console.log);
                })
                .catch(console.log);
        }
        // Appends the given videos to the guild queue, flips the playing
        // flag on, and reports how much is now queued.
        function addToQueue(_videos, _plNumber, _plTitle) {
            const ytEmbed = new Discord.RichEmbed()
                .setAuthor('Stalker Music', 'https://i.imgur.com/Xr28Jxy.png')
                .setColor('#7f1515')
                .addField(':white_check_mark: Added: ', `**${isPlaylist ? `${_plTitle} with ${_plNumber} ` : Songs.Queue.length + 1}** songs are now in the queue :notes: :notes:`, true)
                .setTimestamp()
                .setFooter('Powered by Stalker bot', 'https://i.imgur.com/Xr28Jxy.png');
            if (Status.Playing == false) Status.Playing = true;
            options.isplay.set(message.guild.id, Status);
            _videos.forEach(video => {
                Songs.Queue.push({
                    'url': `https://www.youtube.com/watch?v=${video.id}`,
                    'requestby': message.author.id,
                    'songName': video.title,
                });
                options.songQ.set(message.guild.id, Songs);
            });
            message.channel.send(ytEmbed);
        }
        // Streams the head of the queue into the voice channel and chains
        // itself on stream end until the queue drains.
        function play() {
            Songs = options.songQ.get(message.guild.id);
            let currentplay = (Songs !== undefined && Songs !== null) ? Songs.Queue[0] : null;
            voiceChannel.join().then(connection => {
                if (currentplay !== null) {
                    let stream = ytdl(currentplay.url, {
                        filter: 'audioonly',
                    });
                    let dispatcher = connection.playStream(stream);
                    dispatcher.on('end', () => {
                        // NOTE(review): `some(s => s.url !== currentplay.url)` is true
                        // whenever ANY remembered song differs, so the finished song is
                        // re-added even when already present; the intent was probably
                        // `!BackSongs.Queue.some(s => s.url === currentplay.url)` —
                        // confirm before changing.
                        if(BackSongs.Queue.some(s => s.url !== currentplay.url) || BackSongs.Queue.length <= 0) {
                            BackSongs.Queue.unshift(currentplay);
                            // Keep only the five most recently played songs.
                            if(BackSongs.Queue.length > 5) BackSongs.Queue.length = 5;
                            options.backQ.set(message.guild.id, BackSongs);
                        }
                        Songs.Queue.shift();
                        if (Songs.Queue.length > 0) {
                            currentplay = Songs.Queue[0];
                            dispatcher = null;
                            play();
                        }
                        else {
                            // Queue drained: clear state but stay in the channel.
                            dispatcher = null;
                            Status.Playing = false;
                            options.isplay.set(message.guild.id, Status);
                            options.songQ.delete(message.guild.id);
                            message.channel.send('** There are no more songs in the queue, I\'ll wait for more songs or you can use the "leave" command to leave the channel. **');
                        }
                    });
                }
            });
        }
    },
};
|
<reponame>addcolouragency/craft_storefront
import { Repository } from "typeorm";
import { ShippingProfile } from "../models/shipping-profile";
/**
 * TypeORM repository for {@link ShippingProfile} entities. Inherits the
 * standard CRUD surface from Repository without declaring custom queries.
 */
export declare class ShippingProfileRepository extends Repository<ShippingProfile> {
}
|
import { expect } from 'chai';
import { shallowMount } from '@vue/test-utils';
import Header from '@/components/Header.vue';
describe('Header.vue', () => {
    it('mounts and creates the nav element', () => {
        // Shallow mount so child components are stubbed out.
        const header = shallowMount(Header);
        expect(header.find('nav').exists()).to.be.true;
    });
});
|
<reponame>oliverselinger/failsafe-executor
package os.failsafe.executor;
import java.sql.Connection;
/**
* This is a functional interface which represents a lambda, accepting a connection and a parameter.
*/
/**
 * Functional interface representing a task that runs inside a database
 * transaction: it receives the transaction's JDBC {@link Connection}
 * together with a caller-supplied parameter.
 *
 * <p>Fix: annotated with {@code @FunctionalInterface} so the compiler
 * enforces the single-abstract-method contract the original javadoc
 * already promised.
 *
 * @param <T> type of the extra parameter passed to the task
 */
@FunctionalInterface
public interface TransactionalTaskFunction<T> {

    /**
     * Executes the task.
     *
     * @param connection the JDBC connection of the surrounding transaction
     * @param param      the parameter supplied by the caller
     * @throws Exception if the task fails; handling is left to the caller
     */
    void accept(Connection connection, T param) throws Exception;
}
|
<filename>src/main/java/io/github/theindifferent/completionresult/ResultError.java
/*
* BSD 3-Clause License
*
* Copyright (c) 2018, Stanislav "The Indifferent" Baiduzhyi
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.github.theindifferent.completionresult;
import java.util.Objects;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
 * Error variant of {@link Result}: carries an error enum constant and never
 * holds a value. Instances are immutable.
 */
class ResultError<V, E extends Enum<E>> implements Result<V, E> {

    /** The error carried by this result. */
    private final E error;

    /**
     * @param error the error constant to wrap
     */
    ResultError(final E error) {
        this.error = error;
    }

    /**
     * Always fails: an error result has no value.
     *
     * @throws IllegalStateException always
     */
    @NonNull
    @Override
    public V value() {
        throw new IllegalStateException("Error result does not have value");
    }

    /** @return the wrapped error */
    @NonNull
    @Override
    public E error() {
        return error;
    }

    @Override
    public boolean isValue() {
        return false;
    }

    @Override
    public boolean isError() {
        return true;
    }

    // Equality is based solely on the wrapped error (and exact class).
    @Override
    public boolean equals(@Nullable final Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final ResultError<?, ?> that = (ResultError<?, ?>) o;
        return Objects.equals(error, that.error);
    }

    @Override
    public int hashCode() {
        return Objects.hash(error);
    }

    @Override
    public String toString() {
        return "Result{"
                + "error="
                + error
                + '}';
    }
}
|
#!/usr/bin/env bash
set -Eeuo pipefail

# usage: mkdir -p output && ./run-script.sh ./examples/debian.sh output ...

# Resolve the directory containing this script so helpers can be sourced
# regardless of the caller's working directory.
thisDir="$(readlink -f "$BASH_SOURCE")"
thisDir="$(dirname "$thisDir")"
source "$thisDir/scripts/.constants.sh" \
	--flags 'image:' \
	--flags 'no-bind' \
	--flags 'no-build' \
	-- \
	'[--image=foo/bar:baz] [--no-build] [--no-bind] [script/command]' \
	'./examples/debian.sh output stretch 2017-05-08T00:00:00Z
--no-build --image=debuerreotype:ubuntu ./examples/ubuntu.sh output xenial'

eval "$dgetopt"
image=
build=1
bindMount=1
# Parse the flags declared above via the dgetopt helpers.
while true; do
	flag="$1"; shift
	dgetopt-case "$flag"
	case "$flag" in
		--image) image="$1"; shift ;; # use an existing image instead of the default name
		--no-bind) bindMount= ;; # use a volume instead of bind-mounting $PWD
		--no-build) build= ;; # skip "docker build"
		--) break ;;
		*) eusage "unknown flag '$flag'" ;;
	esac
done

# Default image name comes from the repo's image helper script.
if [ -z "$image" ]; then
	image="$("$thisDir/.docker-image.sh")"
fi

if [ -n "$build" ]; then
	docker build --tag "$image" "$thisDir"
fi

# Arguments for "docker run"; the container needs mount/unshare privileges.
args=(
	--hostname debuerreotype
	--init
	--interactive
	--rm
	# we ought to be able to mount/unshare
	--cap-add SYS_ADMIN
	# make sure we don't get extended attributes
	--cap-drop SETFCAP
	# AppArmor also blocks mount/unshare :)
	--security-opt apparmor=unconfined
	# --debian-eol potato wants to run "chroot ... mount ... /proc" which gets blocked (i386, ancient binaries, blah blah blah)
	--security-opt seccomp=unconfined
	# (other arches see this occasionally too)
	--tmpfs /tmp:dev,exec,suid,noatime
	--env TMPDIR=/tmp
	--workdir /workdir
)
if [ -n "$bindMount" ]; then
	args+=( --mount "type=bind,src=$PWD,dst=/workdir" )
else
	args+=( --volume /workdir )
fi
# Allocate a TTY only when stdin and stdout are terminals.
if [ -t 0 ] && [ -t 1 ]; then
	args+=( --tty )
fi

exec docker run "${args[@]}" "$image" "$@"
<gh_stars>100-1000
// Doxygen-generated search index; do not edit by hand.
// Each entry: [lowercased symbol, [display name, [page-anchor tuples]]].
var searchData=
[
  ['page_5f4k',['PAGE_4k',['../core__ca_8h.html#gab184b824a6d7cb728bd46c6abcd0c21aa99ce0ce05e9c418dc6bddcc47b2fa05a',1,'core_ca.h']]],
  ['page_5f64k',['PAGE_64k',['../core__ca_8h.html#gab184b824a6d7cb728bd46c6abcd0c21aafc53512bbf834739fcb97ad1c0f444fc',1,'core_ca.h']]],
  ['privtimer_5firqn',['PrivTimer_IRQn',['../ARMCA9_8h.html#a7e1129cd8a196f4284d41db3e82ad5c8a116d3d8a9fcc5fef99becc9d25a56249',1,'ARMCA9.h']]],
  ['privwatchdog_5firqn',['PrivWatchdog_IRQn',['../ARMCA9_8h.html#a7e1129cd8a196f4284d41db3e82ad5c8aeb45b2fc32150bf94ecf305ee223f28f',1,'ARMCA9.h']]]
];
#!/bin/bash
# Post-install bootstrap for dnf-based distros: enable and start the
# postfix mail service.
bootstrap_dnf() {
    systemctl enable postfix.service
    systemctl start postfix.service
}
# Hook run after group repo setup; intentionally empty on EL.
group_repo_post() {
    # Nothing to do for EL
    # ':' is the shell no-op, keeping this a valid (empty) function body.
    :
}
# Distro-specific customisation: installs test tooling, wires up debuginfo
# repos for Rocky/CentOS 8 images that lack them, and exposes Mellanox
# OFED's OpenMPI through an environment module.
distro_custom() {
    # install avocado
    dnf -y install python3-avocado{,-plugins-{output-html,varianter-yaml-to-mux}} \
        clustershell
    # New Rocky images don't have debuginfo baked into them
    if ! dnf --enablerepo=\*-debuginfo repolist 2>/dev/null | grep -e -debuginfo; then
        if [ "$(lsb_release -s -i)" = "Rocky" ]; then
            # Need to remove the upstream [debuginfo] repos
            # But need to have the files present so that re-installation is blocked
            for file in /etc/yum.repos.d/{Rocky-Debuginfo,epel{,{,-testing}-modular}}.repo; do
                true > $file
            done
            # add local debuginfo repos
            # The echo adds a separating blank line before appending sections.
            if [ -f /etc/yum.repos.d/daos_ci-rocky8-artifactory.repo ]; then
                echo >> /etc/yum.repos.d/daos_ci-rocky8-artifactory.repo
            fi
            cat <<EOF >> /etc/yum.repos.d/daos_ci-rocky8-artifactory.repo
[daos_ci-rocky8-base-artifactory-debuginfo]
name=daos_ci-rocky8-base-artifactory-debuginfo
baseurl=${ARTIFACTORY_URL}artifactory/rocky-\$releasever-proxy/BaseOS/\$arch/debug/tree/
enabled=0
gpgcheck=1
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-rockyofficial
[daos_ci-rocky8-appstream-artifactory-debuginfo]
name=daos_ci-rocky8-appstream-artifactory-debuginfo
baseurl=${ARTIFACTORY_URL}artifactory/rocky-\$releasever-proxy/AppStream/\$arch/debug/tree/
enabled=0
gpgcheck=1
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-rockyofficial
[daos_ci-rocky8-powertools-artifactory-debuginfo]
name=daos_ci-rocky8-powertools-artifactory-debuginfo
baseurl=${ARTIFACTORY_URL}artifactory/rocky-\$releasever-proxy/PowerTools/\$arch/debug/tree/
enabled=0
gpgcheck=1
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-rockyofficial
[daos_ci-rocky8-extras-artifactory-debuginfo]
name=daos_ci-rocky8-extras-artifactory-debuginfo
baseurl=${ARTIFACTORY_URL}artifactory/rocky-\$releasever-proxy/extras/\$arch/debug/tree/
enabled=0
gpgcheck=1
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-rockyofficial
EOF
        else
            # CentOS 8: single proxied debuginfo repo is enough.
            if [ -f /etc/yum.repos.d/daos_ci-centos8.repo ]; then
                echo >> /etc/yum.repos.d/daos_ci-centos8.repo
            fi
            cat <<EOF >> /etc/yum.repos.d/daos_ci-centos8.repo
[daos_ci-centos8-artifactory-debuginfo]
name=daos_ci-centos8-artifactory-debuginfo
baseurl=${ARTIFACTORY_URL}artifactory/centos-debuginfo-proxy/\$releasever/\$basearch/
enabled=0
gpgcheck=1
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-centosofficial
EOF
        fi
    fi
    # Mellanox OFED hack
    # Generate a modulefile so "module load mpi/mlnx_openmpi-x86_64" picks
    # up the OFED-provided OpenMPI install.
    if ls -d /usr/mpi/gcc/openmpi-*; then
        version="$(rpm -q --qf "%{version}" openmpi)"
        mkdir -p /etc/modulefiles/mpi/
        cat <<EOF > /etc/modulefiles/mpi/mlnx_openmpi-x86_64
#%Module 1.0
#
#  OpenMPI module for use with 'environment-modules' package:
#
conflict		mpi
prepend-path 		PATH 		/usr/mpi/gcc/openmpi-${version}/bin
prepend-path 		LD_LIBRARY_PATH /usr/mpi/gcc/openmpi-${version}/lib64
prepend-path 		PKG_CONFIG_PATH	/usr/mpi/gcc/openmpi-${version}/lib64/pkgconfig
prepend-path		MANPATH		/usr/mpi/gcc/openmpi-${version}/share/man
setenv 			MPI_BIN		/usr/mpi/gcc/openmpi-${version}/bin
setenv			MPI_SYSCONFIG	/usr/mpi/gcc/openmpi-${version}/etc
setenv			MPI_FORTRAN_MOD_DIR	/usr/mpi/gcc/openmpi-${version}/lib64
setenv			MPI_INCLUDE	/usr/mpi/gcc/openmpi-${version}/include
setenv	 		MPI_LIB		/usr/mpi/gcc/openmpi-${version}/lib64
setenv			MPI_MAN			/usr/mpi/gcc/openmpi-${version}/share/man
setenv			MPI_COMPILER	openmpi-x86_64
setenv			MPI_SUFFIX	_openmpi
setenv	 		MPI_HOME	/usr/mpi/gcc/openmpi-${version}
EOF
    fi
}
|
import React, { Component } from 'react';
import PlayerAlbumArt from './album-art';
import PlayerTrack from './player-track';
import PlayerArtists from './artists';
import PlayerBackgroundAlbumArt from './background-album-art';
import Slider from '../slider';
export default class Player extends Component {
constructor() {
super()
this.state = {};
}
handleCroppingChange = ([beginPercentage, endPercentage]) => {
window.Demo.WebPlaybackSDK.getCurrentState().then(res => {
const { duration } = res
const snippetBeginning = Math.floor((duration / 100) * beginPercentage)
const snippetEnding = Math.floor((duration / 100) * endPercentage)
this.setState({ snippetBeginning, snippetEnding });
window.Demo.WebPlaybackSDK.seek(snippetBeginning).then(res => {
window.Demo.WebPlaybackSDK.resume();
});
});
}
restartSong = () => {
window.Demo.WebPlaybackSDK.seek(this.state.snippetBeginning).then(res => {
window.Demo.WebPlaybackSDK.resume();
});
}
componentDidMount () {
setInterval(() => {
window.Demo.WebPlaybackSDK.getCurrentState().then(playState => {
if(playState && this.state.snippetEnding && (playState.position >= this.state.snippetEnding || playState.position >= playState.duration)) {
this.restartSong()
this.setState(state => ({
playState
}))
}
}).catch(e => console.error(e))
}, 1000)
}
render() {
let { current_track } = this.props.currentState.track_window;
let image = current_track.album.images[2];
return (
<div className="screen screen-player">
<div className="player">
<div className="row">
<div className="col-sm-12">
<PlayerAlbumArt image_url={image.url} />
<PlayerTrack track={current_track} />
<PlayerArtists artists={current_track.artists} />
<Slider handleChange={this.handleCroppingChange} />
</div>
</div>
</div>
</div>
);
}
}; |
import {AsyncSocketConnection} from "../AsyncSocketConnection";
import {uuid} from "uuidv4";
import {Room} from "./Room";
import {IPlayerData} from "../../_types/game/IPlayerData";
import {AnswerCard} from "./cards/AnswerCard";
import {withErrorHandling} from "../services/withErrorHandling";
export class Player {
protected ID: string = uuid();
protected socket: AsyncSocketConnection;
// The name the player chose for him/her self
protected name: string = "Guest" + Math.floor(Math.random() * 1e3);
// The room this player is currently in
protected room: Room | null;
protected score = 0;
protected hand = [] as AnswerCard[];
protected selection = [] as AnswerCard[];
/**
* Creates a new player from a socket
* @param socket The socket to create a player for
*/
public constructor(socket: AsyncSocketConnection) {
this.socket = socket;
this.initSocketListener();
}
// Setup
/**
* Listens for socket events affecting the player's properties
*/
protected initSocketListener() {
this.socket.on(`players/${this.ID}/setName`, (name: string) =>
withErrorHandling(() => {
this.setName(name);
return {success: true};
})
);
this.socket.on(`players/${this.ID}/setSelection`, (selection: string[]) =>
withErrorHandling(() => {
this.setSelection(selection);
return {success: true};
})
);
// Make sure the player can request their own data
this.share(this);
}
/**
* Sets up listeners such that this player data can be retrieved by the other player
* @param player The other player to share the data with
*/
public share(player: Player): void {
player.getSocket().on(
`players/${this.ID}/retrieve`,
(): IPlayerData =>
withErrorHandling(() => ({
ID: this.ID,
name: this.name,
score: this.score,
selection: this.selection.map(card => card.getText()),
hand:
player == this
? this.hand.map(card => card.getText())
: undefined,
})),
this.ID
);
}
/**
* Removes all the listeners from the given player related to this player
* @param player The player to remove the listeners from
*/
public unshare(player: Player): void {
player.getSocket().off(`players/${this.ID}/retrieve`, this.ID);
}
// Getters
/**
* Retrieves the socket that can be used for communication with this player
* @returns THe socket
*/
public getSocket(): AsyncSocketConnection {
return this.socket;
}
/**
* Retrieves the identifier of this player
* @returns The identifier
*/
public getID(): string {
return this.ID;
}
/**
* Retrieves the current name of this player
* @returns The name
*/
public getName(): string {
return this.name;
}
/**
* Retrieves the score of this player
* @returns The score
*/
public getScore(): number {
return this.score;
}
/**
* Retrieves the hand of the player
* @returns The hand
*/
public getHand(): AnswerCard[] {
return this.hand;
}
/**
* Retrieves the selection of the player
* @returns The selection
*/
public getSelection(): AnswerCard[] {
return this.selection;
}
/**
* Retrieves the room that the player is currently in
* @returns The room
*/
public getRoom(): Room | null {
return this.room;
}
// Setters
/**
* Sets the room the player is now in,
* also removes the player from the previous room if he/she is still connected
* and adds the player to the new room if he/she isn't yet added
* @param room The room that the player was moved to
*/
public setRoom(room: Room | null): void {
if (room == this.room) return;
if (this.room) this.room.removePlayer(this);
this.room = room;
this.setScore(0);
if (this.room) this.room.addPlayer(this);
}
/**
* Sets the name of this player
* @param name The new name of the player
*/
public setName(name: string): void {
this.name = name;
this.broadcast(`players/${this.ID}/setName`, name);
}
/**
* Sets the score of this player
* @param score The new score
*/
public setScore(score: number): void {
this.score = score;
this.broadcast(`players/${this.ID}/setScore`, score);
}
/**
* Sets the selected cards of the player
* @param selection The new selection
*/
public setSelection(selection: string[] | AnswerCard[]): void {
const allCards = [...this.hand, ...this.selection];
if (typeof selection[0] == "string") {
this.selection = (selection as string[])
.map(card => allCards.find(c => c.getText() == card))
.filter(card => card != null) as AnswerCard[];
} else {
this.selection = (selection as AnswerCard[]).filter(card =>
allCards.includes(card)
);
}
this.broadcast(
`players/${this.ID}/setSelection`,
this.selection.map(card => card.getText())
);
// Update the hand to not include the selection
this.setHand(allCards.filter(card => !this.selection.includes(card)));
}
/**
* Clears the selected cards of the player
*/
public clearSelection(): void {
this.selection = [];
this.broadcast(
`players/${this.ID}/setSelection`,
this.selection.map(card => card.getText())
);
}
    /**
     * Sets the hand of the player.
     * Unlike the set* methods above, this emits only to this player's own
     * socket (a hand is private), not to the whole room.
     * @param hand The new hand
     */
    public setHand(hand: AnswerCard[]): void {
        this.hand = hand;
        this.getSocket().emit(
            `players/${this.ID}/setHand`,
            this.hand.map(card => card.getText())
        );
    }
    // Utility
    /**
     * Broadcasts a message to all players in the room this player is part of.
     * Silently does nothing when the player is not in a room.
     * @param message The message to be broadcasted
     * @param args The arguments to supply
     */
    protected broadcast<T extends any[]>(message: string, ...args: T): void {
        if (this.room) this.room.broadcast(message, ...args);
    }
}
|
import { html, LitElement } from 'lit-element/lit-element.js';
import { ButtonMixin } from '../button-mixin.js';
// Test fixture: a minimal custom element that applies ButtonMixin to LitElement
// and renders a plain <button>, so the mixin's behavior can be exercised in tests.
class TestButtonElem extends ButtonMixin(LitElement) {
	render() {
		return html`
			<button>Test Button</button>
		`;
	}
}
// Register the fixture under its tag name for use in test DOM.
customElements.define('test-button-elem', TestButtonElem);
|
<gh_stars>1-10
import React from 'react';
import { CodeDemo, Api } from '../CommonDispalyComponents';
import './avatar.example.scss';
import CodeDemo1 from './avatar.codeDemo1';
const code1 = require('!!raw-loader!./avatar.codeDemo1.tsx');
import CodeDemo2 from './avatar.codeDemo2';
const code2 = require('!!raw-loader!./avatar.codeDemo2.tsx');
/**
 * Documentation/demo page for the Avatar component: intro text, two live
 * code demos (basic usage, custom content types) and the API table.
 * `code1`/`code2` hold the raw demo sources loaded via raw-loader above.
 */
const AvatarExample: React.FunctionComponent = () => {
    return (
        <div className="avatar-example-page">
            <section>
                <h1>Avatar 头像</h1>
                <p>用来代表用户或事物,支持图片、图标或者字符。</p>
            </section>
            <section>
                <h2>何时使用</h2>
                <p>展示用户头像信息。</p>
            </section>
            <section>
                <h2>代码演示</h2>
                <div className="code-demonstration">
                    <div className="code-demo-column">
                        <CodeDemo
                            title="基本使用"
                            content={<p>可设置形状和大小。</p>}
                            code={code1.default}
                        >
                            <CodeDemo1 />
                        </CodeDemo>
                    </div>
                    <div className="code-demo-column">
                        <CodeDemo
                            title="自定义类型"
                            content={
                                <p>
                                    可设置图片、字符串和<code>icon</code>
                                    ,字符串支持自定义颜色和背景色,<code>icon</code>
                                    支持自定义背景色。
                                </p>
                            }
                            code={code2.default}
                        >
                            <CodeDemo2 />
                        </CodeDemo>
                    </div>
                </div>
            </section>
            <section>
                <h2>API</h2>
                {/* Rows: [prop, description, type, default] */}
                <Api
                    data={[
                        ['icon', '设置头像图标的类型', 'string', 'avatar'],
                        ['size', '设置头像的大小', 'number', '32'],
                        ['shape', '设置头像的形状', 'circle | square', 'circle'],
                        ['src', '设置头像的图片地址', 'string', '-'],
                        ['alt', '设置图片的 alt', 'string', '-'],
                        ['style', '设置头像的 style', 'React.CSSProperties', '-'],
                        ['children', '设置文字类型图像', 'string', '-']
                    ]}
                />
            </section>
        </div>
    );
};
export default AvatarExample;
|
<gh_stars>0
package simulator.fitness;
import simulator.Node;
public class ImprovedFitnessCalculator extends AbstractFitnessCalculator {
    /**
     * Updates the fitness of the given node.
     * Fitness delta = energy spent while "sad" minus the combined energy spent
     * as cooperator and defector, all measured since the last update.
     * @param n the node whose fitness value is updated
     */
    @Override
    public void updateFitnessForNode(Node n) {
        // Split the original one-liner; also fixes the typo'd snake_case
        // local "current_fittness_value" to Java camelCase.
        final double sad = n.getEs().getEnergy_spent_sad_from_last_update();
        final double cooperator = n.getEs().getEnergy_spent_cooperator_from_last_update();
        final double defector = n.getEs().getEnergy_spent_defector_from_last_update();
        final double currentFitnessValue = sad - (cooperator + defector);
        n.addFitnessValue(currentFitnessValue);
    }

    /**
     * @return the identifier of this fitness-calculator variant
     */
    @Override
    public String toString() {
        return "improved";
    }
}
|
#!/bin/sh
# Exit 0 only when wpa_supplicant is both enabled and configured.
# POSIX marks test's "-a" operator obsolescent (ambiguous parsing);
# chaining two test invocations with && is the portable equivalent.
test -f /data/.shadow/.etc/wpa_supplicant.enabled && test -f /data/.shadow/.etc/wpa_supplicant.conf
|
def generatePolicy(principalId, userDetails, effect, methodArn):
    """Build an IAM policy document for an API Gateway custom authorizer.

    Args:
        principalId: Identifier of the caller the policy applies to.
        userDetails: Caller details (currently unused, kept for interface
            compatibility).
        effect: "Allow" or "Deny".
        methodArn: ARN of the API Gateway method being authorized.

    Returns:
        dict: A policy response with a single execute-api:Invoke statement.
    """
    statement = {
        'Action': 'execute-api:Invoke',
        'Effect': effect,
        'Resource': methodArn,
    }
    return {
        'principalId': principalId,
        'policyDocument': {
            'Version': '2012-10-17',
            'Statement': [statement],
        },
    }
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alipay.sofa.ark.spi.service;
/**
* {@code PriorityOrdered} is an interface that can be implemented by objects that
* should be ordered.
*
* @author qilong.zql
* @since 0.4.0
*/
public interface PriorityOrdered {
    /**
     * Useful constant for the highest precedence value.
     * @see java.lang.Integer#MIN_VALUE
     */
    int HIGHEST_PRECEDENCE = Integer.MIN_VALUE;
    /**
     * Useful constant for the lowest precedence value.
     * @see java.lang.Integer#MAX_VALUE
     */
    int LOWEST_PRECEDENCE = Integer.MAX_VALUE;
    /**
     * Default priority used when an implementation does not specify one.
     */
    int DEFAULT_PRECEDENCE = 100;
    /**
     * Get the order value of this object. Higher values are interpreted as lower
     * priority. As a consequence, the object with the lowest value has the highest
     * priority.
     * @return the priority value of this object
     */
    int getPriority();
}
<reponame>afilippov-ua/data-validation-tool
/*
* Copyright 2018-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.filippov.data.validation.tool.controller;
import com.filippov.data.validation.tool.dto.ColumnPairDto;
import com.filippov.data.validation.tool.dto.TablePairDto;
import com.filippov.data.validation.tool.dto.datasource.DatasourceDefinitionDto;
import com.filippov.data.validation.tool.dto.workspace.WorkspaceDto;
import com.filippov.data.validation.tool.dto.workspace.WorkspaceMetadataDto;
import org.junit.jupiter.api.Test;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import java.util.List;
import java.util.Map;
import static com.filippov.data.validation.tool.model.datasource.DatasourceType.TEST_IN_MEMORY_DATASOURCE;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.http.HttpHeaders.CONTENT_TYPE;
import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE;
import static org.springframework.http.MediaType.TEXT_PLAIN_VALUE;
/**
 * Integration tests for the workspace REST endpoints: workspace CRUD plus
 * metadata, table-pair and column-pair retrieval. Each test creates its own
 * workspace(s) against the in-memory test datasources and deletes them at the end.
 */
public class WorkspaceControllerTest extends AbstractDataValidationApplicationTest {
    // Fixture: a workspace wired to two in-memory test datasources (LEFT / RIGHT).
    private static final WorkspaceDto TEST_WORKSPACE_1 = WorkspaceDto.builder()
            .name("test-workspace-name-1")
            .left(DatasourceDefinitionDto.builder()
                    .datasourceType(TEST_IN_MEMORY_DATASOURCE)
                    .maxConnections(1)
                    .configParams(Map.of("relationType", "LEFT"))
                    .build())
            .right(DatasourceDefinitionDto.builder()
                    .datasourceType(TEST_IN_MEMORY_DATASOURCE)
                    .maxConnections(2)
                    .configParams(Map.of("relationType", "RIGHT"))
                    .build())
            .build();
    // Second fixture, identical apart from the name, used where two workspaces are needed.
    private static final WorkspaceDto TEST_WORKSPACE_2 = WorkspaceDto.builder()
            .name("test-workspace-name-2")
            .left(DatasourceDefinitionDto.builder()
                    .datasourceType(TEST_IN_MEMORY_DATASOURCE)
                    .maxConnections(1)
                    .configParams(Map.of("relationType", "LEFT"))
                    .build())
            .right(DatasourceDefinitionDto.builder()
                    .datasourceType(TEST_IN_MEMORY_DATASOURCE)
                    .maxConnections(2)
                    .configParams(Map.of("relationType", "RIGHT"))
                    .build())
            .build();
    // GET /workspaces returns all existing workspaces as JSON.
    @Test
    public void getWorkspacesTest() {
        final String workspaceId_1 = createWorkspace(TEST_WORKSPACE_1).getBody();
        final String workspaceId_2 = createWorkspace(TEST_WORKSPACE_2).getBody();
        final ParameterizedTypeReference<List<WorkspaceDto>> responseType = new ParameterizedTypeReference<>() {
        };
        final ResponseEntity<List<WorkspaceDto>> response =
                restTemplate.exchange(buildUrl(WORKSPACES_PATH), HttpMethod.GET, null, responseType);
        assertThat(response).isNotNull();
        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
        assertThat(response.getHeaders().containsKey(CONTENT_TYPE)).isTrue();
        assertThat(response.getHeaders().get(CONTENT_TYPE)).isEqualTo(singletonList(APPLICATION_JSON_VALUE));
        assertThat(response.getBody()).isNotEmpty().hasSize(2);
        deleteWorkspace(workspaceId_1);
        deleteWorkspace(workspaceId_2);
    }
    // POST returns the new id as text/plain; GET by id echoes back the stored workspace.
    @Test
    public void createAndGetWorkspaceTest() {
        final ResponseEntity<String> response = createWorkspace(TEST_WORKSPACE_1);
        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
        assertThat(response.getHeaders().containsKey(CONTENT_TYPE)).isTrue();
        assertThat(response.getHeaders().get(CONTENT_TYPE).stream()
                .anyMatch(val -> val.contains(TEXT_PLAIN_VALUE))).isTrue();
        final String workspaceId = response.getBody();
        assertThat(workspaceId).isNotEmpty();
        final ResponseEntity<WorkspaceDto> fetchedResponse = getWorkspace(workspaceId);
        assertThat(response).isNotNull();
        assertThat(fetchedResponse.getStatusCode()).isEqualTo(HttpStatus.OK);
        assertThat(fetchedResponse.getHeaders().containsKey(CONTENT_TYPE)).isTrue();
        assertThat(fetchedResponse.getHeaders().get(CONTENT_TYPE)).isEqualTo(singletonList(APPLICATION_JSON_VALUE));
        final WorkspaceDto fetchedWorkspace = fetchedResponse.getBody();
        assertThat(fetchedWorkspace).isNotNull();
        assertThat(fetchedWorkspace.getId()).isEqualTo(workspaceId);
        assertThat(fetchedWorkspace.getName()).isEqualTo(TEST_WORKSPACE_1.getName());
        assertThat(fetchedWorkspace.getLeft()).isEqualTo(TEST_WORKSPACE_1.getLeft());
        assertThat(fetchedWorkspace.getRight()).isEqualTo(TEST_WORKSPACE_1.getRight());
        deleteWorkspace(workspaceId);
    }
    // DELETE of an existing workspace responds 200 OK.
    @Test
    public void deleteWorkspaceTest() {
        final String workspaceId = createWorkspace(TEST_WORKSPACE_1).getBody();
        assertThat(workspaceId).isNotEmpty();
        final ResponseEntity<String> response = deleteWorkspace(workspaceId);
        assertThat(response).isNotNull();
        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
    }
    // /metadata exposes the table pairs and per-table column pairs of the test datasources.
    @Test
    public void getMetadataTest() {
        final String workspaceId = createWorkspace(TEST_WORKSPACE_1).getBody();
        assertThat(workspaceId).isNotEmpty();
        final ResponseEntity<WorkspaceMetadataDto> response = restTemplate.exchange(
                buildUrl(WORKSPACES_PATH + "/" + workspaceId + "/metadata"),
                HttpMethod.GET,
                null,
                WorkspaceMetadataDto.class);
        assertThat(response).isNotNull();
        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
        assertThat(response.getHeaders().containsKey(CONTENT_TYPE)).isTrue();
        assertThat(response.getHeaders().get(CONTENT_TYPE)).isEqualTo(singletonList(APPLICATION_JSON_VALUE));
        final WorkspaceMetadataDto metadata = response.getBody();
        assertThat(metadata).isNotNull();
        assertThat(metadata.getTablePairs()).isNotEmpty();
        assertThat(metadata.getTablePairs().keySet().stream().sorted().collect(toList()))
                .isEqualTo(asList("departments", "users"));
        assertThat(metadata.getColumnPairs()).isNotEmpty();
        assertThat(metadata.getColumnPairs().get("departments").keySet().stream().sorted().collect(toList()))
                .isEqualTo(asList("id", "name", "number_of_employees"));
        assertThat(metadata.getColumnPairs().get("users").keySet().stream().sorted().collect(toList()))
                .isEqualTo(asList("id", "password", "username"));
        deleteWorkspace(workspaceId);
    }
    // /metadata/tablePairs lists the table pairs only.
    @Test
    public void getTablePairsTest() {
        final String workspaceId = createWorkspace(TEST_WORKSPACE_1).getBody();
        assertThat(workspaceId).isNotEmpty();
        final ParameterizedTypeReference<List<TablePairDto>> responseType = new ParameterizedTypeReference<>() {
        };
        final ResponseEntity<List<TablePairDto>> response = restTemplate.exchange(
                buildUrl(WORKSPACES_PATH + "/" + workspaceId + "/metadata/tablePairs"),
                HttpMethod.GET,
                null,
                responseType);
        assertThat(response).isNotNull();
        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
        assertThat(response.getHeaders().containsKey(CONTENT_TYPE)).isTrue();
        assertThat(response.getHeaders().get(CONTENT_TYPE)).isEqualTo(singletonList(APPLICATION_JSON_VALUE));
        final List<TablePairDto> tablePairs = response.getBody();
        assertThat(tablePairs).isNotNull();
        assertThat(tablePairs.stream().map(TablePairDto::getName).sorted().collect(toList()))
                .isEqualTo(asList("departments", "users"));
        deleteWorkspace(workspaceId);
    }
    // /metadata/tablePairs/{table}/columnPairs lists the column pairs of one table pair.
    @Test
    public void getColumnPairsTest() {
        final String workspaceId = createWorkspace(TEST_WORKSPACE_1).getBody();
        assertThat(workspaceId).isNotEmpty();
        final ParameterizedTypeReference<List<ColumnPairDto>> responseType = new ParameterizedTypeReference<>() {
        };
        final ResponseEntity<List<ColumnPairDto>> response = restTemplate.exchange(
                buildUrl(WORKSPACES_PATH + "/" + workspaceId + "/metadata/tablePairs/departments/columnPairs"),
                HttpMethod.GET,
                null,
                responseType);
        assertThat(response).isNotNull();
        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
        assertThat(response.getHeaders().containsKey(CONTENT_TYPE)).isTrue();
        assertThat(response.getHeaders().get(CONTENT_TYPE)).isEqualTo(singletonList(APPLICATION_JSON_VALUE));
        final List<ColumnPairDto> tablePairs = response.getBody();
        assertThat(tablePairs).isNotNull();
        assertThat(tablePairs.stream().map(ColumnPairDto::getName).sorted().collect(toList()))
                .isEqualTo(asList("id", "name", "number_of_employees"));
        deleteWorkspace(workspaceId);
    }
}
|
<filename>lib/ftp4j-1.7.2/src/it/sauronsoftware/ftp4j/NVTASCIIReader.java
/*
* ftp4j - A pure Java FTP client library
*
* Copyright (C) 2008-2010 <NAME> (www.sauronsoftware.it)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License version
* 2.1, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License 2.1 for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License version 2.1 along with this program.
* If not, see <http://www.gnu.org/licenses/>.
*/
package it.sauronsoftware.ftp4j;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
/**
* This is an NVT-ASCII character stream reader.
*
* @author <NAME>
* @version 1.1
*/
class NVTASCIIReader extends Reader {
/**
* This system line separator chars sequence.
*/
private static final String SYSTEM_LINE_SEPARATOR = System
.getProperty("line.separator");
/**
* The wrapped stream.
*/
private InputStream stream;
/**
* The underlying reader.
*/
private Reader reader;
/**
* Builds the reader.
*
* @param stream
* The underlying stream.
* @param charsetName
* The name of a supported charset.
* @throws IOException
* If an I/O error occurs.
*/
public NVTASCIIReader(InputStream stream, String charsetName)
throws IOException {
this.stream = stream;
reader = new InputStreamReader(stream, charsetName);
}
public void close() throws IOException {
synchronized (this) {
reader.close();
}
}
public int read(char[] cbuf, int off, int len) throws IOException {
synchronized (this) {
return reader.read(cbuf, off, len);
}
}
/**
* Changes the current charset.
*
* @param charsetName
* The new charset.
* @throws IOException
* If I/O error occurs.
* @since 1.1
*/
public void changeCharset(String charsetName) throws IOException {
synchronized (this) {
reader = new InputStreamReader(stream, charsetName);
}
}
/**
* Reads a line from the stream.
*
* @return The line read, or null if the end of the stream is reached.
* @throws IOException
* If an I/O error occurs.
*/
public String readLine() throws IOException {
StringBuffer buffer = new StringBuffer();
int previous = -1;
int current = -1;
do {
int i = reader.read();
if (i == -1) {
if (buffer.length() == 0) {
return null;
} else {
return buffer.toString();
}
}
previous = current;
current = i;
if (/* previous == '\r' && */current == '\n') {
// End of line.
return buffer.toString();
} else if (previous == '\r' && current == 0) {
// Literal new line.
buffer.append(SYSTEM_LINE_SEPARATOR);
} else if (current != 0 && current != '\r') {
buffer.append((char) current);
}
} while (true);
}
}
|
import imaplib
import email
import os
import sys  # bug fix: every error path below calls sys.exit(), but sys was never imported

# Migrates all messages from one IMAP mailbox to another:
# connects and logs in to both servers, copies each message across,
# and marks the originals as deleted.

# Source mailbox settings, assuming IMAP
source_mailbox = "<path to source mailbox>"
source_mailbox_username = "<source mailbox username>"
source_mailbox_password = "<source mailbox password>"
# Destination mailbox settings, assuming IMAP
destination_mailbox = "<path to destination mailbox>"
destination_mailbox_username = "<destination mailbox username>"
destination_mailbox_password = "<destination mailbox password>"
# Establish connection to source mailbox
try:
    source_imap = imaplib.IMAP4_SSL(source_mailbox)
except imaplib.IMAP4.error:
    print('Failed to connect to source mailbox. Exiting...')
    sys.exit(-1)
# Establish connection to destination mailbox
try:
    destination_imap = imaplib.IMAP4_SSL(destination_mailbox)
except imaplib.IMAP4.error:
    print('Failed to connect to destination mailbox. Exiting...')
    sys.exit(-1)
# Login to source mailbox
try:
    source_imap.login(source_mailbox_username, source_mailbox_password)
except imaplib.IMAP4.error:
    print('Failed to login to source mailbox. Exiting...')
    sys.exit(-1)
# Login to destination mailbox
try:
    destination_imap.login(destination_mailbox_username, destination_mailbox_password)
except imaplib.IMAP4.error:
    print('Failed to login to destination mailbox. Exiting...')
    sys.exit(-1)
# Select the default folder (INBOX) on the source mailbox
try:
    source_imap.select()
except imaplib.IMAP4.error:
    print('Failed to select source mailbox. Exiting...')
    sys.exit(-1)
# Select the default folder (INBOX) on the destination mailbox
try:
    destination_imap.select()
except imaplib.IMAP4.error:
    print('Failed to select destination mailbox. Exiting...')
    sys.exit(-1)
# Read emails from source and copy to destination mailbox
_, messages = source_imap.search(None, 'ALL')
for message in messages[0].split():
    _, data = source_imap.fetch(message, '(RFC822)')
    msg = email.message_from_bytes(data[0][1])
    # NOTE(review): the source message is flagged deleted BEFORE the copy is
    # appended; if append() fails the message is lost on expunge — consider
    # appending first. Also confirm append()'s first argument: imaplib expects
    # a mailbox (folder) name there, not the server host path.
    source_imap.store(message, '+FLAGS', '\\Deleted')
    destination_imap.append(
        destination_mailbox,
        None,
        None,
        msg.as_bytes())
# Commit changes to source mailbox
source_imap.expunge()
# Log out of source and destination mailbox
source_imap.close()
source_imap.logout()
destination_imap.close()
destination_imap.logout()
import { URI } from "../../primitives";
import { CaptionDescriptor } from "../images/caption-descriptor";
/**
* Signature/interface for a `GalleryItem` object
* @see https://developer.apple.com/documentation/apple_news/galleryitem
*/
export interface GalleryItem {
    /** URL of the item's asset (see the linked Apple News GalleryItem docs). */
    URL: URI;
    /** Caption text for accessibility (e.g. VoiceOver) — presumably not rendered visually. */
    accessibilityCaption?: string;
    /** Visible caption: a rich CaptionDescriptor or a plain string. */
    caption?: CaptionDescriptor | string;
    /** Marks the item as containing explicit content — TODO confirm exact semantics against the Apple News spec. */
    explicitContent?: boolean;
}
|
#!/usr/bin/env bash
# Bug fix: the shebang was "#!/usr/bin/etc/ bash", which is not a valid
# interpreter path; use env to locate bash.
set -e
# get or locate the project
# build it as a shared library
cd ./FooBar
sh build.sh
# library is here => ./FooBar/build/libfoobar.so
# copy the .so file to a convenient location
echo
echo "copying raw native library for convinience ... ...."
cp ./build/libfoobar.so ../java_api_build/lib/
# create or locate the SWIG interface to this project
# cmake build a new API linking :
#- SWIG interface
#- project .so
#- source file referenced in the SWIG interface (if any)
cd ../java_api_build
sh build_JAVA_api.sh
# copy API - jar and - so into javasmt
echo
echo "copying library files into JavaSMT ... ...."
cp ./build/FoobarJavaAPI.jar /home/lubuntu/SAHEED/gsoc/CODE/java-smt/lib/
cp ./build/libfoobarapi.so /home/lubuntu/SAHEED/gsoc/CODE/java-smt/lib/native/x86_64-linux/
echo "SUCCESS"
|
<filename>src/main/java/cc/javajobs/buildtools/JavaVersion.java
package cc.javajobs.buildtools;
/**
* An index of Java version names to their respective class versions.
* <p>
* Thanks to: https://en.wikipedia.org/wiki/Java_class_file#General_layout
* </p>
*
* @author <NAME>
* @since 11/07/2021 - 11:39
*/
public enum JavaVersion {
    /**
     * Java 16 - JDK 16
     */
    JAVA_16(60),
    /**
     * Java 15 - JDK 15
     */
    JAVA_15(59),
    /**
     * Java 14 - JDK 14
     */
    JAVA_14(58),
    /**
     * Java 13 - JDK 13
     */
    JAVA_13(57),
    /**
     * Java 12 - JDK 12
     */
    JAVA_12(56),
    /**
     * Java 11 - JDK 11 (LTS)
     */
    JAVA_11(55),
    /**
     * Java 10 - JDK 10
     */
    JAVA_10(54),
    /**
     * Java 9 - JDK 9
     */
    JAVA_9(53),
    /**
     * Java 1.8 / 8 - JDK 8 / 1.8 (LTS)
     */
    JAVA_8(52),
    /**
     * Unsupported Java Version for this Project.
     */
    UNSUPPORTED(-1);
    /**
     * The class-file major version linked to the Java/JDK version.
     */
    private final int versionIndex;
    /**
     * Constructor to initialise a JavaVersion.
     *
     * @param index of the version.
     * @see JavaVersion
     */
    JavaVersion(int index) {
        this.versionIndex = index;
    }
    /**
     * Method to obtain the Version Index for the given Java Version.
     *
     * @return {@link #versionIndex}
     */
    public int getVersionIndex() {
        return versionIndex;
    }
}
|
#!/bin/bash
# Bring up the exercise stack: source the container environment plus the ROS
# and catkin workspaces, then start the solution node and the car interface.
source /environment.sh
source /opt/ros/noetic/setup.bash
source /code/catkin_ws/devel/setup.bash
source /code/exercise_ws/devel/setup.bash
# Run the solution in the background alongside the vehicle interface launch.
python3 /code/solution.py &
# --wait defers the launch until a ROS master is available.
roslaunch --wait car_interface all.launch veh:=$VEHICLE_NAME &
|
<gh_stars>1-10
from rofl import setUpExperiment, retrieveConfigYaml
import argparse
import sys
def argParser(args):
    """Parse known CLI arguments for RoLas training experiments.

    Args:
        args: Raw argument list (e.g. ``sys.argv``); unknown args are ignored.

    Returns:
        argparse.Namespace with attributes ``alg``, ``config``, ``dm``, ``cuda``.
    """
    def str2bool(value):
        # Bug fix: argparse's type=bool converts any non-empty string (even
        # "False") to True. Parse common textual booleans explicitly instead.
        if isinstance(value, bool):
            return value
        lowered = value.lower()
        if lowered in ('yes', 'true', 't', '1'):
            return True
        if lowered in ('no', 'false', 'f', '0'):
            return False
        raise argparse.ArgumentTypeError('Boolean value expected, got %r' % value)

    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="Parse arguments for Train RoLas Experiments",
        epilog="python train.py -a dqn -c dqn_bulldozer")
    parser.add_argument(
        '--alg', '-a', type=str,
        help= 'Name of the algorithm to run')
    parser.add_argument(
        '--config', '-c', type=str,
        help = 'Name of the yaml config file on the scripts folder'
    )
    parser.add_argument(
        '--dm', type=str2bool, default=True,
        help= 'If the experiment should have a dummy manager, this will incur in not saving anyhing'
    )
    parser.add_argument(
        '--cuda', type=str2bool, default=True,
        help= 'Enable trying to use any available CUDA devices'
    )
    # parse_known_args tolerates extra args (e.g. the script path in sys.argv).
    return parser.parse_known_args(args)[0]
if __name__ == '__main__':
    # Parse CLI args, load the user's YAML config, build the experiment
    # (agent/policy/train fn/manager), run training, then release resources.
    args = argParser(sys.argv[:])
    configUser = retrieveConfigYaml(args.config)
    config, agent, policy, train, manager =\
        setUpExperiment(args.alg, configUser, dummyManager = args.dm, cuda = args.cuda)
    # The manager's saver receives checkpoints produced during training.
    results = train(config, agent, policy, saver = manager.startSaver())
    agent.close()
    manager.close()
|
<filename>dist/providers/database/ResourceManager.d.ts
import { CollectionReference } from "@firebase/firestore-types";
import { RAFirebaseOptions } from "../RAFirebaseOptions";
import { IFirebaseWrapper } from "./firebase/IFirebaseWrapper";
import { User } from "@firebase/auth-types";
import { messageTypes } from "../../misc";
export interface IResource {
    /** Relative collection path as requested by the caller. */
    path: string;
    /** Absolute Firestore path (path resolved against the configured root). */
    pathAbsolute: string;
    /** Firestore collection reference backing this resource. */
    collection: CollectionReference;
    /** Cached list of documents for the collection. */
    list: Array<{}>;
}
/**
 * Generated declaration (dist .d.ts) for the Firestore resource manager:
 * resolves relative paths to collections and caches resource snapshots.
 */
export declare class ResourceManager {
    private fireWrapper;
    private options;
    private resources;
    private db;
    constructor(fireWrapper: IFirebaseWrapper, options: RAFirebaseOptions);
    /** Returns the cached resource for the given relative path. */
    GetResource(relativePath: string): IResource;
    /** Resolves (and refreshes if needed) the resource for the given path/query. */
    TryGetResourcePromise(relativePath: string, collectionQuery: messageTypes.CollectionQueryType): Promise<IResource>;
    /** Re-reads the resource's documents from Firestore. */
    RefreshResource(relativePath: string, collectionQuery: messageTypes.CollectionQueryType): Promise<void>;
    /** Fetches a single document by id from the given collection path. */
    GetSingleDoc(relativePath: string, docId: string): Promise<{}>;
    private initPath;
    private parseFireStoreDocument;
    getUserLogin(): Promise<User>;
    private isCollectionAccessible;
    private removeResource;
    private applyQuery;
}
|
#!/bin/bash
#### Copy the ec2-user's AWS credentials/config directory into the
#### Data Virtuality home and hand ownership to its service account.
#### (Note: the previous header said "Download code from GitHub", but no
#### download happens here — this only syncs the local ~/.aws directory.)
sudo rsync -r /home/ec2-user/.aws /opt/datavirtuality/
sudo chown -R datavirtuality:datavirtuality /opt/datavirtuality/.aws
|
<gh_stars>10-100
// DIP (Bundestag documentation system) API credentials/endpoint; the endpoint
// defaults to the public host when not provided via the environment.
export const { DIP_API_KEY = '', DIP_API_ENDPOINT = 'https://search.dip.bundestag.de' } = process.env;
// HTTP port for this service (default 3101). Always pass an explicit radix
// to parseInt so values are parsed as base 10.
export const PORT = process.env.PORT ? parseInt(process.env.PORT, 10) : 3101;
// Rate limit for outgoing requests (default 1).
export const RATE_LIMIT = process.env.RATE_LIMIT ? parseInt(process.env.RATE_LIMIT, 10) : 1;
|
<gh_stars>0
#include "CashShopBarter.h"
namespace Lunia {
	namespace XRated {
		namespace Database {
			namespace Info {
				// Serialization/deserialization pairs for the cash-shop data
				// structures. Field order must match between Serialize and
				// Deserialize for each type; commented values in Read calls
				// are the former defaults.
				void CashStampInfo::Serialize(Serializer::IStreamWriter& out) const
				{
					out.Begin(L"AllM::XRated::Database::Info::CashStampInfo");
					out.Write(L"ItemHash", Hash);
					out.Write(L"Fee", Fee);
					out.Write(L"AutoResponse", AutoResponse);
					out.Write(L"MaxAttachedItems", MaxAttachedItems);
					out.Write(L"MaxAttachedMoney", MaxAttachedMoney);
					out.Write(L"ExpiredDay", ExpiredDay);
					out.Write(L"Delay", Delay);
				}
				void CashStampInfo::Deserialize(Serializer::IStreamReader& in)
				{
					in.Begin(L"AllM::XRated::Database::Info::CashStampInfo");
					in.Read(L"ItemHash", Hash);
					in.Read(L"Fee", Fee);//, 0);
					in.Read(L"AutoResponse", AutoResponse);//, false);
					in.Read(L"MaxAttachedItems", MaxAttachedItems);//, 1);
					in.Read(L"MaxAttachedMoney", MaxAttachedMoney);//, 100000);
					in.Read(L"ExpiredDay", ExpiredDay);//, 14);
					in.Read(L"Delay", Delay);//, 60);
				}
				/* 3.1 by ultimate */
				void BarterItemInfo::Serialize(Serializer::IStreamWriter& out) const
				{
					out.Begin(L"AllM::XRated::Database::Info::BarterItemInfo");
					out.Write(L"BuyItemId", BuyItemId);
					out.Write(L"BuyItemPrice", BuyItemPrice);
					out.Write(L"NeedItemId", NeedItemId);
					out.Write(L"NeedItemCount", NeedItemCount);
				}
				void BarterItemInfo::Deserialize(Serializer::IStreamReader& in)
				{
					in.Begin(L"AllM::XRated::Database::Info::BarterItemInfo");
					in.Read(L"BuyItemId", BuyItemId);
					in.Read(L"BuyItemPrice", BuyItemPrice);
					in.Read(L"NeedItemId", NeedItemId);
					in.Read(L"NeedItemCount", NeedItemCount);
				}
				void ShopInfo::Category::Item::Serialize(Serializer::IStreamWriter& out) const
				{
					out.Begin(L"AllM::XRated::Database::Info::ShopInfo::Category::Item");
					out.Write(L"ItemHash", ItemHash);
					out.Write(L"BuyCount", BuyCount);
					out.Write(L"Tags", Tags);
				}
				void ShopInfo::Category::Item::Deserialize(Serializer::IStreamReader& in)
				{
					in.Begin(L"AllM::XRated::Database::Info::ShopInfo::Category::Item");
					in.Read(L"ItemHash", ItemHash);
					// Explicit defaults here allow reading older streams that
					// lack these fields.
					in.Read(L"BuyCount", BuyCount, static_cast<uint16>(1));
					in.Read(L"Tags", Tags, std::vector< std::wstring >());
				}
				void ShopInfo::CategoryTabInfo::Serialize(Serializer::IStreamWriter& out) const
				{
					out.Begin(L"AllM::XRated::Database::Info::ShopInfo::CategoryTabInfo");
					out.Write(L"DisplayName", DisplayName);
					out.Write(L"ToolTip", ToolTip);
					out.Write(L"ShopCategoryWindowInfoHash", ShopCategoryWindowInfoHash);
					out.Write(L"CashSlotID", CashSlotID);
					out.Write(L"ShopItemWindowType", ShopItemWindowType);
				}
				void ShopInfo::CategoryTabInfo::Deserialize(Serializer::IStreamReader& in)
				{
					in.Begin(L"AllM::XRated::Database::Info::ShopInfo::CategoryTabInfo");
					in.Read(L"DisplayName", DisplayName);
					in.Read(L"ToolTip", ToolTip);
					in.Read(L"ShopCategoryWindowInfoHash", ShopCategoryWindowInfoHash);
					in.Read(L"CashSlotID", CashSlotID, std::wstring());
					in.Read(L"ShopItemWindowType", ShopItemWindowType, (uint8)0);
				}
				void ShopInfo::Category::Serialize(Serializer::IStreamWriter& out) const
				{
					out.Begin(L"AllM::XRated::Database::Info::ShopInfo::Category");
					out.Write(L"DisplayName", DisplayName);
					out.Write(L"ToolTip", ToolTip);
					out.Write(L"ShopCategoryWindowInfoHash", ShopCategoryWindowInfoHash);
					out.Write(L"CashSlotID", CashSlotID);
					out.Write(L"ShopItemWindowType", ShopItemWindowType);
					out.Write(L"Items", Items);
				}
				void ShopInfo::Category::Deserialize(Serializer::IStreamReader& in)
				{
					in.Begin(L"AllM::XRated::Database::Info::ShopInfo::Category");
					in.Read(L"DisplayName", DisplayName);
					in.Read(L"ToolTip", ToolTip);
					in.Read(L"ShopCategoryWindowInfoHash", ShopCategoryWindowInfoHash);
					in.Read(L"CashSlotID", CashSlotID, std::wstring());
					in.Read(L"ShopItemWindowType", ShopItemWindowType, (uint8)0);
					in.Read(L"Items", Items);
				}
				void ShopInfo::Serialize(Serializer::IStreamWriter& out) const
				{
					out.Begin(L"AllM::XRated::Database::Info::ShopInfo");
					out.Write(L"DisplayName", DisplayName);
					out.Write(L"Categorys", Categorys);
					out.Write(L"AmbienceSound", AmbienceSound);
					out.Write(L"BackWindowInfoHash", BackWindowInfoHash);
				}
				void ShopInfo::Deserialize(Serializer::IStreamReader& in)
				{
					in.Begin(L"AllM::XRated::Database::Info::ShopInfo");
					in.Read(L"DisplayName", DisplayName);
					in.Read(L"Categorys", Categorys);
					in.Read(L"AmbienceSound", AmbienceSound);
					in.Read(L"BackWindowInfoHash", BackWindowInfoHash);
				}
				// Predicate functors used with std::find_if-style lookups.
				bool ShopInfo::Category::Item::Finder::operator()(ShopInfo::Category::Item& lhs) {
					return (lhs.ItemHash == What);
				}
				bool BarterItemInfo::Finder::operator()(const BarterItemInfo& info) const {
					return info.BuyItemId == barterItemId;
				}
				bool CashStampInfo::Finder::operator()(const CashStampInfo& info) const {
					return info.Hash == Hash;
				}
			}
		}
	}
}
<gh_stars>0
$(document).ready(function() {
    // n = number of issued WRCs, read from the blockchain stored in localStorage.
    var wrcbc, n;
    try {
        if (typeof(Storage) !== "undefined") {
            wrcbc = JSON.parse(localStorage.getItem("wrcbc"));
            // First chain entry is not a WRC, hence length - 1.
            n = wrcbc.chain.length - 1;
            if (n > 0) {
                $("#wrc-no").attr("placeholder", "Please enter wrc no (1-" + n + ") ...");
            } else {
                $("#wrc-no").attr("placeholder", "Please enter wrc no (0) ...");
            }
        } else {
            // localStorage unsupported: leave the default placeholder.
        }
    } catch (err) {
        // NOTE(review): errors (e.g. missing/corrupt "wrcbc" entry) are
        // swallowed, leaving n undefined — the handlers below then show the
        // out-of-range message. Consider at least console.warn here.
    }
    $("#issuance-back").click(function() {
        location.href = "index-1.html";
    });
    // Validate the requested WRC number and generate a PDF via jsPDF.
    $("#download-wrc").click(function() {
        var wrcno = parseInt($("#wrc-no").val());
        if (n == 0) {
            $("#result").html("The organization has not earned any wrcs.").fadeIn();
        } else if (wrcno > n || wrcno < 1) {
            $("#result").html("The wrc no. is out of range.").fadeIn();
        } else if (isNaN(wrcno)) {
            $("#result").html("The wrc no. is required.").fadeIn();
        } else {
            var doc = new jsPDF();
            doc.text("WRC-" + $("#wrc-no").val() + "\n" + "Issued on: " + (new Date()).toString(), 10, 15);
            doc.save("wrc-" + $("#wrc-no").val() + ".pdf");
            $("#result").html("The wrc " + $("#wrc-no").val() + " will get downloaded shortly.").fadeIn();
        }
    });
});
<html>
<head>
<title>Input Validation</title>
<script>
/**
 * Validates the name/age form before submission.
 * Returns false (blocking submit) when either field is empty/whitespace-only
 * or age is not a number in [0, 120].
 */
function validateForm() {
  // trim() so whitespace-only entries are rejected (previously " " passed).
  var name = document.forms['myForm']['name'].value.trim();
  var age = document.forms['myForm']['age'].value.trim();
  if (name === "" || age === "") {
    alert("Name and age must be filled out");
    return false;
  }
  if (isNaN(age) || age < 0 || age > 120) {
    alert("Age must be a valid number between 0 and 120");
    return false;
  }
  return true;
}
</script>
</head>
<body>
<form name='myForm' onsubmit='return validateForm()'>
Name: <input type='text' name='name'><br>
Age: <input type='text' name='age'><br>
<input type='submit' value='Submit'>
</form>
</body>
</html> |
def max_of_two(x, y):
    """Return the larger of the two values (y when they are equal)."""
    return x if x > y else y
# Demo: compute and print the maximum of two sample values.
x = 3
y = 6
maximum = max_of_two(x, y)
print('The maximum of', x, 'and', y, 'is', maximum)
<reponame>Wlisfes/lisfes-service<gh_stars>1-10
import { Injectable, HttpException, HttpStatus } from '@nestjs/common'
import { InjectRepository } from '@nestjs/typeorm'
import { Repository, Brackets, getManager } from 'typeorm'
import { CloudEntity } from '@/entity/cloud.entity'
import { ArticleEntity } from '@/entity/article.entity'
import { MinuteEntity } from '@/entity/minute.entity'
import { SourceEntity } from '@/entity/source.entity'
import { UserEntity } from '@/entity/user.entity'
import { LoggerEntity } from '@/entity/logger.entity'
import * as DTO from './compute.interface'
import * as day from 'dayjs'
@Injectable()
export class ComputeService {
	constructor(
		@InjectRepository(CloudEntity) private readonly cloudModel: Repository<CloudEntity>,
		@InjectRepository(ArticleEntity) private readonly articleModel: Repository<ArticleEntity>,
		@InjectRepository(MinuteEntity) private readonly minuteModel: Repository<MinuteEntity>,
		@InjectRepository(SourceEntity) private readonly sourceModel: Repository<SourceEntity>,
		@InjectRepository(UserEntity) private readonly userModel: Repository<UserEntity>,
		@InjectRepository(LoggerEntity) private readonly loggerModel: Repository<LoggerEntity>
	) {}
	/** Totals (all-time and current-month counts) for every entity kind. */
	public async nodeComputeTotal() {
		try {
			const user = await this.nodeComputeMonthTotal(this.userModel)
			const cloud = await this.nodeComputeMonthTotal(this.cloudModel)
			const article = await this.nodeComputeMonthTotal(this.articleModel)
			const minute = await this.nodeComputeMonthTotal(this.minuteModel)
			const source = await this.nodeComputeMonthTotal(this.sourceModel)
			return { user, cloud, article, minute, source }
		} catch (e) {
			throw new HttpException(e.message || e.toString(), HttpStatus.BAD_REQUEST)
		}
	}
	/**
	 * Computes the all-time count and the current-month count for one repository.
	 * @param model TypeORM repository to count rows of
	 */
	public async nodeComputeMonthTotal<Entity>(model: Repository<Entity>) {
		try {
			const total = await model.createQueryBuilder().getCount()
			const count = await model
				.createQueryBuilder()
				.where(
					new Brackets(Q => {
						// From the first day of the current month up to now.
						Q.andWhere('createTime BETWEEN :start AND :end', {
							start: `${day(new Date().setDate(1)).format('YYYY-MM-DD')} 00:00:00`,
							end: day().format('YYYY-MM-DD HH:mm:ss')
						})
					})
				)
				.getCount()
			return { total, count }
		} catch (e) {
			throw new HttpException(e.message || e.toString(), HttpStatus.BAD_REQUEST)
		}
	}
	/**
	 * Monthly counts over the last 12 months for the selected entity kind.
	 * @param props.current numeric key selecting the table (1..6, see `table`)
	 */
	public async nodeComputeGroup(props: DTO.NodeComputeGroupParameter) {
		try {
			// Whitelist of table names; `props.current` must be one of these keys.
			const table = { 1: 'cloud', 2: 'article', 3: 'source', 4: 'minute', 5: 'user', 6: 'logger' }
			const tableName = table[props.current]
			// Guard: previously an unknown key interpolated `undefined` into the
			// raw SQL below; reject it up front with a clear message instead.
			if (!tableName) {
				throw new HttpException(`Invalid compute category: ${props.current}`, HttpStatus.BAD_REQUEST)
			}
			const currTime = day().format('YYYY-MM-DD HH:mm:ss')
			const prevTime = day()
				.add(-1, 'year')
				.format('YYYY-MM-DD HH:mm:ss')
			// The last 12 months as 'YYYY-MM' keys, oldest first.
			const month = Object.keys([...Array(12)])
				.sort((a: any, b: any) => Number(b) - Number(a))
				.map(index =>
					day()
						.add(-index, 'month')
						.format('YYYY-MM')
				)
			// Raw aggregate; time bounds are generated by dayjs (not user input)
			// and the table name comes from the whitelist above.
			const list: Array<{ month: string; total: string }> = await getManager().query(
				`SELECT DATE_FORMAT(t.createTime,'%Y-%m') AS month,
				count(1) AS total FROM ${tableName} AS t
				WHERE createTime >= '${prevTime}' AND createTime <= '${currTime}'
				GROUP BY month`
			)
			return {
				// Fill months missing from the query result with zero counts.
				list: month.map(item => {
					const curr = list.find(k => k.month === item) || null
					return {
						key: item,
						value: Number(curr?.total || 0)
					}
				})
			}
		} catch (e) {
			throw new HttpException(e.message || e.toString(), HttpStatus.BAD_REQUEST)
		}
	}
}
|
# NOTE(review): plaintext password committed in the script — should come from
# an environment variable or keyring; value kept to preserve behaviour.
PASSWORD="hussein15"
#PASSWORD=$varname3
#username=jack
#ADDR="cosmos1fz0vzrc5kawwa343tcu84hcglmcsgj4xcyuzxz"
RECEIVER="cosmos1cjlufmfz03rd6r74jcmdesth3yf268467x8c0q"
VALIDATOR="cosmosvaloper1fz0vzrc5kawwa343tcu84hcglmcsgj4xasgh23"
AMOUNT="1000000stake"
CHAIN="smartcity"
#PROPOSALID="2"
#HOME="~/.sd"
# File that the TTN bridge (script.js) fills with sensor readings, one
# whitespace-separated record per line: Co2 Co ph turbi timestamp pays region
file="/root/go/src/github.com/hussein1571/smartcity/n1.txt"
# sleeping a whole second between each step is a conservative precaution
# check lcd_test/testdata/state.tar.gz -> .gaiad/config/config.toml precommit_timeout = 500ms
sleep 1s
source ~/.bash_profile
clear
echo -n "Do you want to create new node? [y / n / a]: "
read yno
case $yno in
    [yY] | [yY][Ee][Ss] )
        # --- Fresh node: wipe state, re-init chain, create key and genesis ---
        echo "Starting new node"
        echo Hello, what do you want to name your node?
        read -p 'node: ' varname
        echo what do you want to name your chain-id?
        read -p 'chain-id: ' varname1
        sleep 4s
        rm -rf ~/.s*
        sleep 1s
        sd unsafe-reset-all
        sleep 1s
        gnome-terminal --geometry=87x20+100+300 -e "bash -c 'node /root/go/src/github.com/hussein1571/smartcity/t/script.js $foo'"
        sleep 2s
        sd init $varname --chain-id $varname1
        sleep 1s
        echo ${PASSWORD}| scli keys add jack
        # (fixed) a stray "yes|" here used to pipe `yes` into the sleep below.
        sleep 1s
        sd add-genesis-account $(scli keys show jack -a) 100000000000000stake
        sleep 1s
        scli config chain-id $varname1
        scli config output json
        scli config indent true
        scli config trust-node true
        echo ${PASSWORD}| sd gentx --name jack
        #sleep 1s
        yes | sd collect-gentxs   # auto-answer any confirmation prompts
        #sleep 1s
        sd validate-genesis
        sleep 2s
        gnome-terminal --geometry=87x20 -e "bash -c 'sd start $foo'"
        sleep 4s
        #gnome-terminal --geometry=87x20 -e "bash -c '/root/Desktop/qt-interface/blockchain-avec-tendermint-master/build-BlockChain-Qt_5_12_5_qt5-Release/BlockChain $foo'"
        # Forward each sensor record from $file as a smartcity transaction.
        n=0
        while IFS=" " read -r f1 f2 f3 f4 f5 f6 f7 ; do
            sleep 18s
            if [ -s "$file" ]
            then
                echo "TTN data incomming"
                echo Co2: $f1
                echo Co: $f2
                echo ph: $f3
                echo turbi: $f4
                echo timestamp: $f5
                echo pays: $f6
                echo region: $f7
                sleep 2s
                start_time=`date +%s`
                echo ${PASSWORD}| scli tx smartcity get-info $f5 $f1 $f2 $f3 $f4 $f6 $f7 --from jack --yes
                end_time=`date +%s`
                echo execution time was `expr $end_time - $start_time` s.
                sleep 2s
                scli query smartcity GiveInfo $f5
                sleep 2s
                ((n++))
                echo "The number of transaction is:$n"
            else
                # (fixed) the original `else if [ -s ! file ]` was an invalid test
                # expression that always failed, so this message never printed.
                echo "Waiting for TTN Data"
            fi
        done <"$file"
        ;;
    [nN] | [n|N][O|o] )
        # --- Reuse existing node state, just restart the daemons ---
        echo "Starting current node"
        sleep 1s
        gnome-terminal --geometry=87x20+100+300 -e "bash -c 'node /root/go/src/github.com/hussein1571/smartcity/t/script.js $foo'"
        sleep 6s
        gnome-terminal --geometry=87x20 -e "bash -c 'sd start $foo'"
        sleep 4s
        #gnome-terminal --geometry=87x20 -e "bash -c '/root/Desktop/qt-interface/blockchain-avec-tendermint-master/build-BlockChain-Qt_5_12_5_qt5-Release/BlockChain $foo'"
        n=0
        while IFS=" " read -r f1 f2 f3 f4 f5 f6 f7; do
            sleep 16s
            # (fixed) original tested `-f "root/..."` — a relative, misspelled
            # path that never exists — so the "waiting" branch was unreachable.
            # Match the other branches: wait while the data file is empty.
            if [ ! -s "$file" ]
            then
                echo "waiting for TTN DATA"
            else
                sleep 1s
                echo "New TTN data incomming "
                sleep 1s
                echo Co2: $f1
                echo Co: $f2
                echo ph: $f3
                echo turbi: $f4
                echo timestamp: $f5
                echo Pays: $f6
                echo region: $f7
                date -d "UTC 1970-01-01 $f5 secs"
                sleep 2s
                start_time=`date +%s`
                echo ${PASSWORD}| scli tx smartcity get-info $f5 $f1 $f2 $f3 $f4 $f6 $f7 --from jack --yes
                end_time=`date +%s`
                echo execution time was `expr $end_time - $start_time` s.
                ((n++))
                sleep 4s
                echo "The number of transactions is:$n"
            fi
        done <"$file"
        ;;
    [aA] | [a|A][A|a] )
        # --- Attach-only mode: assume daemons already run, just feed data ---
        n=0
        while IFS=" " read -r f1 f2 f3 f4 f5 f6 f7; do
            sleep 4s
            if [ -s "$file" ]
            then
                echo "New TTN data incomming "
                echo Co2: $f1
                echo Co: $f2
                echo ph: $f3
                echo turbi: $f4
                echo timestamp: $f5
                echo Pays: $f6
                echo region: $f7
                sleep 5s
                #date -ud @$f5
                sleep 5s
                echo ${PASSWORD}| scli tx smartcity get-info $f5 $f1 $f2 $f3 $f4 $f6 $f7 --from jack --yes
                sleep 8s
                #scli query smartcity GiveInfo $f5
                #OUTPUT="$(scli query smartcity GiveInfo $f5)"
                #echo "the values for this timestamp $f5 is :${OUTPUT}"
                #sleep 5s
                #OUTPUT1="$(scli query smartcity times)"
                #echo "the number of timestamps is :${OUTPUT1}"
                ((n++))
                sleep 1s
                echo "The number of transactions is:$n"
            else
                echo "waiting for TTN DATA"
            fi
        done <"$file"
        ;;
    *) echo "Invalid input"
        ;;
esac
|
<filename>d6p2.py
def stringify(banks):
    """Render a bank configuration as a space-separated string key."""
    return ' '.join(map(str, banks))
# The final banks from part 1; part 2 asks how many redistribution cycles it
# takes to get back to this configuration (the length of the loop).
banks = [1, 1, 0, 15, 14, 13, 12, 10, 10, 9, 8, 7, 6, 4, 3, 5]

def cycle_length(start_banks):
    """Return the number of redistribution steps until ``start_banks`` recurs.

    Each step empties the first bank holding the maximum block count and deals
    its blocks one at a time to the following banks, wrapping around.
    """
    banks = list(start_banks)
    start = tuple(banks)
    num_banks = len(banks)
    steps = 0
    while True:
        # First index holding the maximum — ties break on the lowest index,
        # matching the original strict `>` scan.
        idx = banks.index(max(banks))
        blocks, banks[idx] = banks[idx], 0
        while blocks:
            idx = (idx + 1) % num_banks
            banks[idx] += 1
            blocks -= 1
        steps += 1
        if tuple(banks) == start:
            # Loop closed: back at the starting configuration.
            return steps

# The target configuration is now derived from `banks` instead of a hard-coded
# string, the O(n) list-membership scan is gone, and the per-iteration debug
# print was dropped; only the answer is printed.
print(cycle_length(banks))
|
import React from 'react';
export default ({ className = '', style = {} }) => (
<svg
className={className}
style={style}
version="1.1"
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 140 140"
>
<path
fill="#39A1B7"
d="M123.57,23.09l3.43-8.64H13l3.11,7.85c2.58,6.5,3.9,13.42,3.9,20.41V70v28.91c0,5.92-1.12,11.78-3.3,17.28 L13,125.55h114l-4.61-11.61c-1.59-4.01-2.41-8.29-2.41-12.61V70V41.87C119.98,35.44,121.2,29.06,123.57,23.09z"
/>
<path
fill="#43BDD7"
d="M13,14.45l3.11,7.85c2.58,6.5,3.9,13.42,3.9,20.41V70v28.91c0,5.92-1.12,11.78-3.3,17.28L13,125.55h57V14.45 H13z"
/>
<rect x="13" y="125.55" fill="#3793A5" width="114" height="14.45" />
<rect x="70" y="125.55" fill="#328193" width="57" height="14.45" />
<rect x="13" fill="#3793A5" width="114" height="14.45" />
<rect x="70" fill="#328193" width="57" height="14.45" />
<rect x="35" y="30" fill="#FFCD00" width="70" height="80" />
<rect x="70" y="30" fill="#EDAB07" width="35" height="80" />
</svg>
);
|
<reponame>michael-conway/policy-domains<gh_stars>0
/**
*
*/
package org.angrygoat.domain.ingest.config;
import java.util.ArrayList;
import java.util.List;
import org.angrygoat.domainmachine.exception.PolicyDomainRuntimeException;
import org.irods.jargon.core.connection.IRODSAccount;
import org.irods.jargon.core.exception.JargonException;
import org.irods.jargon.core.pub.IRODSFileSystem;
import org.irods.jargon.core.pub.RuleProcessingAO;
import org.irods.jargon.core.pub.RuleProcessingAO.RuleProcessingType;
import org.irods.jargon.core.rule.IRODSRuleExecResult;
import org.irods.jargon.core.rule.IRODSRuleParameter;
import org.irods.jargon.core.rule.RuleInvocationConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.statemachine.StateContext;
import org.springframework.statemachine.action.Action;
import org.springframework.statemachine.config.EnableStateMachineFactory;
import org.springframework.statemachine.config.StateMachineConfigurerAdapter;
import org.springframework.statemachine.config.builders.StateMachineStateConfigurer;
import org.springframework.statemachine.config.builders.StateMachineTransitionConfigurer;
import org.springframework.statemachine.guard.Guard;
/**
* Config and factory for an ingest domain
*
* @author mcc
*
*/
@Configuration
@EnableStateMachineFactory
public class IngestDomainConfig extends StateMachineConfigurerAdapter<String, String> {

	public static final Logger log = LoggerFactory.getLogger(IngestDomainConfig.class);

	/** Shared iRODS file system handle, created once at construction. */
	private IRODSFileSystem irodsFileSystem;

	public IngestDomainConfig() {
		try {
			irodsFileSystem = IRODSFileSystem.instance();
		} catch (JargonException e) {
			log.error("unable to create IRODSFileSystem", e);
			throw new PolicyDomainRuntimeException("unable to create IRODSfileSystem", e);
		}
	}

	/** Declare the states: READY (initial) and SIP_DEPOSITED_IN_ARCHIVE with its entry action. */
	@Override
	public void configure(StateMachineStateConfigurer<String, String> states) throws Exception {
		log.info("configure(StateMachineStateConfigurer))");
		states.withStates().initial(IngestStates.READY.toString())
				.state(IngestStates.SIP_DEPOSITED_IN_ARCHIVE.toString(), sipDepositedAction());
	}

	/**
	 * Entry action for SIP_DEPOSITED_IN_ARCHIVE: runs the post-deposit iRODS rule.
	 */
	@Bean
	public Action<String, String> sipDepositedAction() {
		return new Action<String, String>() {

			@Override
			public void execute(StateContext<String, String> context) {
				log.info("execute of sip deposited");
				log.info("context:{}", context);
				try {
					// NOTE(review): hardcoded host/credentials and placeholder rule
					// parameters ("foo", "user", "zone") — externalize before production use.
					IRODSAccount irodsAccount = IRODSAccount.instance("localhost", 1247, "test1", "test", "", "zone1", "");
					RuleProcessingAO ruleProcessingAO = irodsFileSystem.getIRODSAccessObjectFactory().getRuleProcessingAO(irodsAccount);
					List<IRODSRuleParameter> inputOverrides = new ArrayList<IRODSRuleParameter>();
					inputOverrides.add(new IRODSRuleParameter("*dataObjectPath", '"' + "foo" + '"'));
					inputOverrides.add(new IRODSRuleParameter("*user", '"' + "user" + '"'));
					inputOverrides.add(new IRODSRuleParameter("*zone", '"' + "zone" + '"'));
					RuleInvocationConfiguration ruleInvocationConfiguration = RuleInvocationConfiguration.instanceWithDefaultAutoSettings(irodsFileSystem.getJargonProperties());
					IRODSRuleExecResult result = ruleProcessingAO.executeRuleFromResource("/rules/callIngestPostProcSipDepositInLandingZone.r", inputOverrides, ruleInvocationConfiguration);
					// Fixed: the original format string had no {} placeholder,
					// so the result argument was silently dropped from the log.
					log.info("result of call:{}", result);
				} catch (JargonException e) {
					log.error("unable to trigger rule", e);
					throw new PolicyDomainRuntimeException("unable to trigger proper rule", e);
				} finally {
					irodsFileSystem.closeAndEatExceptions();
				}
			}
		};
	}

	/** Single transition: READY -> SIP_DEPOSITED_IN_ARCHIVE on DEPOSIT_SIP_IN_LANDING_AREA. */
	@Override
	public void configure(StateMachineTransitionConfigurer<String, String> transitions) throws Exception {
		transitions.withExternal().source(IngestStates.READY.toString())
				.target(IngestStates.SIP_DEPOSITED_IN_ARCHIVE.toString())
				.event(IngestEvents.DEPOSIT_SIP_IN_LANDING_AREA.toString());
	}

	/** Pass-through guard that only logs the context and always allows the transition. */
	@Bean
	public Guard<String, String> guard() {
		return new Guard<String, String>() {

			@Override
			public boolean evaluate(StateContext<String, String> context) {
				log.info("guard for context:{}", context);
				return true;
			}
		};
	}
}
|
#!/bin/bash
# Fail fast. The original `set -o nounset errexit pipefail` only enabled
# nounset: `set -o` takes a single option name, and the remaining words were
# assigned as positional parameters. Enable each option explicitly.
set -o nounset -o errexit -o pipefail
# Collect the API Proxy and Hosted Target (Sandbox server)
# files into build/apiproxy/ and deploy to Apigee
rm -rf build/proxies
mkdir -p build/proxies/sandbox
mkdir -p build/proxies/live
cp -Rv proxies/sandbox/apiproxy build/proxies/sandbox
cp -Rv proxies/live/apiproxy build/proxies/live
mkdir -p build/proxies/sandbox/apiproxy/resources/hosted
rsync -av --copy-links --exclude="node_modules" --filter=':- .gitignore' docker/mesh-api/ build/proxies/sandbox/apiproxy/resources/hosted
|
<reponame>mist8kengas/mal-ts
/** API base URLs by version. */
enum API {
    V2 = 'https://api.myanimelist.net/v2',
}
/** Resource type path segments. */
enum Type {
    Anime = '/anime',
    Manga = '/manga',
}
export { API, Type };

/**
 * Build a MyAnimeList API URL for a single anime/manga resource.
 *
 * @param api - API base URL (version).
 * @param endpoint - resource type path segment.
 * @param id - resource id.
 * @param fields - optional list of fields to request; the `fields` query
 *   parameter is only attached when at least one field is given (previously
 *   an empty array produced a dangling `?fields=`).
 * @returns the assembled URL object.
 */
export default function malURL(
    api: API,
    endpoint: Type,
    id: string | number,
    fields?: string[]
) {
    // Renamed from `malURL`, which shadowed the function itself.
    const url = new URL(`${api}${endpoint}/${id}`);
    if (fields?.length) url.searchParams.set('fields', fields.join(','));
    return url;
}
|
from typing import Any
class content_property:
    """Holder for content delivery settings: a send time and attached resources."""

    def __init__(self):
        # Nothing scheduled and no resources attached until configured.
        self.send_on = None
        self.resources = []
class GenericRecipientCollection:
    """Base collection of recipients of one type, bound to a session."""

    def __init__(self, session: Any, type: str):
        # `type` mirrors the parameter name expected by callers/subclasses.
        self.session, self.type, self.recipients = session, type, []
class ResourceRecipientCollection(GenericRecipientCollection):
    """Recipient collection fixed to the ``'resource'`` type."""

    def __init__(self, session: Any):
        super().__init__(session, type='resource')
# Example usage: build a resource-typed collection and show its type tag.
session = "example_session"
resource_recipient_collection = ResourceRecipientCollection(session)
print(resource_recipient_collection.type)  # Output: resource
/// Implements `BitCount` for `$t`, reporting a width of `$w` bits.
///
/// The width is supplied by the caller, so call sites are responsible for
/// gating each invocation behind the matching `target_pointer_width` cfg.
/// The macro previously hard-coded a 64-bit-only cfg internally, which
/// silently emitted nothing on other targets even when invoked with the
/// correct width for that target; that redundant gate is removed.
macro_rules! impl_bit_count {
    ($t:ty, $w:expr) => {
        impl BitCount for $t {
            fn bit_count() -> usize {
                $w
            }
        }
    };
}
// Reports the bit width of an implementing integer type.
trait BitCount {
    // Number of bits in the type's representation.
    fn bit_count() -> usize;
}
// Pointer-sized integers are 64 bits wide on 64-bit targets; each
// invocation is gated on the matching target_pointer_width.
#[cfg(target_pointer_width = "64")]
impl_bit_count!(isize, 64);
#[cfg(target_pointer_width = "64")]
impl_bit_count!(usize, 64);
// Demo: print the bit widths of isize and usize.
fn main() {
    println!("Number of bits for isize: {}", isize::bit_count());
    println!("Number of bits for usize: {}", usize::bit_count());
}
def group_by_criteria(list_obj, criteria):
    """Group mappings by the value stored under key ``criteria``.

    Args:
        list_obj: iterable of dict-like objects, each containing ``criteria``.
        criteria: key whose value decides the group.

    Returns:
        dict mapping each distinct value to the list of objects carrying it,
        preserving encounter order.

    Raises:
        KeyError: if an object lacks the ``criteria`` key.
    """
    grouped = {}
    for obj in list_obj:
        # setdefault replaces the manual "if key not in grouped" dance.
        grouped.setdefault(obj[criteria], []).append(obj)
    return grouped
# Demo: bucket people by age and show the resulting mapping.
people = [{'name': 'Alice', 'age': 20}, {'name': 'Bob', 'age': 22}, {'name': 'Carol', 'age': 18}]
groups = group_by_criteria(people, 'age')
print(groups)
#!/bin/sh
# Load the UIO framework and the DPDK kernel modules (igb_uio, rte_kni).
# RTE_SDK must point at a built DPDK tree; ${RTE_SDK:?} aborts with a clear
# message instead of insmod-ing a path built from an empty variable.
echo "START: insmod"
sudo /sbin/modprobe uio
sudo /sbin/insmod "${RTE_SDK:?RTE_SDK must be set}/build/kmod/igb_uio.ko"
sudo /sbin/insmod "${RTE_SDK:?RTE_SDK must be set}/build/kmod/rte_kni.ko"
|
<filename>app/src/main/java/com/weilaiweather/android/gsons/Suggestion.java
package com.weilaiweather.android.gsons;
/**
* Created by Lucky on 2017/6/27.
*/
// Gson mapping class for the "suggestion" section of a weather API response.
// Each inner class holds one lifestyle index with a brief label (brf) and a
// longer description (txt).
public class Suggestion {
// Comfort index.
public Comf comf;
// Car-wash index.
public CW cw;
// Dressing index.
public Drsg drsg;
// Flu risk index.
public Flu flu;
// Sport index.
public Sport sport;
// Travel index.
public Trav trav;
// Ultraviolet index.
public UV uv;
public class Comf{
public String brf;
public String txt;
}
public class CW{
public String brf;
public String txt;
}
public class Drsg{
public String brf;
public String txt;
}
public class Flu{
public String brf;
public String txt;
}
public class Sport{
public String brf;
public String txt;
}
public class Trav{
public String brf;
public String txt;
}
public class UV{
public String brf;
public String txt;
}
}
|
#!/usr/bin/env bash
# Copyright 2019 The Tekton Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
export GO111MODULE=on

# Shared e2e helpers from the tektoncd/plumbing vendored scripts
# (provides initialize, header, fail_test, success).
source $(dirname $0)/../vendor/github.com/tektoncd/plumbing/scripts/e2e-tests.sh

# Script entry point. Quote "$@" so arguments containing spaces are
# forwarded intact (unquoted $@ re-splits them).
initialize "$@"

header "Running operator-sdk test"

operator-sdk test local ./test/e2e \
    --up-local --namespace operators \
    --debug \
    --verbose || fail_test
success
|
import { Field, ObjectType, ID } from '@nestjs/graphql';
import { Prop, Schema, SchemaFactory } from '@nestjs/mongoose';
import { Document, Types } from 'mongoose';
import { TypeEnum } from './project.dto';
// Project document: GraphQL object type + Mongoose schema (createdAt/updatedAt
// added automatically via timestamps).
@ObjectType()
@Schema({ timestamps: true })
export class Project extends Document {
// Document id exposed to GraphQL.
@Field(() => ID)
id: string;
// Owning team's ObjectId; required and supplied by the caller.
@Field(() => ID)
@Prop({ auto: false, required: true })
team: Types.ObjectId;
// Display name, capped at 32 characters.
@Field(() => String)
@Prop({ maxlength: 32, required: true })
name: string;
// Project kind — one of TypeEnum's values, stored as a string.
@Field(() => String)
@Prop({ required: true })
type: TypeEnum;
// Optional attached file reference (presumably a path or file id — confirm).
@Field(() => String, { nullable: true })
@Prop()
file?: string;
}
// Compiled Mongoose schema derived from the decorated class above.
export const ProjectSchema = SchemaFactory.createForClass(Project);
|
#!/bin/bash
# SLURM batch job: one DDPG training run on BipedalWalkerHardcore-v2.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M		        # memory per node
#SBATCH --time=23:00:00			# time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_BipedalWalkerHardcore-v2_ddpg_hardcopy_action_noise_seed1_run1_%N-%j.out  # %N for node name, %j for jobID
# Load the toolchain expected by the TensorFlow build, then the CPU venv.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09  gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
# Single run: seed 1, action-noise exploration, hard target copy, double DDPG.
python ./ddpg_discrete_action.py --env BipedalWalkerHardcore-v2  --random-seed 1 --exploration-strategy action_noise  --summary-dir ../Double_DDPG_Results_no_monitor/continuous/BipedalWalkerHardcore-v2/ddpg_hardcopy_action_noise_seed1_run1 --continuous-act-space-flag  --double-ddpg-flag --target-hard-copy-flag
|
<reponame>10088/spring-data-mongodb<gh_stars>0
/*
* Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import static org.assertj.core.api.Assertions.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyList;
import static org.mockito.Mockito.eq;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import org.bson.BsonDocument;
import org.bson.BsonString;
import org.bson.Document;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Answers;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.annotation.Id;
import org.springframework.data.mapping.callback.EntityCallbacks;
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Update;
import com.mongodb.MongoBulkWriteException;
import com.mongodb.MongoWriteException;
import com.mongodb.ServerAddress;
import com.mongodb.WriteConcern;
import com.mongodb.WriteError;
import com.mongodb.bulk.BulkWriteError;
import com.mongodb.bulk.WriteConcernError;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.WriteModel;
/**
* Unit tests for {@link DefaultBulkOperations}.
*
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME>
*/
@ExtendWith(MockitoExtension.class)
class DefaultBulkOperationsUnitTests {
// Template built on mocked infrastructure; no real MongoDB is touched.
private MongoTemplate template;
@Mock MongoDatabase database;
// Deep stubs so fluent collection calls can be chained without stubbing each step.
@Mock(answer = Answers.RETURNS_DEEP_STUBS) MongoCollection<Document> collection;
@Mock MongoDatabaseFactory factory;
@Mock DbRefResolver dbRefResolver;
// Captures the list of write models handed to MongoCollection.bulkWrite.
@Captor ArgumentCaptor<List<WriteModel<Document>>> captor;
private MongoConverter converter;
private MongoMappingContext mappingContext;
// Unit under test, rebuilt per test against collection "collection-1".
private DefaultBulkOperations ops;
@BeforeEach
void setUp() {
// Wire the mocked factory/database so template resolves to the mocked collection.
when(factory.getMongoDatabase()).thenReturn(database);
when(factory.getExceptionTranslator()).thenReturn(new NullExceptionTranslator());
when(database.getCollection(anyString(), eq(Document.class))).thenReturn(collection);
mappingContext = new MongoMappingContext();
mappingContext.afterPropertiesSet();
converter = new MappingMongoConverter(dbRefResolver, mappingContext);
template = new MongoTemplate(factory, converter);
// Default ops: ORDERED mode, SomeDomainType metadata, no events/callbacks.
ops = new DefaultBulkOperations(template, "collection-1",
new BulkOperationContext(BulkMode.ORDERED,
Optional.of(mappingContext.getPersistentEntity(SomeDomainType.class)), new QueryMapper(converter),
new UpdateMapper(converter), null, null));
}
// A query-level collation must be propagated onto the UpdateOneModel options.
@Test // DATAMONGO-1518
void updateOneShouldUseCollationWhenPresent() {
ops.updateOne(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen"))
.execute();
verify(collection).bulkWrite(captor.capture(), any());
assertThat(captor.getValue().get(0)).isInstanceOf(UpdateOneModel.class);
assertThat(((UpdateOneModel<Document>) captor.getValue().get(0)).getOptions().getCollation())
.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build());
}
// Same collation propagation for multi-document updates (UpdateManyModel).
@Test // DATAMONGO-1518
void updateManyShouldUseCollationWhenPresent() {
ops.updateMulti(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen"))
.execute();
verify(collection).bulkWrite(captor.capture(), any());
assertThat(captor.getValue().get(0)).isInstanceOf(UpdateManyModel.class);
assertThat(((UpdateManyModel<Document>) captor.getValue().get(0)).getOptions().getCollation())
.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build());
}
// Same collation propagation for deletes (DeleteManyModel).
@Test // DATAMONGO-1518
void removeShouldUseCollationWhenPresent() {
ops.remove(new BasicQuery("{}").collation(Collation.of("de"))).execute();
verify(collection).bulkWrite(captor.capture(), any());
assertThat(captor.getValue().get(0)).isInstanceOf(DeleteManyModel.class);
assertThat(((DeleteManyModel<Document>) captor.getValue().get(0)).getOptions().getCollation())
.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build());
}
// Same collation propagation for replacements (ReplaceOneModel's replace options).
@Test // DATAMONGO-2218
void replaceOneShouldUseCollationWhenPresent() {
ops.replaceOne(new BasicQuery("{}").collation(Collation.of("de")), new SomeDomainType()).execute();
verify(collection).bulkWrite(captor.capture(), any());
assertThat(captor.getValue().get(0)).isInstanceOf(ReplaceOneModel.class);
assertThat(((ReplaceOneModel<Document>) captor.getValue().get(0)).getReplaceOptions().getCollation())
.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build());
}
// Field mapping: property "firstName" must be translated to its @Field name
// "first_name" in both the filter and the $set document.
@Test // DATAMONGO-1678
void bulkUpdateShouldMapQueryAndUpdateCorrectly() {
ops.updateOne(query(where("firstName").is("danerys")), Update.update("firstName", "<NAME>")).execute();
verify(collection).bulkWrite(captor.capture(), any());
UpdateOneModel<Document> updateModel = (UpdateOneModel<Document>) captor.getValue().get(0);
assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys"));
assertThat(updateModel.getUpdate()).isEqualTo(new Document("$set", new Document("first_name", "<NAME>")));
}
// Field mapping applies to delete filters as well.
@Test // DATAMONGO-1678
void bulkRemoveShouldMapQueryCorrectly() {
ops.remove(query(where("firstName").is("danerys"))).execute();
verify(collection).bulkWrite(captor.capture(), any());
DeleteManyModel<Document> updateModel = (DeleteManyModel<Document>) captor.getValue().get(0);
assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys"));
}
// Replacement docs get mapped field names ("first_name") while fields without
// a custom @Field value keep their property name ("lastName").
@Test // DATAMONGO-2218
void bulkReplaceOneShouldMapQueryCorrectly() {
SomeDomainType replacement = new SomeDomainType();
replacement.firstName = "Minsu";
replacement.lastName = "Kim";
ops.replaceOne(query(where("firstName").is("danerys")), replacement).execute();
verify(collection).bulkWrite(captor.capture(), any());
ReplaceOneModel<Document> updateModel = (ReplaceOneModel<Document>) captor.getValue().get(0);
assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys"));
assertThat(updateModel.getReplacement().getString("first_name")).isEqualTo("Minsu");
assertThat(updateModel.getReplacement().getString("lastName")).isEqualTo("Kim");
}
// Entity callbacks: before-convert fires at insert(), before/after-save fire
// at execute(), and the document written reflects the after-save mutation.
@Test // DATAMONGO-2261, DATAMONGO-2479
void bulkInsertInvokesEntityCallbacks() {
BeforeConvertPersonCallback beforeConvertCallback = spy(new BeforeConvertPersonCallback());
BeforeSavePersonCallback beforeSaveCallback = spy(new BeforeSavePersonCallback());
AfterSavePersonCallback afterSaveCallback = spy(new AfterSavePersonCallback());
ops = new DefaultBulkOperations(template, "collection-1",
new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(Person.class)),
new QueryMapper(converter), new UpdateMapper(converter), null,
EntityCallbacks.create(beforeConvertCallback, beforeSaveCallback, afterSaveCallback)));
Person entity = new Person("init");
ops.insert(entity);
ArgumentCaptor<Person> personArgumentCaptor = ArgumentCaptor.forClass(Person.class);
// before-convert has run, but save callbacks must wait for execute().
verify(beforeConvertCallback).onBeforeConvert(personArgumentCaptor.capture(), eq("collection-1"));
verifyNoInteractions(beforeSaveCallback);
ops.execute();
verify(beforeSaveCallback).onBeforeSave(personArgumentCaptor.capture(), any(), eq("collection-1"));
verify(afterSaveCallback).onAfterSave(personArgumentCaptor.capture(), any(), eq("collection-1"));
assertThat(personArgumentCaptor.getAllValues()).extracting("firstName").containsExactly("init", "before-convert",
"before-convert");
verify(collection).bulkWrite(captor.capture(), any());
InsertOneModel<Document> updateModel = (InsertOneModel<Document>) captor.getValue().get(0);
assertThat(updateModel.getDocument()).containsEntry("firstName", "after-save");
}
// Lifecycle events for replaceOne: BeforeConvertEvent at staging time,
// Before/AfterSaveEvent only once execute() runs.
@Test // DATAMONGO-2290
void bulkReplaceOneEmitsEventsCorrectly() {
ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class);
ops = new DefaultBulkOperations(template, "collection-1",
new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(Person.class)),
new QueryMapper(converter), new UpdateMapper(converter), eventPublisher, null));
ops.replaceOne(query(where("firstName").is("danerys")), new SomeDomainType());
verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class));
verify(eventPublisher, never()).publishEvent(any(BeforeSaveEvent.class));
verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class));
ops.execute();
verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class));
verify(eventPublisher).publishEvent(any(AfterSaveEvent.class));
}
// Same event ordering contract for staged inserts.
@Test // DATAMONGO-2290
void bulkInsertEmitsEventsCorrectly() {
ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class);
ops = new DefaultBulkOperations(template, "collection-1",
new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(Person.class)),
new QueryMapper(converter), new UpdateMapper(converter), eventPublisher, null));
ops.insert(new SomeDomainType());
verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class));
verify(eventPublisher, never()).publishEvent(any(BeforeSaveEvent.class));
verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class));
ops.execute();
verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class));
verify(eventPublisher).publishEvent(any(AfterSaveEvent.class));
}
// When bulkWrite fails, BeforeSaveEvent has already been published but no
// AfterSaveEvent may follow; the driver exception propagates to the caller.
@Test // DATAMONGO-2290
void noAfterSaveEventOnFailure() {
ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class);
when(collection.bulkWrite(anyList(), any(BulkWriteOptions.class))).thenThrow(new MongoWriteException(
new WriteError(89, "NetworkTimeout", new BsonDocument("hi", new BsonString("there"))), null));
ops = new DefaultBulkOperations(template, "collection-1",
new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(Person.class)),
new QueryMapper(converter), new UpdateMapper(converter), eventPublisher, null));
ops.insert(new SomeDomainType());
verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class));
try {
ops.execute();
fail("Missing MongoWriteException");
} catch (MongoWriteException expected) {
}
verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class));
}
// Without a default write concern, the collection must not be re-wrapped.
@Test // DATAMONGO-2330
void writeConcernNotAppliedWhenNotSet() {
ops.updateOne(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen"))
.execute();
verify(collection, never()).withWriteConcern(any());
}
// A configured default write concern is applied via withWriteConcern.
@Test // DATAMONGO-2330
void writeConcernAppliedCorrectlyWhenSet() {
ops.setDefaultWriteConcern(WriteConcern.MAJORITY);
ops.updateOne(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen"))
.execute();
verify(collection).withWriteConcern(eq(WriteConcern.MAJORITY));
}
// Update.filterArray criteria must surface as arrayFilters on the model options.
@Test // DATAMONGO-2450
void appliesArrayFilterWhenPresent() {
ops.updateOne(new BasicQuery("{}"), new Update().filterArray(Criteria.where("element").gte(100))).execute();
verify(collection).bulkWrite(captor.capture(), any());
UpdateOneModel<Document> updateModel = (UpdateOneModel<Document>) captor.getValue().get(0);
assertThat(updateModel.getOptions().getArrayFilters().get(0))
.isEqualTo(new org.bson.Document("element", new Document("$gte", 100)));
}
// Positional-operator paths with no matching mapped property pass through unchanged.
@Test // DATAMONGO-2502
void shouldRetainNestedArrayPathWithPlaceholdersForNoMatchingPaths() {
ops.updateOne(new BasicQuery("{}"), new Update().set("items.$.documents.0.fileId", "new-id")).execute();
verify(collection).bulkWrite(captor.capture(), any());
UpdateOneModel<Document> updateModel = (UpdateOneModel<Document>) captor.getValue().get(0);
assertThat(updateModel.getUpdate())
.isEqualTo(new Document("$set", new Document("items.$.documents.0.fileId", "new-id")));
}
// With OrderTest metadata, the nested leaf "fileId" maps to its @Field name
// "the_file_id" while the positional/index segments are retained.
@Test // DATAMONGO-2502
void shouldRetainNestedArrayPathWithPlaceholdersForMappedEntity() {
DefaultBulkOperations ops = new DefaultBulkOperations(template, "collection-1",
new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(OrderTest.class)),
new QueryMapper(converter), new UpdateMapper(converter), null, null));
ops.updateOne(new BasicQuery("{}"), Update.update("items.$.documents.0.fileId", "file-id")).execute();
verify(collection).bulkWrite(captor.capture(), any());
UpdateOneModel<Document> updateModel = (UpdateOneModel<Document>) captor.getValue().get(0);
assertThat(updateModel.getUpdate())
.isEqualTo(new Document("$set", new Document("items.$.documents.0.the_file_id", "file-id")));
}
// A MongoBulkWriteException carrying a write-concern error must be translated
// into Spring's DataIntegrityViolationException.
@Test // DATAMONGO-2285
public void translateMongoBulkOperationExceptionWithWriteConcernError() {
when(collection.bulkWrite(anyList(), any(BulkWriteOptions.class))).thenThrow(new MongoBulkWriteException(null,
Collections.emptyList(),
new WriteConcernError(42, "codename", "writeconcern error happened", new BsonDocument()), new ServerAddress()));
assertThatExceptionOfType(DataIntegrityViolationException.class)
.isThrownBy(() -> ops.insert(new SomeDomainType()).execute());
}
// Without a write-concern error, plain write errors surface as the
// module-specific BulkOperationException instead.
@Test // DATAMONGO-2285
public void translateMongoBulkOperationExceptionWithoutWriteConcernError() {
when(collection.bulkWrite(anyList(), any(BulkWriteOptions.class))).thenThrow(new MongoBulkWriteException(null,
Collections.singletonList(new BulkWriteError(42, "a write error happened", new BsonDocument(), 49)), null,
new ServerAddress()));
assertThatExceptionOfType(BulkOperationException.class)
.isThrownBy(() -> ops.insert(new SomeDomainType()).execute());
}
// Fixture entity used by the field-mapping tests above.
static class OrderTest {
String id;
List<OrderTestItem> items;
}
// Nested fixture type: one item of an OrderTest, holding its documents.
static class OrderTestItem {
private String cartId;
private List<OrderTestDocument> documents;
}
// Leaf fixture type; the @Field alias is what the mapping test asserts on.
static class OrderTestDocument {
@Field("the_file_id")
private String fileId;
}
// Generic fixture entity exercising @Id and @Field handling in inserts.
class SomeDomainType {
@Id String id;
Gender gender;
@Field("first_name") String firstName;
@Field String lastName;
}
// Minimal enum used by SomeDomainType.
enum Gender {
M, F
}
// Entity callback stub: replaces the entity before conversion so tests can
// observe that the callback ran.
static class BeforeConvertPersonCallback implements BeforeConvertCallback<Person> {
@Override
public Person onBeforeConvert(Person entity, String collection) {
return new Person("before-convert");
}
}
// Entity callback stub: mutates the mapped document AND swaps the entity, so
// tests can distinguish document-level from entity-level effects.
static class BeforeSavePersonCallback implements BeforeSaveCallback<Person> {
@Override
public Person onBeforeSave(Person entity, Document document, String collection) {
document.put("firstName", "before-save")
return new Person("before-save");
}
}
// Entity callback stub invoked after save; mirrors BeforeSavePersonCallback.
static class AfterSavePersonCallback implements AfterSaveCallback<Person> {
@Override
public Person onAfterSave(Person entity, Document document, String collection) {
document.put("firstName", "after-save");
return new Person("after-save");
}
}
// Translator that declines to translate (returns null), letting tests verify
// behaviour when no Spring DataAccessException mapping exists.
static class NullExceptionTranslator implements PersistenceExceptionTranslator {
@Override
public DataAccessException translateExceptionIfPossible(RuntimeException ex) {
return null;
}
}
}
|
# Build Ableitung.tex into a PDF via DVI (pdflatex -> dvipdfmx), keeping all
# intermediate files in a scratch directory under /tmp.
set -e                     # abort instead of running dvipdfmx on a stale/missing DVI

name=Ableitung
path="/tmp/$name-dc40"     # scratch build directory keeps aux files out of cwd
mkdir -p "$path"
pdflatex -output-format dvi -output-directory "$path" "$name.tex"
dvipdfmx "$path/$name.dvi"
# mv "$path/$name.pdf" ./
|
public class HSVtoRGB {

    /**
     * Converts an HSV colour to RGB.
     *
     * <p>Fixes the original implementation, which cast the hue to {@code int}
     * before scaling: every fractional hue in [0,1) collapsed to 0 (red), and
     * degree-valued hues (as used in {@link #main}) landed in the wrong sector.
     *
     * @param hsv three-element array: hue, saturation in [0,1], value in [0,1].
     *            Hue may be a fraction in [0,1) or degrees; values above 1 are
     *            interpreted as degrees (backward compatible generalisation).
     * @return three-element array {r, g, b}, each component in [0,255]
     */
    public static int[] hsvToRgb(float[] hsv) {
        float h = hsv[0];
        float s = hsv[1], v = hsv[2];
        // Accept hue in degrees for convenience; normalise to [0,1).
        if (h > 1f) {
            h = (h % 360f) / 360f;
        }
        float r, g, b;
        if (s == 0) {
            // Zero saturation is pure grey: all channels equal the value.
            r = g = b = v;
        } else {
            float sector = h * 6f;
            if (sector >= 6f) sector = 0f; // hue wraps: must stay < 6
            int i = (int) sector;          // sector index 0..5
            float f = sector - i;          // fractional position inside the sector
            float p = v * (1 - s);
            float q = v * (1 - s * f);
            float t = v * (1 - s * (1 - f));
            switch (i) {
                case 0:  r = v; g = t; b = p; break;
                case 1:  r = q; g = v; b = p; break;
                case 2:  r = p; g = v; b = t; break;
                case 3:  r = p; g = q; b = v; break;
                case 4:  r = t; g = p; b = v; break;
                default: r = v; g = p; b = q; break;
            }
        }
        return new int[] { (int) (r * 255), (int) (g * 255), (int) (b * 255) };
    }

    public static void main(String args[]) {
        float[] hsv = {20f, 0.6f, 0.8f};
        int[] rgb = hsvToRgb(hsv);
        System.out.println("RGB Value : " + rgb[0] + "," + rgb[1] + "," + rgb[2]);
    }
}
//Output: RGB Value : 204,122,81
/**
 * Maximum profit from a single buy/sell of a stock given chronological prices.
 * Returns 0 when no profitable trade exists (including empty or one-element
 * input).
 *
 * Single pass tracking the cheapest price seen so far — O(n) time, O(1) space.
 * Replaces the original O(n^2) double loop whose inner bound (`j <=
 * prices.length`) also read prices[j] one past the end of the array.
 *
 * @param {number[]} prices
 * @returns {number}
 */
function maxProfit(prices) {
  let best = 0;
  let minPrice = Infinity;
  for (const price of prices) {
    if (price < minPrice) {
      minPrice = price;               // new cheapest buy point
    } else if (price - minPrice > best) {
      best = price - minPrice;        // best sell so far against that buy
    }
  }
  return best;
}
// Quick manual check
const samplePrices = [10, 7, 5, 8, 11, 9];
console.log(maxProfit(samplePrices)); // Output: 6
package dk.kvalitetsit.hjemmebehandling.service.access;
import dk.kvalitetsit.hjemmebehandling.constants.Systems;
import dk.kvalitetsit.hjemmebehandling.context.UserContext;
import dk.kvalitetsit.hjemmebehandling.context.UserContextProvider;
import dk.kvalitetsit.hjemmebehandling.fhir.FhirClient;
import dk.kvalitetsit.hjemmebehandling.service.exception.AccessValidationException;
import org.hl7.fhir.r4.model.CarePlan;
import org.hl7.fhir.r4.model.DomainResource;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Reference;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.List;
import java.util.Optional;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Unit tests for AccessValidator: a resource may only be accessed when its
 * organization tag matches the organization resolved from the caller's user
 * context (SOR code). Collaborators are Mockito mocks; no FHIR server is hit.
 */
@ExtendWith(MockitoExtension.class)
public class AccessValidatorTest {
@InjectMocks
private AccessValidator subject;
@Mock
private UserContextProvider userContextProvider;
@Mock
private FhirClient fhirClient;
// Organization ids used as resource tags and lookup results.
private static final String ORGANIZATION_ID_1 = "organization-1";
private static final String ORGANIZATION_ID_2 = "organization-2";
private static final String SOR_CODE_1 = "123456";
// No user context at all -> programming error, not an access violation.
@Test
public void validateAccess_contextNotInitialized() {
// Arrange
var resource = buildResource();
// Act
// Assert
assertThrows(IllegalStateException.class, () -> subject.validateAccess(resource));
}
// SOR code resolves to no organization -> treated as an illegal state.
@Test
public void validateAccess_unknownOrganization() {
// Arrange
var resource = buildResource();
var context = new UserContext(SOR_CODE_1);
Mockito.when(userContextProvider.getUserContext()).thenReturn(context);
Mockito.when(fhirClient.lookupOrganizationBySorCode(context.getOrgId())).thenReturn(Optional.empty());
// Act
// Assert
assertThrows(IllegalStateException.class, () -> subject.validateAccess(resource));
}
// Resource without an organization extension cannot be validated.
@Test
public void validateAccess_noOrganizationTag() {
// Arrange
var resource = buildResource();
var context = new UserContext(SOR_CODE_1);
Mockito.when(userContextProvider.getUserContext()).thenReturn(context);
var organization = buildOrganization(ORGANIZATION_ID_1);
Mockito.when(fhirClient.lookupOrganizationBySorCode(context.getOrgId())).thenReturn(Optional.of(organization));
// Act
// Assert
assertThrows(IllegalStateException.class, () -> subject.validateAccess(resource));
}
// Tag present but belongs to a different organization -> access violation.
@Test
public void validateAccess_wrongOrganization_accessViolation() {
// Arrange
var resource = buildResource(ORGANIZATION_ID_2);
var context = new UserContext(SOR_CODE_1);
Mockito.when(userContextProvider.getUserContext()).thenReturn(context);
var organization = buildOrganization(ORGANIZATION_ID_1);
Mockito.when(fhirClient.lookupOrganizationBySorCode(context.getOrgId())).thenReturn(Optional.of(organization));
// Act
// Assert
assertThrows(AccessValidationException.class, () -> subject.validateAccess(resource));
}
// Matching organization -> validation passes silently.
@Test
public void validateAccess_correctOrganization_success() {
// Arrange
var resource = buildResource(ORGANIZATION_ID_1);
var context = new UserContext(SOR_CODE_1);
Mockito.when(userContextProvider.getUserContext()).thenReturn(context);
var organization = buildOrganization(ORGANIZATION_ID_1);
Mockito.when(fhirClient.lookupOrganizationBySorCode(context.getOrgId())).thenReturn(Optional.of(organization));
// Act
// Assert
assertThrows(AccessValidationException.class, () -> subject.validateAccess(resource));
}
// List overload: one mismatching resource fails the whole batch.
@Test
public void validateAccess_conjunction_failure() {
// Arrange
var resource1 = buildResource(ORGANIZATION_ID_1);
var resource2 = buildResource(ORGANIZATION_ID_2);
var context = new UserContext(SOR_CODE_1);
Mockito.when(userContextProvider.getUserContext()).thenReturn(context);
var organization = buildOrganization(ORGANIZATION_ID_1);
Mockito.when(fhirClient.lookupOrganizationBySorCode(context.getOrgId())).thenReturn(Optional.of(organization));
// Act
// Assert
assertThrows(AccessValidationException.class, () -> subject.validateAccess(List.of(resource1, resource2)));
}
// List overload: all resources matching -> batch passes.
@Test
public void validateAccess_conjunction_success() {
// Arrange
var resource1 = buildResource(ORGANIZATION_ID_1);
var resource2 = buildResource(ORGANIZATION_ID_1);
var context = new UserContext(SOR_CODE_1);
Mockito.when(userContextProvider.getUserContext()).thenReturn(context);
var organization = buildOrganization(ORGANIZATION_ID_1);
Mockito.when(fhirClient.lookupOrganizationBySorCode(context.getOrgId())).thenReturn(Optional.of(organization));
// Act
// Assert
assertDoesNotThrow(() -> subject.validateAccess(List.of(resource1, resource2)));
}
// Resource with no organization tag.
private DomainResource buildResource() {
return buildResource(null);
}
// CarePlan tagged with the given organization id (when non-null) via the
// Systems.ORGANIZATION extension, mirroring production tagging.
private DomainResource buildResource(String organizationId) {
var resource = new CarePlan();
if(organizationId != null) {
resource.addExtension(Systems.ORGANIZATION, new Reference(organizationId));
}
return resource;
}
// Minimal Organization carrying only the id the validator compares against.
private Organization buildOrganization(String organizationId) {
var organization = new Organization();
organization.setId(organizationId);
return organization;
}
}
<gh_stars>0
/**
* Created by amirbakhtiari on 6/1/17.
*/
// AngularJS controller module for the admin SPA. Relies on global Materialize
// (toasts, sideNav, modal) and jQuery; 'User' is a project service and '$auth'
// comes from satellizer-style token auth. User-facing strings are Persian and
// must not be altered.
(function() {
'use strict';
angular.module('admin.controllers', [])
// Logs in via the User service; on success toasts and routes to the dashboard,
// on failure toasts every validation message returned by the server.
.controller('LoginController', ['$scope', 'User', '$state', function($scope, User, $state) {
$scope.login = function() {
User.login($scope.loginForm).then(function(response) {
Materialize.toast("شما با موفقیت وارد شدید.", 2000);
$state.go('dashboard');
}, function(error) {
angular.forEach(error.data, function(value, key) {
Materialize.toast(value, 2000);
})
});
};
}])
// Loads the current user's display name and exposes logout. Note the early
// return: for unauthenticated users $scope.logout is never defined.
.controller('DashboardController', ['$scope', '$state', '$auth', 'User', function($scope, $state, $auth, User) {
if($auth.isAuthenticated()) {
User.profile($auth.getToken()).then(function (response) {
$scope.user = {current: response.data.first_name + ' ' + response.data.last_name};
})
}
if(!$auth.isAuthenticated())
return;
$scope.logout = function() {
$auth.logout().then(function() {
$auth.removeToken();
$state.go('login');
});
};
}])
// Chooses the list title from route params (confirmed/unconfirmed, today/all).
// NOTE(review): the last else-if and the final else produce the same title —
// looks like a leftover duplicate branch; confirm intended combinations.
.controller('FactorsController', ['$scope', '$stateParams', function($scope, $stateParams) {
if($stateParams.confirmed == 'all' && $stateParams.today == 'today') {
$scope.factors = {title: 'فاکتورهای تایید شده امروز'};
} else if($stateParams.confirmed == 'unconfirmed') {
$scope.factors = {title: 'فاکتورهای تایید نشده امروز'};
} else if($stateParams.confirmed == 'all' && $stateParams.today == 'all') {
$scope.factors = {title: 'تمام فاکتورها'};
} else {
$scope.factors = {title: 'تمام فاکتورها'};
}
}])
// Placeholder controllers wired up in routing but without behaviour yet.
.controller('LogoutController', ['$scope', '$state', function($scope, $state) {
}])
.controller('CategoriesController', ['$scope', function($scope) {
}])
// Initialises Materialize side navigation (two drawers) and collapsibles via
// direct jQuery calls — imperative DOM setup, not Angular bindings.
.controller('ProductsController', ['$scope', function($scope) {
$(".button-collapse").sideNav(
{
menuWidth: 245, // Default is 300
edge: 'right', // Choose the horizontal origin
closeOnClick: true, // Closes side-nav on <a> clicks, useful for Angular/Meteor
draggable: true // Choose whether you can drag to open on touch screens
}
);
$(".button-collapse-products").sideNav(
{
menuWidth: 245, // Default is 300
edge: 'left', // Choose the horizontal origin
closeOnClick: true, // Closes side-nav on <a> clicks, useful for Angular/Meteor
draggable: true // Choose whether you can drag to open on touch screens
}
);
$('.collapsible').collapsible();
}])
// Opens the reply modal; the id parameter is currently unused.
.controller('MessageController', ['$scope', function($scope) {
$scope.replyMessage = function(id) {
console.log($('#modal-reply').modal('open'));
}
}])
// Stubs for person management; bodies not implemented yet.
.controller('PersonsController', ['$scope', function($scope) {
angular.extend($scope, {
addPerson: function() {
},
save: function() {
}
});
}])
// 'profile' is a route resolve; exposes its payload to the view.
.controller('ProfileController', ['$scope', 'profile', '$rootScope', function($scope, profile, $rootScope) {
$scope.profile = profile.data;
}]);
})();
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Echo each command and abort on the first failure: previously a failed 'cd'
# (or a broken build step) let the remaining helper scripts run anyway, in the
# wrong directory or against stale artifacts.
set -ex

# All helper scripts live in the base profile's test directory.
cd profiles/base/tests
# Clean out previous build artifacts
./clean_all.sh
# Create new build artifacts (e.g., JAR, ZIP, and their base64 encodings)
./build_all.sh
# Update the HTTP (body) payloads for OW and Knative (with base64 encoded archives)
./update_payloads.sh
# execute the functions that will cause the JVM cache to be populated for the profile
# (e.g., invoke the /init and /run methods for each profile's "test" functions).
./exec_tests.sh
cd ../../..
|
import React, { ReactElement, ReactNode } from 'react';
import { renderHook } from '@testing-library/react-hooks';
import { render, screen } from '@testing-library/react';
import '@testing-library/jest-dom/extend-expect';
import { useLoading, LoaderProvider } from '../src';
// Tests for the useLoading hook: it exposes aria-busy container props and an
// indicator element that is only rendered while `loading` is true; when no
// indicator is passed, the one from the nearest LoaderProvider is used.
describe('useLoading', () => {
test('renders element passed in', () => {
let loading = false;
const { result, rerender } = renderHook(() =>
useLoading({
loading,
indicator: <p>loader</p>,
})
);
// Not loading: container is not busy and no indicator is produced.
expect(result.current.containerProps['aria-busy']).toBe(false);
expect(result.current.indicatorEl).toBeFalsy();
// Flip the captured variable and rerender so the hook sees loading=true.
loading = true;
rerender();
expect(result.current.containerProps['aria-busy']).toBe(true);
render(result.current.indicatorEl as ReactElement);
expect(screen.getByText('loader')).toBeInTheDocument();
});
test('renders element from context if no element passed in', () => {
// Wrapper supplies the fallback indicator through LoaderProvider.
const wrapper = ({ children }: { children: ReactNode }) => (
<LoaderProvider indicator={<span>context loader</span>}>
{children}
</LoaderProvider>
);
let indicator: ReactElement | undefined;
const { result, rerender } = renderHook(
() =>
useLoading({
loading: true,
indicator,
}),
{ wrapper }
);
// Undefined indicator -> context one wins.
const { unmount } = render(result.current.indicatorEl as ReactElement);
expect(screen.getByText('context loader')).toBeInTheDocument();
unmount();
// Explicit indicator -> overrides the context one on rerender.
indicator = <p>custom loader</p>;
rerender();
render(result.current.indicatorEl as ReactElement);
expect(screen.getByText('custom loader')).toBeInTheDocument();
});
});
|
package edu.mdamle.beans;
import java.time.LocalDate;
import java.time.Period;
import java.time.temporal.ChronoUnit;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * Model bean for a single tuition reimbursement request (TRR).
 *
 * <p>Holds the request's event details, approval status, cost figures and a few
 * system-managed flags. The no-arg constructor initialises every field to the
 * defaults documented on the field declarations below.
 */
public class TuitionReimbursementRequest {

    private static Logger log = LogManager.getLogger(TuitionReimbursementRequest.class);

    /** How completion of the event is assessed. */
    public enum GradingFormat {
        NULL, GRADE, PRESENTATION
    }

    /** Event categories; each carries the maximum fraction of cost covered. */
    public enum EventType {
        NULL(0), UNICOURSE(0.8), SEMINAR(0.6), CERT_PREP(0.75), CERT(1), TECHNICAL_TRAINING(0.9), OTHER(0.3);

        public final double maxCoverage;

        private EventType(double maxCoverage) {
            this.maxCoverage = maxCoverage;
        }
    }

    /** Workflow states; {@code level} encodes progress along the approval chain. */
    public enum Status {
        NULL(0), DRAFT(0), CANCELLED(0), FAILED(0), PASSED(0),
        DIRSUP_APPROVED(1), DEPTHEAD_APPROVED(2), BENCO_APPROVED(3);

        public final int level;

        private Status(int level) {
            this.level = level;
        }
    }

    private String username; //default is (unassigned)
    private int id; //default is -1
    private EventType eventType; //default is NULL
    private GradingFormat gradingFormat; //default is NULL, but reset by child constructor as GRADE or PRESENTATION
    private LocalDate eventStartDate; //default is 2100-01-01
    private LocalDate eventEndDate; //default is 2100-01-02
    private Status status; //default is DRAFT
    private boolean isUrgent; //default is computed from present date & eventStartDate
    private String location; //default is (unassigned)
    private String description; //default is (unassigned)
    private String justification; //default is (unassigned)
    private double cost; //default is 0
    private double costCoverage; //default is 0
    //readonly &/or system
    private final int URGENCY_WINDOW; //set to 7 days, x >= 0
    private final double PASSING_GRADE_DEFAULT; //set to 0.75, 0 <= x <= 1
    private LocalDate submissionDate; //default is present date
    private boolean isReimbursementAdjustedByBenCo; //default is false
    private boolean isFinalAssesmentReviewed; //default is false
    //optional or separately set
    private double workHoursMissed; //default is 0
    private double passingGradePercentage; //default is PASSING_GRADE_DEFAULT
    private boolean exceedsAvailableFunds; //default is false
    private String availableFundsExcessJustification; //default is (unassigned)

    /**
     * Initialises every field to its documented default. The far-future event
     * dates (year 2100) guarantee a fresh draft is never flagged urgent.
     */
    public TuitionReimbursementRequest() {
        super();
        log.trace("constuctor "+this.getClass()+"() invoked"); //log flag
        this.PASSING_GRADE_DEFAULT = 0.75;
        this.URGENCY_WINDOW = 7;
        this.setUsername("(unassigned)");
        this.setId(-1);
        this.setEventType(EventType.NULL);
        this.setGradingFormat(GradingFormat.NULL);
        this.setEventStartDate(LocalDate.of(2100,1,1));
        this.setEventEndDate(LocalDate.of(2100,1,2));
        this.setStatus(Status.DRAFT);
        this.setUrgent();
        this.setLocation("(unassigned)");
        this.setDescription("(unassigned)");
        this.setJustification("(unassigned)");
        this.setCost(0);
        this.setCostCoverage(0);
        this.setSubmissionDate();
        this.setReimbursementAdjustedByBenCo(false);
        this.setFinalAssesmentReviewed(false);
        this.setWorkHoursMissed(0);
        this.setPassingGradePercentage(PASSING_GRADE_DEFAULT);
        this.setExceedsAvailableFunds(false);
        this.setAvailableFundsExcessJustification("(unassigned)");
        log.trace("new "+this.getClass()+" instantiated\n"); //log flag
        log.trace(this.toString()); //log flag
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public EventType getEventType() {
        return eventType;
    }

    public void setEventType(EventType eventType) {
        this.eventType = eventType;
    }

    public GradingFormat getGradingFormat() {
        return gradingFormat;
    }

    public void setGradingFormat(GradingFormat gradingFormat) {
        this.gradingFormat = gradingFormat;
    }

    public LocalDate getEventStartDate() {
        return eventStartDate;
    }

    public void setEventStartDate(LocalDate eventStartDate) {
        this.eventStartDate = eventStartDate;
    }

    public LocalDate getEventEndDate() {
        return eventEndDate;
    }

    public void setEventEndDate(LocalDate eventEndDate) {
        this.eventEndDate = eventEndDate;
    }

    public double getPASSING_GRADE_DEFAULT() {
        return PASSING_GRADE_DEFAULT;
    }

    public int getURGENCY_WINDOW() {
        return URGENCY_WINDOW;
    }

    public boolean isUrgent() {
        return isUrgent;
    }

    /**
     * Recomputes the urgency flag from today's date and the event start date.
     * A request is urgent when the event starts within {@code URGENCY_WINDOW}
     * days; an event already in the past cancels the request.
     *
     * <p>Bug fix: uses {@link ChronoUnit#DAYS} for the TOTAL day count. The
     * previous {@code Period.getDays()} only returned the days component of
     * the period (e.g. "1 month 2 days" -&gt; 2), so events more than a month
     * away were wrongly flagged urgent and events more than a month in the
     * past were not cancelled.
     */
    public void setUrgent() {
        LocalDate start = getEventStartDate();
        long daysUntilStart = ChronoUnit.DAYS.between(LocalDate.now(), start);
        if(daysUntilStart < 0) {
            isUrgent = false;
            setStatus(Status.CANCELLED);
        } else {
            isUrgent = daysUntilStart <= getURGENCY_WINDOW();
        }
    }

    public Status getStatus() {
        return status;
    }

    public void setStatus(Status status) {
        this.status = status;
    }

    public String getLocation() {
        return location;
    }

    public void setLocation(String location) {
        this.location = location;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getJustification() {
        return justification;
    }

    public void setJustification(String justification) {
        this.justification = justification;
    }

    public double getCost() {
        return cost;
    }

    public void setCost(double cost) {
        this.cost = cost;
    }

    public double getCostCoverage() {
        return costCoverage;
    }

    public void setCostCoverage(double costCoverage) {
        this.costCoverage = costCoverage;
    }

    public LocalDate getSubmissionDate() {
        return submissionDate;
    }

    /** Stamps the submission date with the present date (system-managed). */
    public void setSubmissionDate() {
        this.submissionDate = LocalDate.now();
    }

    public boolean isReimbursementAdjustedByBenCo() {
        return isReimbursementAdjustedByBenCo;
    }

    public void setReimbursementAdjustedByBenCo(boolean isReimbursementAdjustedByBenCo) {
        this.isReimbursementAdjustedByBenCo = isReimbursementAdjustedByBenCo;
    }

    public boolean isFinalAssesmentReviewed() {
        return isFinalAssesmentReviewed;
    }

    public void setFinalAssesmentReviewed(boolean isFinalAssesmentReviewed) {
        this.isFinalAssesmentReviewed = isFinalAssesmentReviewed;
    }

    public double getWorkHoursMissed() {
        return workHoursMissed;
    }

    public void setWorkHoursMissed(double workHoursMissed) {
        this.workHoursMissed = workHoursMissed;
    }

    public double getPassingGradePercentage() {
        return passingGradePercentage;
    }

    public void setPassingGradePercentage(double passingGradePercentage) {
        this.passingGradePercentage = passingGradePercentage;
    }

    public boolean isExceedsAvailableFunds() {
        return exceedsAvailableFunds;
    }

    public void setExceedsAvailableFunds(boolean exceedsAvailableFunds) {
        this.exceedsAvailableFunds = exceedsAvailableFunds;
    }

    public String getAvailableFundsExcessJustification() {
        return availableFundsExcessJustification;
    }

    public void setAvailableFundsExcessJustification(String availableFundsExcessJustification) {
        this.availableFundsExcessJustification = availableFundsExcessJustification;
    }

    @Override
    public String toString() {
        return "Employee: "+getUsername()
        +"\nTRR ID: "+getId()
        +"\nEvent Type: "+getEventType().toString()
        +"\nGrading Format: "+getGradingFormat().toString()
        +"\nCost: "+getCost()
        +"\nCost Coverage: "+getCostCoverage()
        +"\nStatus: "+getStatus()
        +"\nUrgent?: "+isUrgent();
    }
}
|
set -x
echo "start running"

# Run the data-generation pipeline COUNT times, moving each run's .npz output
# into DEST. Factored into a function: the original test and train loops were
# identical apart from the count, the label, and the destination directory.
run_batch() {
  count=$1   # number of pipeline runs
  label=$2   # label used in the progress message ("test"/"train")
  dest=$3    # where the collected outputs end up
  for i in $(seq 1 "$count")
  do
    echo "##### run for ${label} ${i} #####"
    bash run.sh
    mv *.npz out
    mv out/* "$dest"
  done
}

run_batch 5 test ../train/test-right-y
run_batch 15 train ../train/train-right-y

#cd ../train
#python preprocess_image.py
echo "done"
|
<reponame>andreiox/challenges
import util
import functools
class Loteria:
    """Gera jogos de loteria, realiza um sorteio e confere os acertos.

    A quantidade de dezenas por jogo deve estar entre 6 e 10 (inclusive);
    os numeros sorteados vao de 1 a 60 e o sorteio tem sempre 6 dezenas.
    Depende de ``util.get_numeros_aleatorios_ordenados`` para a geracao.
    """

    def __init__(self, quantidade_dezenas, total_jogos):
        # ValueError e mais especifico que o Exception generico original e
        # continua compativel com chamadores que capturam Exception.
        if quantidade_dezenas < 6 or quantidade_dezenas > 10:
            raise ValueError('Quantidade de dezenas deve ser entre 6 e 10')
        self.__quantidade_dezenas = quantidade_dezenas
        self.__total_jogos = total_jogos

    def gera_todos_jogos(self):
        """Gera ``total_jogos`` jogos; guarda em ``self.jogos`` e devolve a lista."""
        self.jogos = [self.__gera_dezenas() for _ in range(self.total_jogos)]
        return self.jogos

    def realiza_sorteio(self):
        """Sorteia 6 dezenas no intervalo (1, 60); guarda e devolve o resultado."""
        self.resultado = util.get_numeros_aleatorios_ordenados((1, 60), 6)
        return self.resultado

    def visualiza_resultado(self):
        """Monta uma pagina HTML com o resultado e os acertos de cada jogo."""
        table = []
        table.append('<html><head /><body>')
        table.append(f'<span>Resultado: {self.resultado}</span><br /><br />')
        table.append('<table border=1>')
        for jogo in self.jogos:
            # Conta quantas dezenas do jogo aparecem no resultado sorteado.
            acertos = sum(1 for dezena in jogo if dezena in self.resultado)
            table.append(f'<tr><td>Jogo {jogo}</td><td>{acertos} acertos</td></tr>')
        table.append('</table></body></html>')
        return ''.join(table)

    def __gera_dezenas(self):
        # Um jogo: `quantidade_dezenas` numeros no intervalo (1, 60);
        # a semantica exata (ordenacao/unicidade) e definida em `util`.
        return util.get_numeros_aleatorios_ordenados((1, 60), self.quantidade_dezenas)

    @property
    def total_jogos(self):
        return self.__total_jogos

    @total_jogos.setter
    def total_jogos(self, value):
        self.__total_jogos = value

    @property
    def quantidade_dezenas(self):
        return self.__quantidade_dezenas

    @quantidade_dezenas.setter
    def quantidade_dezenas(self, value):
        self.__quantidade_dezenas = value

    @property
    def jogos(self):
        return self.__jogos

    @jogos.setter
    def jogos(self, value):
        self.__jogos = value

    @property
    def resultado(self):
        return self.__resultado

    @resultado.setter
    def resultado(self, value):
        self.__resultado = value
|
<!DOCTYPE html>
<html>
<head>
<title>Input Form</title>
</head>
<body>
<!-- The original wired an onClick to the submit button, so the form still
     submitted and reloaded the page, racing/discarding the alert. Intercept
     the submit event instead and cancel the default navigation. -->
<form onsubmit="alertInputValue(event)">
<input type="text" id="textInput" name="inputValue" />
<input type="submit" value="Submit" />
</form>
<script>
function alertInputValue(event) {
// Keep the browser from actually submitting (and reloading) the page.
event.preventDefault();
let inputValue = document.getElementById("textInput").value;
alert(inputValue);
}
</script>
</body>
</html>
#!/usr/bin/env bash
# Regenerate the static protobufjs module (and its .d.ts typings) for
# grpc_gcp.proto. Safe to run from anywhere: the script cd's to its own dir.
set -e    # previously pbts could run against stale output after a pbjs failure

cd "$(dirname "$0")"

PBJS=./node_modules/protobufjs/bin/pbjs
PBTS=./node_modules/protobufjs/bin/pbts
OUTDIR=./src/generated

# -f / -p keep the script idempotent: a missing output dir is not an error.
rm -rf "$OUTDIR"
mkdir -p "$OUTDIR"

"$PBJS" -t static-module -w commonjs -o "$OUTDIR/grpc_gcp.js" protos/grpc_gcp.proto
echo "Generated src/generated/grpc_gcp.js"
"$PBTS" -o "$OUTDIR/grpc_gcp.d.ts" "$OUTDIR/grpc_gcp.js"
echo "Generated src/generated/grpc_gcp.d.ts"
|
#!/bin/sh
###
#
# This is my script for ArchLinux
# I'll install Arch specific stuff here.
#
##
# https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
# Install the system-wide zsh environment file (XDG setup for all users).
# NOTE(review): 'etc/zsh/zshenv' is relative to the current working directory —
# assumes the script runs from the repository root; confirm.
if [ ! -f /etc/zsh/zshenv ]; then
mkdir -p /etc/zsh
cp etc/zsh/zshenv /etc/zsh/zshenv
# 'source' is a bashism despite the sh shebang; works under bash/zsh only.
source /etc/zsh/zshenv
echo "file /etc/zsh/zshenv created.."
else
# File already present: show it, then append our block only if the
# "#XDG Configs" marker is not already in it (keeps the append idempotent).
echo "file /etc/zsh/zshenv already exists..."
echo "================================="
cat /etc/zsh/zshenv
echo "================================="
if grep -q "#XDG Configs" /etc/zsh/zshenv; then
echo "#XDG Configs found in file, skipping ..."
else
cat <etc/zsh/zshenv >>/etc/zsh/zshenv
echo "#XDG Configs added to file."
fi
fi
# Default the XDG base directories (and zsh's dotdir) when unset, then print
# the resulting XDG environment as a quick sanity check.
if [ -z "$XDG_CONFIG_HOME" ]; then export XDG_CONFIG_HOME="$HOME/.config"; fi
if [ -z "$XDG_DATA_HOME" ]; then export XDG_DATA_HOME="$HOME/.local/share"; fi
if [ -z "$XDG_CACHE_HOME" ]; then export XDG_CACHE_HOME="$HOME/.cache"; fi
if [ -z "$ZDOTDIR" ]; then export ZDOTDIR="$XDG_CONFIG_HOME/zsh"; fi
printenv | grep XDG
# --- git -----------------------------------------------------------------
if type git >/dev/null 2>&1; then
  echo "git already installed"
else
  echo "installing git..."
  pacman -S git
  touch "$XDG_CONFIG_HOME/git/config"
fi

# --- AUR package manager -------------------------------------------------
# apacman: https://github.com/oshazard/apacman
if type apacman >/dev/null 2>&1; then
  echo "apacman already installed"
else
  echo "installing apacman..."
  # Bug fix: the original curl'ed the 'apacman' FILE into ~/Git and then
  # tried to 'cd apacman' into that plain file, which fails. Download the
  # bootstrap script into its own directory instead.
  mkdir -p "$HOME/Git/apacman"
  cd "$HOME/Git/apacman"
  pacman -S --needed --asdeps jshon
  curl -O "https://raw.githubusercontent.com/oshazard/apacman/master/apacman"
  bash ./apacman -S apacman
  apacman -S apacman-deps
fi

# --- packer-color (AUR) --------------------------------------------------
if type packer-color >/dev/null 2>&1; then
  echo "packer-color already installed"
else
  echo "installing packer-color..."
  apacman -S packer-color
fi

# --- zsh plugins ---------------------------------------------------------
# Crude presence check: look for the plugin name anywhere under /usr/share.
CHECK=$(ls -R /usr/share | grep zsh)
for plugin in 'zsh-syntax-highlighting' 'zsh-autosuggestions' ; do
  if test "${CHECK#*$plugin}" != "$CHECK"; then
    echo "$plugin already installed"
  else
    echo "installing $plugin..."
    pacman -S "$plugin"
  fi
done
# Resolve the directory this script lives in BEFORE any config files are
# copied. Bug fix: the original defined SCRIPTDIR only further down, so the
# vim and npmrc copies below silently read from "/.config/..." paths.
SCRIPTDIR="$(cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Copy VIM configurations
cp -r "$SCRIPTDIR/.config/vim" "$XDG_CONFIG_HOME"

# Move .emacs
if [ -d ~/.emacs ]; then
  mv ~/.emacs "$XDG_CONFIG_HOME/emacs"
fi

# Copy i3 configurations
# TODO: I forgot to script it when I installed...

# Copy dunst configurations
# if type dunst >/dev/null 2>&1; then
#   echo "dunst already installed"
# else
#   echo "installing dunst..."
#   apt-get install -y dunst
#   cp -r "$SCRIPTDIR/.config/dunst" "$XDG_CONFIG_HOME"
# fi

# Setting npmrc
if [ ! -f /usr/etc/npmrc ]; then
  echo "adding /usr/etc/npmrc file:"
  cp -r "$SCRIPTDIR/usr/etc/npmrc" "/usr/etc/npmrc"
  echo "done adding npmrc config file"
else
  echo "file /usr/etc/npmrc already exists, skipping..."
  cat /usr/etc/npmrc
fi

# It is my intent to override all the files
# in case the .zshrc file is not present in the folder
# makes for an easy update / cleanup
if [ ! -f "$ZDOTDIR/.zshrc" ]; then
  echo "adding .config/zsh files:"
  ls $SCRIPTDIR/.config/zsh/*
  cp -r "$SCRIPTDIR/.config/zsh" "$XDG_CONFIG_HOME"
  echo "done adding zsh config files"
else
  echo "file $ZDOTDIR/.zshrc already exists, skipping zsh config files..."
fi

if type nvim >/dev/null 2>&1; then
  echo "neovim already installed"
else
  echo "installing neovim and symlinks..."
  pacman -S neovim neovim-symlinks
fi

# copy files only if init.vim is not present in the nvim folder
echo "$XDG_CONFIG_HOME/nvim/init.vim"
if [ ! -f "$XDG_CONFIG_HOME/nvim/init.vim" ]; then
  echo "adding .config/nvim files:"
  ls $SCRIPTDIR/.config/nvim/*
  cp -r "$SCRIPTDIR/.config/nvim" "$XDG_CONFIG_HOME"
  echo "done adding nvim config files"
else
  # Bug fix: message previously referenced undefined $nvim_home and "ini.vim".
  echo "file $XDG_CONFIG_HOME/nvim/init.vim already exists, skipping nvim config files..."
fi
|
<gh_stars>1-10
import React, { Component } from 'react';
import { TimelineLite } from 'gsap';
// Splash header animated with GSAP: shows the title, swaps it for the author
// line, then fades the whole header away. Element handles are captured via
// callback refs and driven imperatively by a TimelineLite sequence.
class Header extends Component {
componentDidMount() {
// Tweens run back-to-back unless given an explicit position parameter
// (the author fade-in below is pinned at t=3s).
const animationHeaders = new TimelineLite();
const header = this.header;
const headerTitle = this.headerTitle;
const headerAuthor = this.headerAuthor;
animationHeaders
// Hide the author line instantly so only the title animates first.
.to(headerAuthor, 0, {
display: 'none',
opacity: 0,
})
// Title slides in from offset with an elastic ease.
// NOTE(review): 'Elastic' is a global injected by GSAP (hence the
// eslint-disable); it is not imported in this file.
.fromTo(headerTitle, 1.5, {
css: {
transform: 'translate(-50px, 100px)',
opacity: 0,
}
},
{
css: {
display: 'block',
transform: 'translate(0px, 0px)',
opacity: 1,
},
/*eslint-disable*/
ease: Elastic.easeOut,
/*eslint-enable*/
})
// Title slides out and is removed from layout.
.to(headerTitle, 1.5, {
css: {
transform: 'translate(150px, 300px)',
opacity: 0,
display: 'none',
},
/*eslint-disable*/
ease: Elastic.easeInOut,
/*eslint-enable*/
})
// Re-enable the author line, then fade it in at the 3-second mark.
.to(headerAuthor, 0, {
css: {
display: 'block',
},
})
.to(headerAuthor, 1.5, {
css: {
opacity: 1,
},
/*eslint-disable*/
ease: Elastic.easeInOut,
/*eslint-enable*/
}, 3)
// Finally fade out and remove the entire header.
.to(header, 1, {
css: {
opacity: 0,
display: 'none',
},
});
}
render() {
return (
<div className="header" ref={(header) => this.header = header}>
<h1 className="header-title" ref={(headerTitle) => this.headerTitle = headerTitle}>The Animated Kodawa</h1>
<h2 className="header-author" ref={(headerAuthor) => this.headerAuthor = headerAuthor}>by <NAME></h2>
</div>
);
}
}
export default Header;
|
<reponame>samueltan3972/spring-petclinic-rest<filename>report/reports/data/problem_summary_df25451e-e5b6-4bf9-baf8-d2a72a9dea7b.js<gh_stars>0
// NOTE(review): auto-generated migration-report data (problem-summary details
// for one issue id, consumed via onProblemSummaryLoaded). Regenerate with the
// report tooling rather than editing by hand.
MIGRATION_ISSUES_DETAILS["df25451e-e5b6-4bf9-baf8-d2a72a9dea7b"] = [
{description: "<p>The application embeds a Swagger library.<\/p>", ruleID: "integration-00005", issueName: "Embedded framework - Swagger",
problemSummaryID: "df25451e-e5b6-4bf9-baf8-d2a72a9dea7b", files: [
{l:"spring-petclinic-rest-2.4.2.jar/BOOT-INF/lib/springfox-swagger2-3.0.0.jar", oc:"1"},
{l:"spring-petclinic-rest-2.4.2.jar/BOOT-INF/lib/springfox-swagger-common-3.0.0.jar", oc:"1"},
{l:"spring-petclinic-rest-2.4.2.jar/BOOT-INF/lib/swagger-models-1.5.20.jar", oc:"1"},
{l:"spring-petclinic-rest-2.4.2.jar/BOOT-INF/lib/springfox-swagger-ui-3.0.0.jar", oc:"1"},
{l:"spring-petclinic-rest-2.4.2.jar/BOOT-INF/lib/swagger-annotations-1.5.20.jar", oc:"1"},
{l:"spring-petclinic-rest-2.4.2.jar/BOOT-INF/lib/swagger-annotations-2.1.2.jar", oc:"1"},
{l:"spring-petclinic-rest-2.4.2.jar/BOOT-INF/lib/swagger-models-2.1.2.jar", oc:"1"},
], resourceLinks: [
]},
];
onProblemSummaryLoaded("df25451e-e5b6-4bf9-baf8-d2a72a9dea7b");
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.