text
stringlengths 1
1.05M
|
|---|
import { Component, OnInit } from '@angular/core';

/**
 * Presents a fixed list of items and remembers which one the user clicked.
 * The template iterates `items` and calls `onSelected` on click.
 */
@Component({
  selector: 'app-item-selector',
  templateUrl: './item-selector.component.html',
  styleUrls: ['./item-selector.component.css']
})
export class ItemSelectorComponent implements OnInit {
  /** Choices rendered by the template's *ngFor. */
  items = ['Apple', 'Banana', 'Orange', 'Pineapple', 'Lemon'];

  /**
   * Last clicked item. Marked optional: it is genuinely undefined until the
   * user picks something (the original non-optional `string` declaration is
   * a lie to the type system and fails under strictPropertyInitialization);
   * the template's *ngIf relies on this initial undefined.
   */
  selectedItem?: string;

  constructor() { }

  ngOnInit() {
  }

  /** Click handler wired in the template; records the selection. */
  onSelected(item: string) {
    this.selectedItem = item;
  }
}
<!-- item-selector.component.html -->
<!-- Clickable list of items; the confirmation line only renders once
     onSelected() has set selectedItem on the component. -->
<div>
  <ul>
    <li *ngFor="let item of items" (click)="onSelected(item)">{{ item }}</li>
  </ul>
  <p *ngIf="selectedItem">You have selected: {{ selectedItem }}</p>
</div>
|
<gh_stars>10-100
package com.telenav.osv.event.hardware;
import com.telenav.osv.event.OSVEvent;
/**
 * Created by Kalman on 07/11/2016.
 *
 * Abstract marker base for sensor-originated events; presumably lets bus
 * subscribers treat all sensor events as one family — confirm with the
 * concrete subclasses in this package.
 */
abstract class SensorEvent extends OSVEvent {
}
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-SS-N-VB/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-SS-N-VB/512+512+512-N-VB-FILL-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_and_verbs_fill_first_third_full --eval_function last_element_eval
|
'use strict';
var Bank = require('../models/bank.js');
var logger = require('../../logger');
module.exports = {
getAmount: function() {
return new Promise(function(resolve, reject) {
Bank.findOne({ name: 'aces' }, function(err, doc) {
if (err) throw err;
if (!doc) {
var newBank = new Bank();
newBank.name = 'aces';
newBank.amount = 0;
newBank.amountCollected = 0;
logger.info('New bank account created');
newBank.save(function(err) {
if (err) {
throw err;
}
else {
resolve(0);
}
});
}
else {
resolve(doc.amount);
}
});
});
},
updateAmount: function(incr) {
Bank.findOne({ 'name': 'aces' }, function(err, doc) {
if (err) throw err;
var newAmount = doc.amount + Number(incr);
Bank.findOneAndUpdate({ 'name': 'aces' }, { $set: { amount: newAmount } }, function(err, doc) {
if (err) console.error(err);
logger.info('Bank Balance changed by: ' + Number(incr));
logger.info('New balance: ' + Number(newAmount));
});
});
},
};
|
using System;
[AttributeUsage(AttributeTargets.Property, AllowMultiple = false)]
public class RangeValidationAttribute : Attribute
{
    // Inclusive bounds of the permitted integer range.
    private readonly int _minValue;
    private readonly int _maxValue;

    public RangeValidationAttribute(int minValue, int maxValue)
    {
        _minValue = minValue;
        _maxValue = maxValue;
    }

    /// <summary>
    /// Returns true only when <paramref name="value"/> is an <c>int</c>
    /// within the inclusive [min, max] range; any non-int value fails.
    /// </summary>
    public bool IsValid(object value)
    {
        if (!(value is int))
        {
            return false;
        }
        int candidate = (int)value;
        return candidate >= _minValue && candidate <= _maxValue;
    }
}
/// <summary>Sample entity whose price is constrained via attribute metadata.</summary>
public class Product
{
    /// <summary>Price in whole units; valid range is 10..100 inclusive,
    /// enforced only when checked through RangeValidationAttribute.</summary>
    [RangeValidation(10, 100)]
    public int Price { get; set; }
}
public class Program
{
    public static void Main()
    {
        var product = new Product();
        // Reflection handle to the attributed property, resolved once.
        var priceProperty = typeof(Product).GetProperty("Price");

        // Valid price
        product.Price = 50;
        bool isValidPrice = ValidatePrice(product.Price, priceProperty);
        Console.WriteLine("Is price valid: " + isValidPrice); // Output: Is price valid: True

        // Invalid price
        product.Price = 5;
        isValidPrice = ValidatePrice(product.Price, priceProperty);
        Console.WriteLine("Is price valid: " + isValidPrice); // Output: Is price valid: False
    }

    /// <summary>
    /// Looks up RangeValidationAttribute on the given property and delegates
    /// to it; a property without the attribute is considered valid.
    /// </summary>
    public static bool ValidatePrice(object value, System.Reflection.PropertyInfo propertyInfo)
    {
        var attribute = Attribute.GetCustomAttribute(propertyInfo, typeof(RangeValidationAttribute)) as RangeValidationAttribute;
        return attribute == null || attribute.IsValid(value);
    }
}
|
<gh_stars>0
import { types } from 'rjv'
/** Props for a form field driven by an rjv validation schema. */
export type RjvFieldProps = {
  /** Location of the field's value within the model — assumes rjv path syntax; TODO confirm against rjv docs. */
  path: string;
  /** rjv schema validated against the value at `path`. */
  schema: types.ISchema;
  /** Values that trigger re-validation when changed — presumably like a React deps array; verify at call sites. */
  dependencies?: any[];
}
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) <NAME>, Incorporated. All rights reserved.
* See LICENSE.md in the project root for license terms and full copyright notice.
*--------------------------------------------------------------------------------------------*/
import * as React from "react";
import { Leading, ProgressRadial } from "@itwin/itwinui-react";
import { VerticalStack } from "./CenteredStack";
export interface LoadingIndicatorProps {
  /** Element id attribute. */
  id?: string | undefined;
  /** Content displayed under the loading animation. */
  children: React.ReactNode;
}

/** Displays a spinning loading animation with a description. */
export function LoadingIndicator(props: LoadingIndicatorProps): React.ReactElement {
  const { id, children } = props;
  return (
    <VerticalStack id={id}>
      <ProgressRadial size="large" indeterminate={true} />
      <Leading>{children}</Leading>
    </VerticalStack>
  );
}
|
import { Injectable } from '@angular/core';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { Observable } from 'rxjs/Observable';
import { environment } from 'environments/environment';
import { UserInfoModel } from 'app/models/user-info';
import { ReplaySubject } from 'rxjs/ReplaySubject';
@Injectable()
export class TaskanaEngineService {
  // Buffer size 1: late subscribers immediately receive the latest user info.
  private dataObs$ = new ReplaySubject<UserInfoModel>(1);

  constructor(
    private httpClient: HttpClient
  ) { }

  // GET
  /**
   * Returns the current user's info, cached in a ReplaySubject.
   * A new HTTP request is only issued when no one has subscribed yet
   * (i.e. nothing cached / in flight) or when `forceRefresh` is true.
   */
  getUserInformation(forceRefresh = false): Observable<UserInfoModel> {
    if (!this.dataObs$.observers.length || forceRefresh) {
      this.httpClient.get<UserInfoModel>(`${environment.taskanaRestUrl}/v1/current-user-info`).subscribe(
        data => this.dataObs$.next(data),
        error => {
          // An errored subject is terminal, so a fresh one is created for
          // future calls. NOTE(review): the errored subject has already been
          // returned to the current caller; only later calls get the new one.
          this.dataObs$.error(error);
          this.dataObs$ = new ReplaySubject(1);
        }
      );
    }
    return this.dataObs$;
  }
}
|
#!/usr/bin/env bash
# CI helper: lint with known false-positive exclusions, then run the race
# detector test suite and accumulate coverage into coverage.txt.
set -e
# Start the aggregate coverage file fresh (single newline).
echo "" > coverage.txt
golangci-lint run \
  --exclude horizon.Account \
  --exclude horizon.Problem \
  --exclude horizon.Signer \
  --exclude horizon.Transaction \
  --exclude proto.MessageName \
  --exclude github.com/golang/protobuf/proto \
  --exclude github.com/golang/protobuf/jsonpb
# -e: keep listing packages even if some fail to load.
go test -test.v=true -race -coverprofile=profile.out $(go list -e ./...)
# Append this run's profile to the aggregate and clean up.
if [ -f profile.out ]; then
  cat profile.out >> coverage.txt
  rm profile.out
fi
|
#!/bin/bash
# Copy the SQLite databases (and tmp dir) out of the running containers,
# make sure the local target files exist, then rebuild and restart the stack.
if [ "$EUID" -ne 0 ]
then echo "Please run as root"
  # Fix: exit non-zero so callers can detect the failed precondition
  # (the original bare `exit` returned 0).
  exit 1
fi
# Already verified to be root above, so the original `sudo` prefixes were
# redundant (and inconsistent with the unprefixed docker-compose calls below).
docker cp ezgen_api:/opt/app/ezgen.db ./api
docker cp ezgen_api:/opt/app/tmp/ ./tmp
docker cp ezgen_telegram:/opt/app/telegram.db ./telegram
touch api/ezgen.db
touch telegram/telegram.db
docker-compose down
docker-compose up --build --force-recreate -d
|
// JavaScript code
// Bar-chart bootstrap for the #my-chart canvas.
//
// Fixes: the original declared `chart` twice with `let` in the same scope
// (a SyntaxError at parse time) and constructed a throwaway `new Chart(data)`
// just to derive labels/datasets. Labels and datasets are now built directly
// from the data, and the chart is created exactly once.
const data = [100, 200, 300, 400];
// One label per data point.
const labels = data.map((_, index) => `Value ${index + 1}`);
const datasets = [{ label: 'Data', data: data }];
// Render the chart
const ctx = document.getElementById('my-chart').getContext('2d');
const chart = new Chart(ctx, {
  type: 'bar',
  data: {
    labels: labels,
    datasets: datasets
  },
  options: {
    responsive: true
  }
});
|
# SECURITY NOTE(review): plaintext application secret and mail credentials are
# committed to source here. These should be rotated and loaded from an
# untracked env file or a secret store instead of being hard-coded.
export SECRET_KEY=c317606b09268da1062862b6e81f9616
export MAIL_USERNAME=realgirduj@gmail.com
export MAIL_PASSWORD=keymoney20
# Launch the app's management server with the credentials above in the env.
python3.6 manage.py server
|
<gh_stars>0
/////////////////////////////////////////////////////////////////////////////
// Name: samples/webrequest.cpp
// Purpose: wxWebRequest Sample
// Author: <NAME>
// Created: 2018-10-15
// Copyright: (c) 2018 wxWidgets development team
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
// For compilers that support precompilation, includes "wx/wx.h".
#include "wx/wxprec.h"
#ifndef WX_PRECOMP
#include "wx/wx.h"
#endif
#include "wx/notebook.h"
#include "wx/artprov.h"
#include "wx/creddlg.h"
#include "wx/webrequest.h"
#include "wx/filedlg.h"
#include "wx/image.h"
#ifndef wxHAS_IMAGES_IN_RESOURCES
#include "../sample.xpm"
#endif
#if !wxUSE_WEBREQUEST
#error "wxUSE_WEBREQUEST must be 1 for this sample."
#endif
// Main sample frame: one notebook page per wxWebRequest usage pattern
// (image fetch, text/POST, file download, streamed data processing).
class WebRequestFrame : public wxFrame
{
public:
    // Notebook page indices; also used to select per-page request setup.
    enum Pages
    {
        Page_Image,
        Page_Text,
        Page_Download,
        Page_Advanced
    };

    WebRequestFrame(const wxString& title):
        wxFrame(NULL, wxID_ANY, title)
    {
        // set the frame icon
        SetIcon(wxICON(sample));
        Bind(wxEVT_CLOSE_WINDOW, &WebRequestFrame::OnClose, this);
        // Prepare UI controls
        wxSizer* mainSizer = new wxBoxSizer(wxVERTICAL);
        mainSizer->Add(new wxStaticText(this, wxID_ANY, "Request URL:"),
            wxSizerFlags().Border());
        m_urlTextCtrl = new wxTextCtrl(this, wxID_ANY,
            "https://www.wxwidgets.org/downloads/logos/blocks.png",
            wxDefaultPosition, wxDefaultSize,
            wxTE_PROCESS_ENTER);
        mainSizer->Add(m_urlTextCtrl,
            wxSizerFlags().Expand().Border(wxLEFT | wxRIGHT));
        // Pressing Enter in the URL field starts the request, like the button.
        m_urlTextCtrl->Bind(wxEVT_TEXT_ENTER, &WebRequestFrame::OnStartButton, this);
        m_notebook = new wxNotebook(this, wxID_ANY);
        m_notebook->Bind(wxEVT_NOTEBOOK_PAGE_CHANGED, &WebRequestFrame::OnNotebookPageChanged, this);
        // Image page
        wxPanel* imagePanel = new wxPanel(m_notebook);
        wxSizer* imageSizer = new wxBoxSizer(wxVERTICAL);
        m_imageStaticBitmap = new wxStaticBitmap(imagePanel,
            wxID_ANY, wxArtProvider::GetBitmap(wxART_MISSING_IMAGE));
        imageSizer->Add(m_imageStaticBitmap, wxSizerFlags(1).Expand());
        imagePanel->SetSizer(imageSizer);
        m_notebook->AddPage(imagePanel, "Image", true);
        // Text page
        wxPanel* textPanel = new wxPanel(m_notebook);
        wxSizer* textSizer = new wxBoxSizer(wxVERTICAL);
        m_postCheckBox = new wxCheckBox(textPanel, wxID_ANY, "Post request body");
        textSizer->Add(m_postCheckBox, wxSizerFlags().Border());
        m_postCheckBox->Bind(wxEVT_CHECKBOX, &WebRequestFrame::OnPostCheckBox, this);
        m_postRequestTextCtrl = new wxTextCtrl(textPanel, wxID_ANY,
            "app=WebRequestSample&version=1",
            wxDefaultPosition, wxSize(-1, FromDIP(60)), wxTE_MULTILINE);
        textSizer->Add(m_postRequestTextCtrl,
            wxSizerFlags().Expand().Border(wxLEFT | wxRIGHT));
        textSizer->Add(new wxStaticText(textPanel, wxID_ANY, "Request body content type:"),
            wxSizerFlags().Border());
        m_postContentTypeTextCtrl = new wxTextCtrl(textPanel, wxID_ANY,
            "application/x-www-form-urlencoded");
        textSizer->Add(m_postContentTypeTextCtrl,
            wxSizerFlags().Expand().Border(wxLEFT | wxRIGHT));
        textSizer->Add(new wxStaticText(textPanel, wxID_ANY, "Response body:"),
            wxSizerFlags().Border());
        m_textResponseTextCtrl = new wxTextCtrl(textPanel, wxID_ANY, "",
            wxDefaultPosition, wxDefaultSize, wxTE_MULTILINE | wxTE_READONLY);
        m_textResponseTextCtrl->SetBackgroundColour(wxSystemSettings::GetColour(wxSYS_COLOUR_BTNFACE));
        textSizer->Add(m_textResponseTextCtrl,
            wxSizerFlags(1).Expand().Border(wxLEFT | wxRIGHT | wxBOTTOM));
        textPanel->SetSizer(textSizer);
        m_notebook->AddPage(textPanel, "Text");
        // Download page
        wxPanel* downloadPanel = new wxPanel(m_notebook);
        wxSizer* downloadSizer = new wxBoxSizer(wxVERTICAL);
        wxStaticText* downloadHeader = new wxStaticText(downloadPanel, wxID_ANY,
            "The URL will be downloaded to a file.\n"
            "Progress will be shown and you will be asked, where\n"
            "to save the file when the download completed.");
        downloadSizer->Add(downloadHeader, wxSizerFlags().Expand().Border());
        downloadSizer->AddStretchSpacer();
        m_downloadGauge = new wxGauge(downloadPanel, wxID_ANY, 100);
        downloadSizer->Add(m_downloadGauge, wxSizerFlags().Expand().Border());
        m_downloadStaticText = new wxStaticText(downloadPanel, wxID_ANY, "");
        downloadSizer->Add(m_downloadStaticText, wxSizerFlags().Expand().Border());
        downloadSizer->AddStretchSpacer();
        downloadPanel->SetSizer(downloadSizer);
        m_notebook->AddPage(downloadPanel, "Download");
        // Advanced page
        wxPanel* advancedPanel = new wxPanel(m_notebook);
        wxSizer* advSizer = new wxBoxSizer(wxVERTICAL);
        wxStaticText* advHeader = new wxStaticText(advancedPanel, wxID_ANY,
            "As an example of processing data while\n"
            "it's being received from the server, every\n"
            "zero byte in the response will be counted below.");
        advSizer->Add(advHeader, wxSizerFlags().Expand().Border());
        advSizer->AddStretchSpacer();
        m_advCountStaticText = new wxStaticText(advancedPanel, wxID_ANY, "0",
            wxDefaultPosition, wxDefaultSize, wxALIGN_CENTRE_HORIZONTAL | wxST_NO_AUTORESIZE);
        m_advCountStaticText->SetFont(m_advCountStaticText->GetFont()
            .MakeBold().MakeLarger().MakeLarger());
        advSizer->Add(m_advCountStaticText, wxSizerFlags().Expand().Border());
        advSizer->AddStretchSpacer();
        advancedPanel->SetSizer(advSizer);
        m_notebook->AddPage(advancedPanel, "Advanced");
        mainSizer->Add(m_notebook, wxSizerFlags(1).Expand().Border());
        wxStdDialogButtonSizer* btnSizer = new wxStdDialogButtonSizer();
        m_cancelButton = new wxButton(this, wxID_CANCEL, "Cancel");
        m_cancelButton->Bind(wxEVT_BUTTON, &WebRequestFrame::OnCancelButton, this);
        m_cancelButton->Disable();
        btnSizer->AddButton(m_cancelButton);
        m_startButton = new wxButton(this, wxID_OK, "&Start Request");
        m_startButton->Bind(wxEVT_BUTTON, &WebRequestFrame::OnStartButton, this);
        btnSizer->AddButton(m_startButton);
        btnSizer->Realize();
        mainSizer->Add(btnSizer, wxSizerFlags().Expand().Border());
        // Sync the POST controls' enabled state with the (unchecked) checkbox.
        wxCommandEvent evt;
        OnPostCheckBox(evt);
        SetBackgroundColour(wxSystemSettings::GetColour(wxSYS_COLOUR_BTNFACE));
        SetSizer(mainSizer);
        SetSize(FromDIP(wxSize(540, 500)));
        CreateStatusBar();
        wxLogStatus(this, "%s", wxWebSession::GetDefault().GetLibraryVersionInfo().ToString());
        m_downloadProgressTimer.Bind(wxEVT_TIMER,
            &WebRequestFrame::OnProgressTimer, this);
    }

    virtual ~WebRequestFrame()
    {
        // We have to block until the web request completes, but we need to
        // process events while doing it.
        Hide();
        while ( m_currentRequest.IsOk() )
        {
            wxYield();
        }
    }

    // Creates a request for the entered URL, configures it for the currently
    // selected notebook page and starts it asynchronously.
    void OnStartButton(wxCommandEvent& WXUNUSED(evt))
    {
        wxLogStatus(this, "Started request...");
        // Create request for the specified URL from the default session
        m_currentRequest = wxWebSession::GetDefault().CreateRequest(this,
            m_urlTextCtrl->GetValue());
        // Bind event for state change
        Bind(wxEVT_WEBREQUEST_STATE, &WebRequestFrame::OnWebRequestState, this);
        // Prepare request based on selected action
        switch (m_notebook->GetSelection())
        {
            case Page_Image:
                // Reset static bitmap image
                m_imageStaticBitmap->SetBitmap(wxArtProvider::GetBitmap(wxART_MISSING_IMAGE));
                break;
            case Page_Text:
                // Reset response text control
                m_textResponseTextCtrl->Clear();
                // Set postdata if checked
                if ( m_postCheckBox->IsChecked() )
                {
                    m_currentRequest.SetData(m_postRequestTextCtrl->GetValue(),
                        m_postContentTypeTextCtrl->GetValue());
                }
                break;
            case Page_Download:
                m_currentRequest.SetStorage(wxWebRequest::Storage_File);
                m_downloadGauge->SetValue(0);
                m_downloadGauge->Pulse();
                m_downloadStaticText->SetLabel("");
                // Poll transfer progress twice a second while downloading.
                m_downloadProgressTimer.Start(500);
                SetStatusText("");
                break;
            case Page_Advanced:
                // Storage_None: response bytes are delivered via DATA events
                // instead of being collected, so they can be processed here.
                m_currentRequest.SetStorage(wxWebRequest::Storage_None);
                Bind(wxEVT_WEBREQUEST_DATA, &WebRequestFrame::OnRequestData, this);
                wxLogStatus(this, "Counting...");
                m_advCount = 0;
                m_advCountStaticText->SetLabel("0");
                break;
        }
        m_startButton->Disable();
        // Start the request (events will be sent on success or failure)
        m_currentRequest.Start();
    }

    void OnCancelButton(wxCommandEvent& WXUNUSED(evt))
    {
        if ( m_currentRequest.IsOk() )
            m_currentRequest.Cancel();
    }

    // Central state machine: reacts to every request state transition and
    // updates the page that initiated the request.
    void OnWebRequestState(wxWebRequestEvent& evt)
    {
        // Start is only re-enabled (and Cancel disabled) once no longer active.
        m_startButton->Enable(evt.GetState() != wxWebRequest::State_Active);
        m_cancelButton->Enable(evt.GetState() == wxWebRequest::State_Active);
        bool stillActive = false;
        switch (evt.GetState())
        {
            case wxWebRequest::State_Completed:
                switch (m_notebook->GetSelection())
                {
                    case Page_Image:
                    {
                        wxImage img(*evt.GetResponse().GetStream());
                        m_imageStaticBitmap->SetBitmap(img);
                        m_notebook->GetPage(Page_Image)->Layout();
                        wxLogStatus(this, "Loaded %lld bytes image data", evt.GetResponse().GetContentLength());
                        break;
                    }
                    case Page_Text:
                        m_textResponseTextCtrl->SetValue(evt.GetResponse().AsString());
                        wxLogStatus(this, "Loaded %lld bytes text data (Status: %d %s)",
                            evt.GetResponse().GetContentLength(),
                            evt.GetResponse().GetStatus(),
                            evt.GetResponse().GetStatusText());
                        break;
                    case Page_Download:
                    {
                        m_downloadGauge->SetValue(100);
                        m_downloadStaticText->SetLabel("");
                        wxLogStatus(this, "Download completed");
                        // Ask the user where to save the file
                        wxFileDialog fileDlg(this, "Save download", "",
                            evt.GetResponse().GetSuggestedFileName(), "*.*",
                            wxFD_SAVE | wxFD_OVERWRITE_PROMPT);
                        if ( fileDlg.ShowModal() == wxID_OK )
                        {
                            if ( !wxRenameFile(evt.GetDataFile(), fileDlg.GetPath()) )
                                wxLogError("Could not move file");
                        }
                        break;
                    }
                    case Page_Advanced:
                        UpdateAdvCount();
                        SetStatusText("");
                        break;
                }
                break;
            case wxWebRequest::State_Failed:
                wxLogError("Web Request failed: %s", evt.GetErrorDescription());
                SetStatusText("");
                break;
            case wxWebRequest::State_Cancelled:
                m_downloadGauge->SetValue(0);
                m_downloadStaticText->SetLabel("");
                wxLogStatus(this, "Cancelled");
                break;
            case wxWebRequest::State_Unauthorized:
            {
                // Prompt for credentials and retry; keeping the request marked
                // active below prevents it from being discarded meanwhile.
                wxWebAuthChallenge
                    auth = m_currentRequest.GetAuthChallenge();
                if ( !auth.IsOk() )
                {
                    wxLogStatus("Unexpectedly missing auth challenge");
                    break;
                }
                wxCredentialEntryDialog dialog
                    (
                        this,
                        wxString::Format
                        (
                            "Please enter credentials for accessing\n"
                            "%s",
                            evt.GetResponse().GetURL()
                        ),
                        "wxWidgets web request sample",
                        m_credentials
                    );
                if ( dialog.ShowModal() == wxID_OK )
                {
                    m_credentials = dialog.GetCredentials();
                    auth.SetCredentials(m_credentials);
                    wxLogStatus("Trying to authenticate...");
                    stillActive = true;
                }
            }
            break;
            case wxWebRequest::State_Active:
                stillActive = true;
                break;
            case wxWebRequest::State_Idle:
                // Nothing special to do for this state.
                break;
        }
        if ( !stillActive )
        {
            // Request finished one way or another: drop our handle (also lets
            // the destructor's wait loop exit) and stop progress polling.
            m_currentRequest = wxWebRequest();
            m_downloadProgressTimer.Stop();
        }
    }

    // DATA event handler for the Advanced page: scans each received chunk.
    void OnRequestData(wxWebRequestEvent& evt)
    {
        // Count zero bytes in data buffer
        const char* p = (const char*) evt.GetDataBuffer();
        for ( size_t i = 0; i < evt.GetDataSize(); i++ )
        {
            if ( *p == 0 )
                m_advCount++;
            p++;
        }
        UpdateAdvCount();
        // Make sure the new text is immediately visible.
        m_advCountStaticText->Update();
    }

    void UpdateAdvCount()
    {
        m_advCountStaticText->SetLabel(wxString::Format("%lld", m_advCount));
    }

    // Timer callback: refreshes the download gauge and byte-count label.
    void OnProgressTimer(wxTimerEvent& WXUNUSED(evt))
    {
        // Skip until the expected size is known (avoids division by zero).
        if ( !m_currentRequest.IsOk() || m_currentRequest.GetBytesExpectedToReceive() <= 0 )
            return;
        m_downloadGauge->SetValue((m_currentRequest.GetBytesReceived() * 100) /
            m_currentRequest.GetBytesExpectedToReceive());
        m_downloadStaticText->SetLabelText(wxString::Format("%lld/%lld",
            m_currentRequest.GetBytesReceived(), m_currentRequest.GetBytesExpectedToReceive()));
    }

    // Greys out the POST body/content-type fields unless posting is enabled.
    void OnPostCheckBox(wxCommandEvent& WXUNUSED(evt))
    {
        m_postContentTypeTextCtrl->Enable(m_postCheckBox->IsChecked());
        m_postRequestTextCtrl->Enable(m_postCheckBox->IsChecked());
        wxColour textBg = wxSystemSettings::GetColour(
            (m_postCheckBox->IsChecked()) ? wxSYS_COLOUR_WINDOW : wxSYS_COLOUR_BTNFACE);
        m_postContentTypeTextCtrl->SetBackgroundColour(textBg);
        m_postRequestTextCtrl->SetBackgroundColour(textBg);
    }

    // Pre-fills the URL field with a sensible default for the new page.
    void OnNotebookPageChanged(wxBookCtrlEvent& event)
    {
        wxString defaultURL;
        switch (event.GetSelection())
        {
            case Page_Image:
                defaultURL = "https://www.wxwidgets.org/downloads/logos/blocks.png";
                break;
            case Page_Text:
                defaultURL = "https://httpbin.org/post";
                break;
            case Page_Download:
                defaultURL = "https://github.com/wxWidgets/wxWidgets/releases/download/v3.1.1/wxWidgets-3.1.1.7z";
                break;
            case Page_Advanced:
                defaultURL = "https://httpbin.org/bytes/100000";
                break;
        }
        m_urlTextCtrl->SetValue(defaultURL);
    }

    // Asks for confirmation (and cancels the request) if one is in flight.
    void OnClose(wxCloseEvent& event)
    {
        if ( m_currentRequest.IsOk() )
        {
            if ( event.CanVeto() )
            {
                wxMessageDialog dialog
                    (
                        this,
                        "A web request is in progress, "
                        "closing the window will cancel it.",
                        "Please confirm",
                        wxYES_NO
                    );
                dialog.SetYesNoLabels("Cancel and close", "Don't close");
                if ( dialog.ShowModal() != wxID_YES )
                {
                    event.Veto();
                    return;
                }
            }
            m_currentRequest.Cancel();
        }
        event.Skip();
    }

private:
    wxNotebook* m_notebook;
    wxTextCtrl* m_urlTextCtrl;
    wxButton* m_startButton;
    wxButton* m_cancelButton;
    wxStaticBitmap* m_imageStaticBitmap;
    // Invalid (IsOk() == false) whenever no request is in flight.
    wxWebRequest m_currentRequest;
    wxCheckBox* m_postCheckBox;
    wxTextCtrl* m_postContentTypeTextCtrl;
    wxTextCtrl* m_postRequestTextCtrl;
    wxTextCtrl* m_textResponseTextCtrl;
    wxGauge* m_downloadGauge;
    wxStaticText* m_downloadStaticText;
    wxTimer m_downloadProgressTimer;
    wxStaticText* m_advCountStaticText;
    // Zero bytes counted so far on the Advanced page.
    wxLongLong m_advCount;
    // Normally it would be a bad idea to permanently store credentials like
    // this, we should use wxSecretStore to load them as needed, but let's keep
    // things simple in this example.
    wxWebCredentials m_credentials;
};
// Application object: registers image handlers (needed to decode the fetched
// image) and shows the single main frame.
class WebRequestApp : public wxApp
{
public:
    virtual bool OnInit() wxOVERRIDE
    {
        if ( !wxApp::OnInit() )
            return false;
        wxInitAllImageHandlers();
        // create the main application window
        WebRequestFrame *frame = new WebRequestFrame("wxWebRequest Sample App");
        frame->Show(true);
        return true;
    }
};
wxIMPLEMENT_APP(WebRequestApp);
|
<filename>src/popup/components/wallet-cards/__tests__/wallet-cards.spec.tsx
import React from 'react';
import { shallow } from 'enzyme';
import { Link } from 'react-router-dom';
import WalletCards from '../wallet-cards';
import { CardConfigData, GiftCardsData } from '../../../../testData';
const GetBalanceReturnValue = 5;
// Stub out the gift-card service so the component renders a fixed balance.
jest.mock('../../../../services/gift-card', () => ({
  getLatestBalance: (): number => GetBalanceReturnValue
}));

describe('Wallet Cards', () => {
  it('should create the component for non empty list', () => {
    const wrapper = shallow(<WalletCards activeCards={GiftCardsData} supportedCards={[CardConfigData]} />);
    expect(wrapper.exists()).toBeTruthy();
  });

  // A single card of a brand links to the card detail route (/card/:invoiceId).
  it('should return a card/invoiceId link', () => {
    const wrapper = shallow(<WalletCards activeCards={GiftCardsData} supportedCards={[CardConfigData]} />);
    const expected = {
      pathname: `/card/${GiftCardsData[0].invoiceId}`,
      state: {
        cardConfig: CardConfigData,
        card: GiftCardsData[0]
      }
    };
    const linkProps = wrapper.find(Link).props().to;
    expect(linkProps).toEqual(expected);
  });

  // Two cards of the same brand collapse into the brand route (/cards/:name).
  it('should return card/brand link', () => {
    // JSON round-trip: cheap deep copy so fixtures are not mutated.
    const giftCardsList = JSON.parse(JSON.stringify(GiftCardsData));
    giftCardsList.push(JSON.parse(JSON.stringify(GiftCardsData))[0]);
    const expected = {
      pathname: `/cards/${giftCardsList[0].name}`,
      state: {
        cardConfig: CardConfigData,
        cards: giftCardsList
      }
    };
    const wrapper = shallow(<WalletCards activeCards={giftCardsList} supportedCards={[CardConfigData]} />);
    const linkProps = wrapper.find(Link).props().to;
    expect(linkProps).toEqual(expected);
  });

  // Mixed brands: duplicated brand gets a /cards link, lone brand a /card link.
  it('should return multiple links', () => {
    const giftCardsList = JSON.parse(JSON.stringify(GiftCardsData));
    giftCardsList.push(JSON.parse(JSON.stringify(GiftCardsData))[0]);
    giftCardsList[1].name = 'Nike.com';
    giftCardsList.push(JSON.parse(JSON.stringify(GiftCardsData))[0]);
    const cardConfigList = JSON.parse(JSON.stringify([CardConfigData]));
    cardConfigList.push(JSON.parse(JSON.stringify([CardConfigData]))[0]);
    cardConfigList[1].name = 'Nike.com';
    const expected = [
      {
        pathname: `/cards/${giftCardsList[0].name}`,
        state: {
          cardConfig: cardConfigList[0],
          cards: [giftCardsList[0], giftCardsList[2]]
        }
      },
      {
        pathname: `/card/${giftCardsList[1].invoiceId}`,
        state: {
          cardConfig: cardConfigList[1],
          card: giftCardsList[1]
        }
      }
    ];
    const wrapper = shallow(<WalletCards activeCards={giftCardsList} supportedCards={cardConfigList} />);
    expect(
      wrapper
        .find(Link)
        .first()
        .props().to
    ).toEqual(expected[0]);
    expect(
      wrapper
        .find(Link)
        .last()
        .props().to
    ).toEqual(expected[1]);
  });
});
|
<reponame>maohuang81/newrelic-cli
/*
* Copyright 2017-2018 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package newrelic
import (
"context"
"fmt"
)
// syntheticsConditions groups the synthetics alert-condition API calls
// on the shared client.
type syntheticsConditions service

// AlertsSyntheticsConditionList is the JSON envelope of list responses.
type AlertsSyntheticsConditionList struct {
	AlertsSyntheticsConditions []*AlertsSyntheticsCondition `json:"synthetics_conditions,omitempty"`
}

// AlertsSyntheticsCondition models one synthetics alert condition.
// Pointer fields distinguish "absent" from zero values in the payload.
type AlertsSyntheticsCondition struct {
	ID         *int64  `json:"id,omitempty"`
	Name       *string `json:"name,omitempty"`
	MonitorID  *string `json:"monitor_id,omitempty"`
	RunbookURL *string `json:"runbook_url,omitempty"`
	Enabled    *bool   `json:"enabled,omitempty"`
}

// AlertsSyntheticsConditionEntity is the JSON envelope of single-condition payloads.
type AlertsSyntheticsConditionEntity struct {
	AlertsSyntheticsCondition *AlertsSyntheticsCondition `json:"synthetics_condition,omitempty"`
}
// listAll GETs the synthetics alert conditions (filtered/paged via opt)
// and decodes them into list.AlertsSyntheticsConditionList.
func (s *syntheticsConditions) listAll(ctx context.Context, list *AlertsConditionList, opt *AlertsConditionsOptions) (*Response, error) {
	u, err := addOptions("alerts_synthetics_conditions.json", opt)
	if err != nil {
		return nil, err
	}
	req, err := s.client.NewRequest("GET", u, nil)
	if err != nil {
		return nil, err
	}
	// Decode the response body directly into a freshly allocated sub-list.
	list.AlertsSyntheticsConditionList = new(AlertsSyntheticsConditionList)
	resp, err := s.client.Do(ctx, req, list.AlertsSyntheticsConditionList)
	if err != nil {
		return resp, err
	}
	return resp, nil
}
// deleteByID removes the synthetics alert condition with the given id.
func (s *syntheticsConditions) deleteByID(ctx context.Context, id int64) (*Response, error) {
	u := fmt.Sprintf("alerts_synthetics_conditions/%v.json", id)
	req, err := s.client.NewRequest("DELETE", u, nil)
	if err != nil {
		return nil, err
	}
	return s.client.Do(ctx, req, nil)
}
// create POSTs a new synthetics alert condition under the given policy and
// returns the server's representation of the created condition.
func (s *syntheticsConditions) create(ctx context.Context, c *AlertsConditionEntity, policyID int64) (*AlertsConditionEntity, *Response, error) {
	u := fmt.Sprintf("alerts_synthetics_conditions/policies/%v.json", policyID)
	// The API assigns the ID, so any caller-supplied one is discarded.
	// (The original guarded this with `if ID != nil`, which was redundant:
	// assigning nil unconditionally has the same effect and dereferences
	// exactly the same pointers.)
	c.AlertsSyntheticsConditionEntity.AlertsSyntheticsCondition.ID = nil
	req, err := s.client.NewRequest("POST", u, c.AlertsSyntheticsConditionEntity)
	if err != nil {
		return nil, nil, err
	}
	condition := new(AlertsConditionEntity)
	condition.AlertsSyntheticsConditionEntity = new(AlertsSyntheticsConditionEntity)
	resp, err := s.client.Do(ctx, req, condition.AlertsSyntheticsConditionEntity)
	if err != nil {
		return nil, resp, err
	}
	return condition, resp, nil
}
// update PUTs the given condition over the existing one with the given id
// and returns the server's updated representation.
func (s *syntheticsConditions) update(ctx context.Context, c *AlertsConditionEntity, id int64) (*AlertsConditionEntity, *Response, error) {
	u := fmt.Sprintf("alerts_synthetics_conditions/%v.json", id)
	req, err := s.client.NewRequest("PUT", u, c.AlertsSyntheticsConditionEntity)
	if err != nil {
		return nil, nil, err
	}
	condition := new(AlertsConditionEntity)
	condition.AlertsSyntheticsConditionEntity = new(AlertsSyntheticsConditionEntity)
	resp, err := s.client.Do(ctx, req, condition.AlertsSyntheticsConditionEntity)
	if err != nil {
		return nil, resp, err
	}
	return condition, resp, nil
}
|
################################################################################
# Github: https://github.com/MaxInGaussian/GomPlex
# Author: <NAME> (<EMAIL>)
################################################################################
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc
from sklearn.linear_model import LogisticRegression
from DecisionSystem import DecisionSystem
from sys import path
path.append("../../")
from GomPlex import *
# CSV of raw drawing samples used to train/evaluate the decision model.
DRAWING_RAW_DATA_PATH = 'data/drawing_raw_data.csv'
model = DecisionSystem(sample_time=100, use_past=4,
    use_gender=True, use_age=True, use_edu_level=True,
    show_training_drawings=False, show_predicted_drawings=False)
model.load_drawing_data(DRAWING_RAW_DATA_PATH)
# Subject counts per class — assumes model.ci is a boolean/0-1 vector of
# cognitive-impairment labels (TODO confirm in DecisionSystem).
num_ci, num_nci = model.ci.sum(), len(model.ci)-model.ci.sum()
# model.show_velocity_graph('MS0045')
# model.show_direction_graph('MS0045')
def get_eval_from_fpr_tpr(fpr, tpr):
    """Derive classification metrics from one ROC operating point.

    Reconstructs a confusion matrix from the false/true positive rates and
    the module-level class counts ``num_ci``/``num_nci`` (NOTE: relies on
    those globals being set above), prints the metrics and returns
    ``(sensitivity, specificity, accuracy, precision, F1)``.
    """
    # Rows: actual CI / actual non-CI; cols: predicted CI / predicted non-CI.
    cfs_mat = np.array([[tpr*num_ci, num_ci-tpr*num_ci],
        [fpr*num_nci, num_nci-fpr*num_nci]])
    accuracy = (cfs_mat[0, 0]+cfs_mat[1, 1])/np.sum(cfs_mat)
    # Each ratio guards its zero-denominator case explicitly.
    precision = 0 if np.sum(cfs_mat[:, 0]) == 0 else\
        cfs_mat[0, 0]/np.sum(cfs_mat[:, 0])
    sensitivity = 0 if np.sum(cfs_mat[0]) == 0 else\
        cfs_mat[0, 0]/np.sum(cfs_mat[0])
    specificity = 0 if np.sum(cfs_mat[1]) == 0 else\
        cfs_mat[1, 1]/np.sum(cfs_mat[1])
    F1 = 0 if precision+sensitivity == 0 else\
        2*(precision*sensitivity)/(precision+sensitivity)
    print("Sensitivity =", sensitivity)
    print("Specificity =", specificity)
    print("Accuracy =", accuracy)
    print("Precision =", precision)
    print("F1 Score =", F1)
    return sensitivity, specificity, accuracy, precision, F1
# Evaluate per-subject predictions once; everything below just slices the
# results by gender, education level and age for separate ROC curves.
AUC, F1, cfs_mat, cis, pred_cis, age, gender, edu_lv =\
    list(map(np.array, model.eval_model_for_subjects(n_trains=300)))
# --- ROC by gender (0 = female, 1 = male — presumably; confirm encoding) ---
plt.figure()
fpr_f, tpr_f, thresholds_f = roc_curve(cis[gender==0], pred_cis[gender==0])
AUC = auc(fpr_f, tpr_f)
# Youden's J (max tpr-fpr) marks the best operating point on each curve.
arg = np.argmax(tpr_f-fpr_f)
plt.plot(fpr_f, tpr_f, linestyle='-', label='GPMC-Female (AUC = %0.3f)' % (AUC))
plt.scatter(fpr_f[arg], tpr_f[arg], s=50, marker='x')
fpr_m, tpr_m, thresholds_f = roc_curve(cis[gender==1], pred_cis[gender==1])
AUC = auc(fpr_m, tpr_m)
arg = np.argmax(tpr_m-fpr_m)
plt.plot(fpr_m, tpr_m, linestyle='-', label='GPMC-Male (AUC = %0.3f)' % (AUC))
plt.scatter(fpr_m[arg], tpr_m[arg], s=50, marker='x')
plt.plot([0, 1], [0, 1], 'k-', label='Random Guessing (AUC = 0.5)', alpha=0.3)
plt.xlim([0, 1])
plt.ylim([0, 1])
plt.xlabel('False Positive Rate (1 - Specificity)')
plt.ylabel('True Positive Rate (Sensitivity)')
plt.title('Receiver Operating Characteristic (Gender)')
plt.legend(loc="lower right")
plt.tight_layout()
plt.show()
# --- ROC per education level ---
plt.figure()
for lv in np.unique(edu_lv):
    fpr, tpr, thresholds = roc_curve(cis[edu_lv==lv], pred_cis[edu_lv==lv])
    AUC = auc(fpr, tpr)
    arg = np.argmax(tpr-fpr)
    plt.plot(fpr, tpr, linestyle='-', label='GPMC-%s (AUC = %0.3f)' % (lv, AUC))
    plt.scatter(fpr[arg], tpr[arg], s=50, marker='x')
plt.plot([0, 1], [0, 1], 'k-', label='Random Guessing (AUC = 0.5)', alpha=0.3)
plt.xlim([0, 1])
plt.ylim([0, 1])
plt.xlabel('False Positive Rate (1 - Specificity)')
plt.ylabel('True Positive Rate (Sensitivity)')
plt.title('Receiver Operating Characteristic (Education Level)')
plt.legend(loc="lower right")
plt.tight_layout()
plt.show()
# --- ROC per decade-wide age band ---
plt.figure()
age_rng = [60, 70, 80, 90, 100]
for i in range(len(age_rng)-1):
    rng = np.logical_and(age_rng[i]<=age, age<age_rng[i+1])
    fpr, tpr, thresholds = roc_curve(cis[rng], pred_cis[rng])
    AUC = auc(fpr, tpr)
    arg = np.argmax(tpr-fpr)
    plt.plot(fpr, tpr, linestyle='-', label='GPMC-(%d<=age<%d) (AUC = %0.3f)'
        % (int(age_rng[i]), int(age_rng[i+1]), AUC))
    plt.scatter(fpr[arg], tpr[arg], s=50, marker='x')
plt.plot([0, 1], [0, 1], 'k-', label='Random Guessing (AUC = 0.5)', alpha=0.3)
plt.xlim([0, 1])
plt.ylim([0, 1])
plt.xlabel('False Positive Rate (1 - Specificity)')
plt.ylabel('True Positive Rate (Sensitivity)')
plt.title('Receiver Operating Characteristic (Age)')
plt.legend(loc="lower right")
plt.tight_layout()
plt.show()
def plot_confusion_matrix(cm, classes):
    """Render confusion matrix ``cm`` with ``classes`` as the axis labels.

    Fixes two bugs in the original: it ignored its ``cm`` argument and
    plotted the module-level ``cfs_mat`` instead, and it used ``itertools``
    without importing it (NameError at call time). The dead local
    ``normalize = False`` flag is dropped.
    """
    import itertools  # local import: not present in the module's import block

    cmap = plt.cm.Blues
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title('Confusion Matrix')
    plt.colorbar()
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45)
    plt.yticks(tick_marks, classes)
    # White text on dark cells, black on light ones, for readability.
    thresh = cm.max() / 2.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j, i, cm[i, j], horizontalalignment="center",
            color="white" if cm[i, j] > thresh else "black")
    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
|
/*
* Copyright 2014-2020 chronicle.software
*
* http://www.chronicle.software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.openhft.chronicle.logger.log4j1;
import net.openhft.chronicle.core.OS;
import net.openhft.chronicle.core.util.Time;
import net.openhft.chronicle.logger.ChronicleLogLevel;
import org.slf4j.Logger;
import java.io.File;
/** Shared helpers for the log4j1 chronicle-logger tests. */
class Log4j1TestBase {
    // *************************************************************************
    //
    // *************************************************************************

    // All chronicle log levels, letting tests exercise every branch of log().
    static final ChronicleLogLevel[] LOG_LEVELS = ChronicleLogLevel.values();

    // Returns the suite's temp directory, e.g. "<java.io.tmpdir>/chronicle-log4j1".
    static String rootPath() {
        String path = System.getProperty("java.io.tmpdir");
        String sep = System.getProperty("file.separator");
        if (!path.endsWith(sep)) {
            path += sep;
        }
        return path + "chronicle-log4j1";
    }

    // Returns a per-test subdirectory of rootPath() named after 'type'.
    static String basePath(String type) {
        return rootPath()
            + File.separator
            + type;
    }

    // Dispatches a parameterised message to the slf4j call matching 'level';
    // unknown levels fail fast.
    static void log(Logger logger, ChronicleLogLevel level, String fmt, Object... args) {
        switch (level) {
            case TRACE:
                logger.trace(fmt, args);
                break;
            case DEBUG:
                logger.debug(fmt, args);
                break;
            case INFO:
                logger.info(fmt, args);
                break;
            case WARN:
                logger.warn(fmt, args);
                break;
            case ERROR:
                logger.error(fmt, args);
                break;
            default:
                throw new UnsupportedOperationException();
        }
    }
}
|
# Python 2 script: exercises print statements and string formatting.
# Just print some boring stuff
print "Mary had a little lamb."
# 'snow' is a string literal spliced in via %s formatting, not a variable
print "Its fleece was white as %s." % 'snow'
print "And everywhere that Mary went."
# A line of ten dots via string repetition
print "." * 10  # what'd that do?
# Lots of variables, each holding a single character
end1 = "C"
end2 = "h"
end3 = "e"
end4 = "e"
end5 = "s"
end6 = "e"
end7 = "B"
end8 = "u"
end9 = "r"
end10 = "g"
end11 = "e"
end12 = "r"
# The trailing comma suppresses the newline (Python 2), so the next print
# continues on the same line. Try removing it to see what happens.
# It is considered bad style to place everything on 1 line that becomes longer than 80 characters
print end1 + end2 + end3 + end4 + end5 + end6,
# and we are not done yet
print end7 + end8 + end9 + end10 + end11 + end12
|
<gh_stars>0
import React, {useEffect} from 'react'
import './Prizes.css';
import AOS from 'aos';
import "aos/dist/aos.css";
// Static "Prizes" section; uses AOS for scroll-triggered entrance animation.
const Prizes = () => {
    // Initialise AOS once on mount; refresh recalculates element positions.
    useEffect(() => {
        AOS.init();
        AOS.refresh();
    }, []);
    return (
        <div>
            <h1 className="heading">PRIZES</h1>
            <div className="prize-container">
                <div data-aos="fade-up">
                    <h2 className="prizes-head">
                        Perks for each participant
                    </h2>
                    <ul>
                        <li className='list-item'>
                            Cash prize worth 20k sponsored by Quidnunc Infotainment and River Rises.
                        </li>
                        <li className="list-item">
                            Potential candidates get a chance to bag an internship or a full time job at Quidnunc’s IT division with stipend and seed funding upto 2Lakh*.
                        </li>
                        <li className="list-item">
                            Cash prize worth 10k for best hack built on top of Ethereum and for teams that integrate Polygon in their hacks as well the prize money will be 15k along with eligibility for internship/full-time role interviews and a chance to land seed funding of upto 5000 USD(or equivalent)!*.
                        </li>
                        <li className="list-item">
                            Cash prize worth 20k for best Dapp built on Tezos. Continuity grant opportunity up to 5,000 USD(or equivalent) for an outstanding project*.
                        </li>
                        <li className="list-item">
                            Cash prize worth 20k for best Dapp built on Celo and Cash prize worth 20k for best use of IPFS and/or Filecoin*.
                        </li>
                        <li className="list-item">
                            For top performing teams, exclusive t-shirts and swags will be provided*.
                        </li>
                    </ul>
                    <div className="prizes-btn">
                        <button className='btn-primary knowmore'>Know More</button>
                    </div>
                </div>
            </div>
        </div>
    )
}
|
#!/usr/bin/env bash
# Copyright 2020 Amazon.com Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
set -o errexit
set -o nounset
set -o pipefail

# Needed so that the **/summary.txt glob below recurses into subdirectories.
shopt -s globstar

# First CLI argument: project subdirectory to generate attribution for.
PROJECT="$1"
MAKE_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd -P)"
PROJECT_ROOT=$MAKE_ROOT/$PROJECT

mkdir -p _output
touch _output/total_summary.txt

function build::attribution::generate(){
  make -C $PROJECT_ROOT attribution
  for summary in $PROJECT_ROOT/_output/**/summary.txt; do
    # Collapse "name+version =" keys down to "name =" so counts can merge.
    sed -i "s/+.*=/ =/g" $summary
    # Sum counts per key across this summary AND the running total file,
    # then atomically replace the running total with the merged result.
    awk -F" =\> " '{ count[$1]+=$2} END { for (item in count) printf("%s => %d\n", item, count[item]) }' \
      $summary _output/total_summary.txt | sort > _output/total_summary.tmp && mv _output/total_summary.tmp _output/total_summary.txt
  done
}

build::attribution::generate
|
#!/usr/bin/env bash
function getOutputValue()
{
    # Print the (optionally double-quoted) value of `export NAME=...`
    # found in the given variables file, without a trailing newline.
    local varsFile="$1"
    local varName="$2"

    local rawValue
    rawValue=$(grep "^export ${varName}=" ${varsFile} | cut -d "=" -f 2)
    rawValue="${rawValue%\"}"    # strip a trailing double quote, if any
    printf "${rawValue#\"}"      # strip a leading double quote and print
}
function assertEquals()
{
    # Succeeds (0) when expected == actual; otherwise prints a diagnostic
    # and fails (1).
    local MESSAGE="$1"
    local EXPECTED_STRING="$2"
    local ACTUAL_STRING="$3"

    if [[ "${EXPECTED_STRING}" != "${ACTUAL_STRING}" ]]; then
        printf "FAILED: ${MESSAGE}, expected: ${EXPECTED_STRING}, but was: ${ACTUAL_STRING}\n"
        return 1
    fi
    return 0
}
function assertNotEquals()
{
    # Succeeds (0) when the two strings differ; otherwise prints a
    # diagnostic and fails (1).
    local MESSAGE="$1"
    local EXPECTED_STRING="$2"
    local ACTUAL_STRING="$3"

    if [[ "${EXPECTED_STRING}" = "${ACTUAL_STRING}" ]]; then
        printf "FAILED: ${MESSAGE}, expected not: ${EXPECTED_STRING}, but was: ${ACTUAL_STRING}\n"
        return 1
    fi
    return 0
}
function assertStartsWith()
{
    # Usage: assertStartsWith MESSAGE EXPECTED_PREFIX ACTUAL_STRING
    local MESSAGE="$1"
    # BUGFIX: the expected prefix was read from $3 (the same positional as
    # the actual value), so the assertion could never fail. It comes from $2.
    local EXPECTED_STRING="$2"
    local ACTUAL_STRING="$3"

    # The unquoted trailing * makes this a glob prefix match.
    if [[ "${ACTUAL_STRING}" == ${EXPECTED_STRING}* ]]; then
        return 0
    else
        printf "FAILED: ${MESSAGE}, expected it to start with ${EXPECTED_STRING}, but was: ${ACTUAL_STRING}\n"
        return 1
    fi
}
function assertNotEmpty()
{
    # Succeeds (0) for a non-empty string; otherwise prints a diagnostic
    # and fails (1).
    local MESSAGE="$1"
    local ACTUAL_STRING="$2"

    [ -n "${ACTUAL_STRING}" ] && return 0
    printf "FAILED: ${MESSAGE}, expected it to be not empty, but it was.\n"
    return 1
}
function assertEmpty()
{
    # Succeeds (0) for an empty string; otherwise prints a diagnostic
    # and fails (1).
    local MESSAGE="$1"
    local ACTUAL_STRING="$2"

    [ -z "${ACTUAL_STRING}" ] && return 0
    printf "FAILED: ${MESSAGE}, expected it to be empty, but was: ${ACTUAL_STRING}\n"
    return 1
}
function assertVaultToken()
{
    # Validates a Vault token by performing a real `vault token lookup`
    # against the configured Vault server (requires the vault CLI and a
    # reachable server). Succeeds (0) on a valid token; otherwise prints
    # the lookup output and fails (1).
    local VAULT_TOKEN_TO_TEST="$1"

    if OUTPUT="$(VAULT_TOKEN=${VAULT_TOKEN_TO_TEST} vault token lookup 2>&1)"; then
        return 0
    else
        printf "FAILED: token is not valid:\n${OUTPUT}\n"
        return 1
    fi
}
function cleanEnv()
{
    # Drop every variable the surrounding test scripts may have exported,
    # one per line for readable diffs.
    unset VARIABLES_FILE
    unset LEASE_IDS
    unset RUN_ONCE
    unset RENEW_INTERVAL
}
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import yaml
from apmecclient.common import exceptions
from apmecclient.i18n import _
from apmecclient.apmec import v1_0 as apmecV10
# Resource keys used by the apmec CLI command base classes below.
_MES = 'mes'
_RESOURCE = 'resource'
class ListMES(apmecV10.ListCommand):
    """List MES that belong to a given tenant."""

    resource = _MES
    # Columns rendered by the generic list command.
    list_columns = ['id', 'name', 'mesd_id', 'mgmt_urls', 'status']
class ShowMES(apmecV10.ShowCommand):
    """Show information of a given MES."""

    resource = _MES
class CreateMES(apmecV10.CreateCommand):
    """Create a MES."""

    resource = _MES
    # 'attributes' is request-internal; hide it from the printed result.
    remove_output_fields = ["attributes"]

    def add_known_arguments(self, parser):
        """Register CLI arguments specific to MES creation."""
        parser.add_argument(
            'name', metavar='NAME',
            help=_('Set a name for the MES'))
        parser.add_argument(
            '--description',
            help=_('Set description for the MES'))
        # Exactly one way to identify the template must be given.
        mesd_group = parser.add_mutually_exclusive_group(required=True)
        mesd_group.add_argument(
            '--mesd-id',
            help=_('MESD ID to use as template to create MES'))
        mesd_group.add_argument(
            '--mesd-template',
            help=_('MESD file to create MES'))
        mesd_group.add_argument(
            '--mesd-name',
            help=_('MESD name to use as template to create MES'))
        # VIM may be given by id or by name, but not both (and is optional).
        vim_group = parser.add_mutually_exclusive_group()
        vim_group.add_argument(
            '--vim-id',
            help=_('VIM ID to use to create MES on the specified VIM'))
        vim_group.add_argument(
            '--vim-name',
            help=_('VIM name to use to create MES on the specified VIM'))
        parser.add_argument(
            '--vim-region-name',
            help=_('VIM Region to use to create MES on the specified VIM'))
        parser.add_argument(
            '--param-file',
            help=_('Specify parameter yaml file'))

    def args2body(self, parsed_args):
        """Translate parsed CLI arguments into the API request body.

        Resolves --mesd-name/--vim-name to ids, or inlines a template
        read from --mesd-template. Raises InvalidInput on bad YAML or an
        empty template file.
        """
        args = {'attributes': {}}
        body = {self.resource: args}
        if parsed_args.vim_region_name:
            args.setdefault('placement_attr', {})['region_name'] = \
                parsed_args.vim_region_name

        apmec_client = self.get_client()
        apmec_client.format = parsed_args.request_format
        if parsed_args.vim_name:
            # Resolve the VIM name to its id via the API.
            _id = apmecV10.find_resourceid_by_name_or_id(apmec_client,
                                                         'vim',
                                                         parsed_args.
                                                         vim_name)
            parsed_args.vim_id = _id
        if parsed_args.mesd_name:
            # Resolve the MESD name to its id via the API.
            _id = apmecV10.find_resourceid_by_name_or_id(apmec_client,
                                                         'mesd',
                                                         parsed_args.
                                                         mesd_name)
            parsed_args.mesd_id = _id
        elif parsed_args.mesd_template:
            with open(parsed_args.mesd_template) as f:
                template = f.read()
            try:
                args['mesd_template'] = yaml.load(
                    template, Loader=yaml.SafeLoader)
            except yaml.YAMLError as e:
                raise exceptions.InvalidInput(e)
            if not args['mesd_template']:
                raise exceptions.InvalidInput('The mesd file is empty')

        if parsed_args.param_file:
            with open(parsed_args.param_file) as f:
                param_yaml = f.read()
            try:
                args['attributes']['param_values'] = yaml.load(
                    param_yaml, Loader=yaml.SafeLoader)
            except yaml.YAMLError as e:
                raise exceptions.InvalidInput(e)
        # Copy the plain scalar arguments straight into the body.
        apmecV10.update_dict(parsed_args, body[self.resource],
                             ['tenant_id', 'name', 'description',
                              'mesd_id', 'vim_id'])
        return body
class DeleteMES(apmecV10.DeleteCommand):
    """Delete given MES(s)."""

    resource = _MES
    # Deletion is asynchronous on the server side; report it as initiated.
    deleted_msg = {'mes': 'delete initiated'}
class UpdateMES(apmecV10.UpdateCommand):
    """Update a given MES."""

    resource = _MES

    def add_known_arguments(self, parser):
        """Register CLI arguments specific to MES update."""
        parser.add_argument(
            '--mesd-template',
            help=_('MESD file to update MES')
        )

    def args2body(self, parsed_args):
        """Translate parsed CLI arguments into the API request body.

        Raises InvalidInput on malformed YAML or an empty template.
        """
        update_args = {}
        client = self.get_client()
        client.format = parsed_args.request_format
        template_path = parsed_args.mesd_template
        if template_path:
            with open(template_path) as template_file:
                raw_template = template_file.read()
            try:
                # safe_load is equivalent to load(..., Loader=SafeLoader).
                update_args['mesd_template'] = yaml.safe_load(raw_template)
            except yaml.YAMLError as exc:
                raise exceptions.InvalidInput(exc)
            if not update_args['mesd_template']:
                raise exceptions.InvalidInput('The mesd template is empty')
        request_body = {self.resource: update_args}
        apmecV10.update_dict(parsed_args, request_body[self.resource],
                             ['tenant_id'])
        return request_body
|
from global_covid_tracker.dataframes import deaths_dataframe
import plotly.graph_objects as go
def plot_deaths_by_country(country: str) -> go.Figure:
    """Build a figure of daily COVID-19 deaths for ``country``.

    Combines a semi-transparent bar trace of raw daily deaths with a
    solid line of its 7-day rolling mean; hover text comes from the
    pre-formatted text columns of the data frame.
    """
    frame = deaths_dataframe(country)
    layout = go.Layout(
        title=f'Daily New COVID-19 Deaths for {country}',
        xaxis={'title': 'Date'},
        yaxis={'title': 'Daily Deaths'}
    )
    daily_bars = go.Bar(
        x=frame.index,
        y=frame['new_deaths'],
        name='New Daily Deaths',
        marker={'color': 'blue', 'opacity': 0.5},
        text=frame['text_new_deaths'],
        hoverinfo='text',
    )
    figure = go.Figure(data=[daily_bars], layout=layout)
    rolling_line = go.Scatter(
        x=frame.index,
        y=frame['new_deaths_7d_rolling_mean'],
        name='7 Day Rolling Mean',
        marker={'color': 'blue'},
        text=frame['text_new_deaths_7d_rolling_mean'],
        hoverinfo='text'
    )
    figure.add_trace(rolling_line)
    figure.update_layout(height=600, width=900,
                         legend={'x': 0.01, 'y': 0.98, 'bordercolor': 'Black',
                                 'borderwidth': 1})
    return figure
|
#ifndef GAUSSIANBLURRING_HPP
#define GAUSSIANBLURRING_HPP
#include "smoothing.hpp"
class GaussianBlurring : public Smoothing {
public:
GaussianBlurring() = default;
virtual void smooth(cv::Mat& m) {
cv::Mat dst;
cv::GaussianBlur(m, dst,cv::Size(5,5),0);
m = std::move(dst);
}
};
#endif // GAUSSIANBLURRING_HPP
|
#!/bin/bash
# Define the variables (placeholders: replace with a real image and tag).
IMAGE_NAME="your_image_name"
TAG="your_tag"

# Pull the Docker image
docker pull $IMAGE_NAME:$TAG

# Start an interactive container, launch an ssh-agent inside it, then drop
# into a login shell with the agent's environment exported.
docker run -it $IMAGE_NAME:$TAG sh -c 'eval `ssh-agent -s`; sh -l'
|
public class Matrix {

    /** Fixed dimensions: 3 rows by 4 columns. */
    private static final int ROWS = 3;
    private static final int COLS = 4;

    // Backing storage; int cells default to 0.
    private int[][] cells = new int[ROWS][COLS];

    /** Stores {@code value} at ({@code row}, {@code col}). */
    public void setCell(int row, int col, int value) {
        cells[row][col] = value;
    }

    /** Returns the value stored at ({@code row}, {@code col}). */
    public int getCell(int row, int col) {
        return cells[row][col];
    }
}
|
#!/bin/bash
# Author: yeho <lj2007331 AT gmail.com>
# BLOG: https://blog.linuxeye.com
#
# Notes: OneinStack for CentOS/RedHat 5+ Debian 6+ and Ubuntu 12+
#
# Project home page:
# http://oneinstack.com
# https://github.com/lj2007331/oneinstack
export PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin

clear
# Banner. BUGFIX: user-visible typo "RadHat" corrected to "RedHat".
printf "
#######################################################################
# OneinStack for CentOS/RedHat 5+ Debian 6+ and Ubuntu 12+ #
# upgrade Web,Database,PHP,Redis,phpMyAdmin for OneinStack #
# For more information please visit http://oneinstack.com #
#######################################################################
"
# Record the current directory in options.conf so sourced scripts find it.
sed -i "s@^oneinstack_dir.*@oneinstack_dir=`pwd`@" ./options.conf

# Pull in configuration, helpers and the per-component upgrade functions.
. ./options.conf
. ./include/color.sh
. ./include/check_os.sh
. ./include/check_db.sh
. ./include/download.sh
. ./include/get_char.sh
. ./include/upgrade_web.sh
. ./include/upgrade_db.sh
. ./include/upgrade_php.sh
. ./include/upgrade_redis.sh
. ./include/upgrade_phpmyadmin.sh

# Check if user is root
[ $(id -u) != "0" ] && { echo "${CFAILURE}Error: You must be root to run this script${CEND}"; exit 1; }

# get the IP information
PUBLIC_IPADDR=`./include/get_public_ipaddr.py`
[ "`./include/get_ipaddr_state.py $PUBLIC_IPADDR`" == '\u4e2d\u56fd' ] && IPADDR_STATE=CN
# Prints command-line usage for non-interactive invocation.
Usage(){
printf "
Usage: $0 [ ${CMSG}web${CEND} | ${CMSG}db${CEND} | ${CMSG}php${CEND} | ${CMSG}redis${CEND} | ${CMSG}phpmyadmin${CEND} ]
${CMSG}web${CEND} --->Upgrade Nginx/Tengine
${CMSG}db${CEND} --->Upgrade MySQL/MariaDB/Percona
${CMSG}php${CEND} --->Upgrade PHP
${CMSG}redis${CEND} --->Upgrade Redis
${CMSG}phpmyadmin${CEND} --->Upgrade phpMyAdmin
"
}
# Interactive menu: loops until the user quits with 'q'; each choice calls
# the matching Upgrade_* function sourced from ./include above.
Menu(){
while :
do
printf "
What Are You Doing?
\t${CMSG}1${CEND}. Upgrade Nginx/Tengine
\t${CMSG}2${CEND}. Upgrade MySQL/MariaDB/Percona
\t${CMSG}3${CEND}. Upgrade PHP
\t${CMSG}4${CEND}. Upgrade Redis
\t${CMSG}5${CEND}. Upgrade phpMyAdmin
\t${CMSG}q${CEND}. Exit
"
echo
read -p "Please input the correct option: " Number
if [[ ! $Number =~ ^[1-5,q]$ ]];then
echo "${CWARNING}input error! Please only input 1,2,3,4,5 and q${CEND}"
else
case "$Number" in
1)
# Which web server is installed decides which upgrade runs.
if [ -e "$nginx_install_dir/sbin/nginx" ];then
Upgrade_Nginx
elif [ -e "$tengine_install_dir/sbin/nginx" ];then
Upgrade_Tengine
fi
;;
2)
Upgrade_DB
;;
3)
Upgrade_PHP
;;
4)
Upgrade_Redis
;;
5)
Upgrade_phpMyAdmin
;;
q)
exit
;;
esac
fi
done
}
# Entry point: no argument -> interactive menu; exactly one argument ->
# direct dispatch; anything else -> usage help.
if [ $# == 0 ];then
Menu
elif [ $# == 1 ];then
case $1 in
web)
# Which web server is installed decides which upgrade runs.
if [ -e "$nginx_install_dir/sbin/nginx" ];then
Upgrade_Nginx
elif [ -e "$tengine_install_dir/sbin/nginx" ];then
Upgrade_Tengine
fi
;;
db)
Upgrade_DB
;;
php)
Upgrade_PHP
;;
redis)
Upgrade_Redis
;;
phpmyadmin)
Upgrade_phpMyAdmin
;;
*)
Usage
;;
esac
else
Usage
fi
|
<reponame>shlomif/quad-pres<filename>installer/t/data/p4n5/src/slidy.js
/* slidy.js
Copyright (c) 2005 W3C (MIT, ERCIM, Keio), All Rights Reserved.
W3C liability, trademark, document use and software licensing
rules apply, see:
http://www.w3.org/Consortium/Legal/copyright-documents
http://www.w3.org/Consortium/Legal/copyright-software
Modified by <NAME> - rights disclaimed.
*/
window.onload = startup;            // equivalent to onload on body element
// When true, the left/right arrow keys are swapped (right-to-left locales).
var rtl_keys = false;
var lastShown = null;               // last incrementally shown item
var lastShownBeginning = false;     // true when positioned before the first item
var lastShownEnd = false;           // true when positioned past the last item
/* general initialization: hide all incremental items and install the
   keyboard handler; the presentation starts at the beginning. */
function startup()
{
    lastShownBeginning = true;
    setVisibilityAllIncremental("hidden");
    document.onkeydown = keyDown;
}
// Suppress the browser's default handling of `event` (legacy IE flags plus
// the DOM-standard call) and return false for inline-handler callers.
function cancel(event)
{
    if (!event)
        return false;

    event.cancel = true;
    event.returnValue = false;
    if (event.preventDefault)
        event.preventDefault();

    return false;
}
// Navigate to the href of the first anchor carrying the given accesskey.
// NOTE(review): relies on jQuery's $, which is loaded elsewhere — confirm.
function follow_link(accessor)
{
    window.location = $("a[accesskey='" + accessor +"']").first().attr('href');
    return;
}
// Keyboard dispatcher: arrows step incremental items, N/P follow the
// next/previous slide links.
// See e.g. http://www.quirksmode.org/js/events/keys.html for keycodes
function keyDown(event)
{
    var key;

    if (!event)
        var event = window.event;

    // kludge around NS/IE differences
    if (window.event)
        key = window.event.keyCode;
    else if (event.which)
        key = event.which;
    else
        return true; // Yikes! unknown browser

    // ignore event if key value is zero
    // as for alt on Opera and Konqueror
    if (!key)
        return true;

    // check for concurrent control/command/alt key
    // but are these only present on mouse events?
    if (event.ctrlKey || event.altKey || event.metaKey)
        return true;

    if (key == 37) // Left arrow
    {
        // In RTL mode the arrow directions are swapped.
        lastShown = (!rtl_keys)
            ? hidePreviousItem(lastShown)
            : revealNextItem(lastShown)
        ;
        return cancel(event);
    }
    else if (key == 39) // Right arrow
    {
        lastShown = (!rtl_keys)
            ? revealNextItem(lastShown)
            : hidePreviousItem(lastShown)
        ;
        return cancel(event);
    }
    else if (key == 78) // N key
    {
        follow_link("n");
    }
    else if (key == 80) // P key
    {
        follow_link("p");
    }

    return true;
}
// Returns the element's class attribute string; legacy IE (detected by the
// absence of window.pageYOffset) exposes it as "className" instead of "class".
function getClassList(element)
{
    var attrName = (typeof window.pageYOffset == 'undefined') ? "className" : "class";
    return element.getAttribute(attrName);
}
// Whole-word test for `name` in the element's class list.
// BUGFIX: the pattern was built from the string literal "\W*" — the
// backslash is consumed by the string, yielding the regex `nameW*`, which
// has no trailing anchor at all, so e.g. "point" matched class "pointless".
// Anchor the class name with whitespace-or-edge on both sides instead.
function hasClass(element, name)
{
    var regexp = new RegExp("(^|\\s)" + name + "(\\s|$)");
    return regexp.test(getClassList(element));
}
// left to right (document order) traversal of root's content;
// returns null once the whole subtree has been visited.
function nextNode(root, node)
{
    // No current node: start at root's first child.
    if (node == null)
        return root.firstChild;

    // Descend first, then move sideways.
    if (node.firstChild)
        return node.firstChild;

    if (node.nextSibling)
        return node.nextSibling;

    // Climb until an ancestor (below root) has a following sibling.
    for (node = node.parentNode; node && node != root; node = node.parentNode)
    {
        if (node.nextSibling)
            return node.nextSibling;
    }

    return null;
}
// right to left (reverse document order) traversal of root's content;
// returns null once the whole subtree has been visited backwards.
function previousNode(root, node)
{
    // No current node: descend to the deepest, right-most leaf of root.
    if (node == null)
    {
        var leaf = root.lastChild;
        while (leaf && leaf.lastChild)
            leaf = leaf.lastChild;
        return leaf;
    }

    // Otherwise step to the previous sibling's deepest descendant...
    if (node.previousSibling)
    {
        var prev = node.previousSibling;
        while (prev.lastChild)
            prev = prev.lastChild;
        return prev;
    }

    // ...or climb to the parent, stopping when we are back at root.
    return (node.parentNode != root) ? node.parentNode : null;
}
// True only for <li> elements sitting inside an element classed "point".
function isIncrementalNode(node)
{
    if (node.nodeType != 1)                       // only ELEMENT nodes qualify
        return false;
    if (node.nodeName.toLowerCase() != "li")
        return false;

    // Walk upwards towards <body>, looking for a "point" container.
    for (var ancestor = node; ancestor != document.body; ancestor = ancestor.parentNode)
    {
        if (hasClass(ancestor, "point"))
            return true;
    }
    return false;
}
// Advance through the document from `node`, returning the next incremental
// item, or the terminating node (null / detached) when none remain.
function nextIncrementalItem(node)
{
    do
    {
        node = nextNode(document.body, node);
    }
    while (node && node.parentNode && !isIncrementalNode(node));
    return node;
}
// Walk backwards through the document from `node`, returning the previous
// incremental item, or the terminating node (null / detached) when none remain.
function previousIncrementalItem(node)
{
    do
    {
        node = previousNode(document.body, node);
    }
    while (node && node.parentNode && !isIncrementalNode(node));
    return node;
}
// set visibility ("hidden"/"visible") on every incremental item of the
// current slide, i.e. elements inside a class="point" container.
function setVisibilityAllIncremental(value)
{
    for (var node = nextIncrementalItem(null); node; node = nextIncrementalItem(node))
    {
        node.style.visibility = value;
    }
}
// reveal the next hidden item on the slide
// node is null or the node that was last revealed
function revealNextItem(node)
{
    // Already past the last item: nothing more to reveal.
    if (lastShownEnd)
    {
        return null;
    }
    node = nextIncrementalItem(node);
    // We moved forward, so we are no longer at the very beginning.
    lastShownBeginning = false;
    if (node && node.nodeType == 1) // an element
        node.style.visibility = "visible";
    // Ran off the end: remember it so the next call short-circuits.
    if (! node)
    {
        lastShownEnd = true;
    }
    return node;
}
// exact inverse of revealNextItem(node)
function hidePreviousItem(node)
{
    // Already before the first item: nothing more to hide.
    if (lastShownBeginning)
    {
        return null;
    }
    else if (lastShownEnd)
    {
        // Coming back from past-the-end: step onto the last real item first.
        node = previousIncrementalItem(node);
    }
    if (node && node.nodeType == 1) // an element
        node.style.visibility = "hidden";
    node = previousIncrementalItem(node);
    lastShownEnd = false;
    // Ran off the front: remember it so the next call short-circuits.
    if (! node)
    {
        lastShownBeginning = true;
    }
    return node;
}
|
#!/bin/bash
# First CLI argument: directory containing per-host failed_login_data.txt files.
path=$1
here=$(pwd) # Mark the project directory
cd $path

# Take the hours data from failed logins and convert it into addRow javascript lines
# (field 3 is the hour; uniq -c gives the count per hour).
cat ./*/failed_login_data.txt | awk '{print $3;}' | sort | uniq -c |
awk '{print "data.addRow([\x27"$2"\x27, "$1"]);"}' > temp_addRow.txt

# Concatenates the header, footer, and temp file
$here/bin/wrap_contents.sh temp_addRow.txt $here/html_components/hours_dist hours_dist.html

# Removes temp file
rm temp_addRow.txt
|
#!/bin/bash
# Connectivity watchdog for a Raspberry Pi 3 Wi-Fi client: if the ping
# fails, bounce the network stack and log the outcome (reboot as last resort).
# Check.
ping -c2 www.google.com > /dev/null

# $? is return code of previous latest command.
# 0 is true.
# non-zero is false.
if [ $? != 0 ]
then
    # Wait for AP finishes boot.
    sleep 40

    # Turn off power management for RPi3 WiFi.
    sudo iw dev wlan0 set power_save off

    # RPi3 wifi dead after AP rebooting. Try following but workaround is nothing.
    sudo ifdown --force eth0
    sudo ifdown --force wlan0
    sudo systemctl daemon-reload
    #sudo /etc/init.d/networking restart
    sudo systemctl stop dhcpcd.service
    sudo systemctl restart networking.service
    sudo systemctl restart avahi-daemon.service
    sudo systemctl restart wpa_supplicant.service
    sudo systemctl start dhcpcd.service
    sudo ifup --force wlan0
    wpa_cli -i wlan0 reconfigure

    # Log
    #echo $(date; iwgetid -r) >> reconnect.log
    ifconfig >> reconnect.log
    #iwconfig >> reconnect.log
    #systemctl status >> recoonect.log
    sleep 5

    # Success is defined as wlan0 holding 192.168.2.1 again; otherwise reboot.
    # So give up.
    if ifconfig wlan0 | grep '192.168.2.1' > /dev/null
    then
        echo $(date; iwgetid -r; echo "OK.") >> reconnect.log
    else
        echo $(date; iwgetid -r; echo "NG. Need reboot.") >> reconnect.log
        sudo reboot
    fi
fi
|
package cn.finalteam.rxgalleryfinalprovider.di.scope;
import java.lang.annotation.Retention;
import javax.inject.Scope;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Description: custom JSR-330 scope annotation marking dependencies that
 * live for the media-grid component's lifetime; retained at runtime so the
 * DI container can read it via reflection.
 * Author:pengjianbo
 * Date:16/5/18 下午5:09
 */
@Scope
@Retention(RUNTIME)
public @interface MediaGridScope {
}
|
<reponame>bpbpublications/Building-Server-side-and-Microservices-with-Go
package data
import (
"building-restful-web-services-with-go/chapter2/library/server/dbserver"
"building-restful-web-services-with-go/chapter2/library/util"
"building-restful-web-services-with-go/chapter2/library/values"
"context"
"time"
)
// Package-level function aliases: exported vars pointing at the private
// implementations so tests can swap them for stubs.
var (
	// CreateBook creates new Book
	CreateBook = createBook
	// GetBook returns book
	GetBook = getBook
	// GetAllBooksForMember returns list of books for memeber
	GetAllBooksForMember = getAllBooksForMember
	// GetAllBooksForLibrerian returns list of books for librerian
	GetAllBooksForLibrerian = getAllBooksForLibrerian
	// UpdateBook updates book
	UpdateBook = updateBook
	// DeleteBook deletes book
	DeleteBook = deleteBook
	// GetBookStatus returns book status
	GetBookStatus = getBookStatus
	// ChangeBookStatus changes book status
	ChangeBookStatus = changeBookStatus
)
// createBook inserts a new book row and returns the populated entity.
// The DB runner is taken from the context; a new book starts available
// with no borrower.
func createBook(ctx context.Context, bookName, authorName, publisher string, description util.NullString) (response *BookEntity, err error) {
	dbRunner := ctx.Value(values.ContextKeyDbRunner).(dbserver.Runner)
	query := `
		insert into book(book_name, author_name, publisher, book_description)
		values ($1, $2, $3, $4)
		returning book_id, created_at`
	rows, err := dbRunner.Query(ctx, query, bookName, authorName, publisher, description)
	if err != nil {
		return
	}
	defer rows.Close()
	rr, err := dbserver.GetRowReader(rows)
	if err != nil {
		return
	}
	if rr.ScanNext() {
		response = &BookEntity{}
		response.BookID = rr.ReadByIdxString(0)
		response.BookName = bookName
		response.AuthorName = authorName
		response.Publisher = publisher
		response.Description = util.GetNullStringValue(description)
		response.Status = values.BookStatusAvailable
		// created_at and updated_at are identical for a fresh row, so both
		// are read from the same returned column (index 1).
		response.CreatedAt = rr.ReadByIdxTime(1)
		response.UpdatedAt = rr.ReadByIdxTime(1)
		response.BorrowerID = ""
	}
	err = rr.Error()
	return
}
// getBook loads the static details of one book by id.
// response stays nil (with nil err) when no row matches.
func getBook(ctx context.Context, bookID string) (response *BookDetails, err error) {
	dbRunner := ctx.Value(values.ContextKeyDbRunner).(dbserver.Runner)
	query := `
		select
			book_id as "BookID",
			book_name as "BookName",
			author_name as "AuthorName",
			publisher as "Publisher",
			book_description as "Description"
		from book
		where book_id = $1`
	rows, err := dbRunner.Query(ctx, query, bookID)
	if err != nil {
		return
	}
	defer rows.Close()
	rr, err := dbserver.GetRowReader(rows)
	if err != nil {
		return
	}
	if rr.ScanNext() {
		response = &BookDetails{}
		// Column aliases above match the struct field names.
		rr.ReadAllToStruct(response)
	}
	err = rr.Error()
	return
}
// getAllBooksForMember pages through AVAILABLE books whose name contains
// searchTerm; members never see status or borrower columns.
// NOTE(review): '%%' reaches SQL literally as two percent signs unless the
// query layer applies printf-style formatting — two LIKE wildcards behave
// like one, so the search still works, but confirm the intent.
func getAllBooksForMember(ctx context.Context, searchTerm string, rowOffset, rowLimit int) (response []*BookInfoMember, err error) {
	dbRunner := ctx.Value(values.ContextKeyDbRunner).(dbserver.Runner)
	query := `
		select
			book_id as "BookID",
			book_name as "BookName",
			author_name as "AuthorName",
			publisher as "Publisher"
		from book
		where book_name like '%%' || $1 || '%%' and book_status = $2
		offset $3
		limit $4`
	rows, err := dbRunner.Query(ctx, query, searchTerm, values.BookStatusAvailable, rowOffset, rowLimit)
	if err != nil {
		return
	}
	defer rows.Close()
	rr, err := dbserver.GetRowReader(rows)
	if err != nil {
		return
	}
	// Non-nil empty slice so callers (and JSON encoding) get [] not null.
	response = make([]*BookInfoMember, 0)
	for rr.ScanNext() {
		book := &BookInfoMember{}
		rr.ReadAllToStruct(book)
		response = append(response, book)
	}
	err = rr.Error()
	return
}
// getAllBooksForLibrerian pages through ALL books whose name contains
// searchTerm, including status and current borrower (left join: borrower
// is empty for available books). Name keeps the original spelling because
// the exported alias GetAllBooksForLibrerian refers to it.
func getAllBooksForLibrerian(ctx context.Context, searchTerm string, rowOffset, rowLimit int) (response []*BookInfoLibrarian, err error) {
	dbRunner := ctx.Value(values.ContextKeyDbRunner).(dbserver.Runner)
	query := `
		select
			b.book_id as "BookID",
			b.book_name as "BookName",
			b.author_name as "AuthorName",
			b.publisher as "Publisher",
			b.book_status as "Status",
			u.full_name as "Borrower"
		from book b
		left join library_user u on u.user_id = b.borrower_id
		where b.book_name like '%%' || $1 || '%%'
		offset $2
		limit $3`
	rows, err := dbRunner.Query(ctx, query, searchTerm, rowOffset, rowLimit)
	if err != nil {
		return
	}
	defer rows.Close()
	rr, err := dbserver.GetRowReader(rows)
	if err != nil {
		return
	}
	// Non-nil empty slice so callers (and JSON encoding) get [] not null.
	response = make([]*BookInfoLibrarian, 0)
	for rr.ScanNext() {
		book := &BookInfoLibrarian{}
		rr.ReadAllToStruct(book)
		response = append(response, book)
	}
	err = rr.Error()
	return
}
// updateBook rewrites the editable columns of a book and returns the new
// updated_at timestamp via the shared query helper.
func updateBook(ctx context.Context, bookID, bookName, authorName, publisher string, description util.NullString) (response time.Time, err error) {
	query := `
		update book
		set
			book_name = $1,
			author_name = $2,
			publisher = $3,
			book_description = $4
		where book_id = $5
		returning updated_at`
	return executeQueryWithTimeResponse(ctx, query, bookName, authorName, publisher, description, bookID)
}
// deleteBook removes a book row; response is the number of rows affected
// (0 when the id did not exist).
func deleteBook(ctx context.Context, bookID string) (response int64, err error) {
	query := `delete from book where book_id = $1`
	return executeQueryWithRowsAffected(ctx, query, bookID)
}
// getBookStatus returns the raw book_status value for one book id.
func getBookStatus(ctx context.Context, bookID string) (response int64, err error) {
	query := `select book_status from book where book_id = $1`
	return executeQueryWithInt64Response(ctx, query, bookID)
}
// changeBookStatus sets a book's status and borrower in one statement;
// pass a null userID to clear the borrower (e.g. on return).
func changeBookStatus(ctx context.Context, bookID string, status int, userID util.NullString) (err error) {
	dbRunner := ctx.Value(values.ContextKeyDbRunner).(dbserver.Runner)
	query := `
		update book
		set
			book_status = $1,
			borrower_id = $2
		where book_id = $3`
	_, err = dbRunner.Exec(ctx, query, status, userID, bookID)
	return
}
|
import axios from 'axios';
import type { AxiosResponse } from 'axios';
// Base URL comes from the build environment (CRA-style env var).
const BASE_URL = process.env.REACT_APP_API;

// Shared axios instance; 3 s default timeout for ordinary calls.
const api = axios.create({
  baseURL: BASE_URL,
  timeout: 3000,
});

// Pass-through interceptors: currently no-ops, kept as hooks for future
// auth headers / logging.
api.interceptors.request.use(
  (config) => config,
  (error) => Promise.reject(error)
);

api.interceptors.response.use(
  (response) => response,
  (error) => Promise.reject(error)
);

export default api;

export const Api = {
  // POST the documents to the textrank endpoint; per-call 30 s timeout
  // overrides the instance default because processing can be slow.
  textrank: <T>(data: string[]): Promise<AxiosResponse<T>> =>
    api.post('/data/', { data }, { timeout: 30000 }),
};
|
package openanonymizer.model.dataset;
import openanonymizer.model.wrapper.EntityWrapper;

import java.util.Collections;
import java.util.Iterator;
/**
 * Container for an empty data set: a stateless null-object implementation
 * of {@code PagedDataSet} whose every query answers "nothing here".
 *
 * @version 0.1
 * @since Open Anonymizer 1.0.0
 */
public class EmptyDataSet implements PagedDataSet {

    /** Factory method; the set is stateless, so a fresh instance is cheap. */
    public static EmptyDataSet build() {
        return new EmptyDataSet();
    }

    /** An empty set never has a next element. */
    @Override
    public boolean hasNext() {
        return false;
    }

    /**
     * Returns {@code null}: there is never an element to produce.
     * NOTE(review): callers are expected to check {@link #hasNext()} first.
     */
    @Override
    public EntityWrapper next() {
        return null;
    }

    @Override
    public long getOffset() {
        return 0;
    }

    @Override
    public int getPageNumber() {
        return 0;
    }

    @Override
    public int getPageSize() {
        return 0;
    }

    @Override
    public long getTotalItems() {
        return 0;
    }

    @Override
    public boolean hasNextPage() {
        return false;
    }

    /**
     * BUGFIX: previously returned {@code null}, which made any for-each loop
     * over this data set throw a NullPointerException. An empty iterator
     * honours the {@link Iterable} contract.
     */
    @Override
    public Iterator<EntityWrapper> iterator() {
        return Collections.emptyIterator();
    }

    @Override
    public long size() {
        return 0;
    }
}
|
import {
isGreaterThanLimit,
isLessThanLimit,
isLongerThanLimit,
isMailInvalid,
isNumberValid,
isShorterThanLimit,
isWrittenValueEmpty,
errorDataHandler,
} from 'simple-input-validators';
import {
LiveValidator,
ValidatorErrorProps,
HookProps,
ValidatorsRulesListInsideValidator
} from '@common-types';
import { isLiveValidatorEnable } from '@validators/helpers/is-live-validator-enable';
/**
 * @description
 * Live validator for the data typed into an input.
 *
 * @param {HookProps} hooksData - Data for the hooks (the control, its data, the form)
 *
 * @returns {{ValidatorErrorProps}}
 *
 */
export const validateWrittenData: LiveValidator = hooksData => {
    /**
     * 1. Pull the working data (the control the data passes through, the typed value, the whole form)
     * 2. Pull the validation rules (all validators)
     * 3. Pull the validator settings
     */
    const { currentControl, newValue } = hooksData,
        controlValidatorsRules = currentControl.validateRules as ValidatorsRulesListInsideValidator || {},
        {
            minValue: minValueRules,
            maxValue: maxValueRules,
            minLength: minLengthRules,
            maxLength: maxLengthRules,
            required: requiredRules,
            number: numberRules,
            email: emailRules,
        } = controlValidatorsRules,
        // Accumulator mutated by errorDataHandler as each rule below fires.
        errorData: ValidatorErrorProps = {
            hasError: false,
            hasErrorLockingSubmitBtn: false,
            shouldLockNotValidWrite: false,
            message: null,
            limit: null,
            showLiveErrorAfterFirstSubmit: false,
            hideErrorTimeout: null,
            showErrorTimeout: null,
        };

    /**
     * @description
     * If the new value has a suitable type
     */
    if (typeof newValue === 'string' || typeof newValue === 'number') {
        // NOTE(review): `dot`/`negative` are destructured but unused below;
        // the isNumberValid options are hard-coded — confirm the intent.
        const {dot = false, negative = false} = numberRules || {},
            isInputNumberValid = isNumberValid(newValue, {shouldLockFloatNumber: true, shouldLockNegativeNumber: false}),
            isInputNumberInvalid = !isInputNumberValid,
            isWrittenValueNotEmpty = !isWrittenValueEmpty(newValue),
            hasError = true;

        /**
         * @description
         * Enable live validators only for filled input
         */
        if (isWrittenValueNotEmpty) {
            /**
             * Live validator for less value limit
             */
            if (
                isInputNumberValid &&
                isLessThanLimit(newValue, minValueRules) &&
                isLiveValidatorEnable(minValueRules)
            ) {
                const hasErrorLockingSubmitBtn = minValueRules.shouldLockSubmitBtnWhenControlInvalid
                errorDataHandler(errorData, {
                    ...minValueRules,
                    hasErrorLockingSubmitBtn,
                    hasError,
                });
            }

            /**
             * Live validator for greater limit
             */
            if (
                isInputNumberValid &&
                isGreaterThanLimit(newValue, maxValueRules) &&
                isLiveValidatorEnable(maxValueRules)
            ) {
                const hasErrorLockingSubmitBtn = maxValueRules.shouldLockSubmitBtnWhenControlInvalid
                errorDataHandler(errorData, {
                    ...maxValueRules,
                    hasErrorLockingSubmitBtn,
                    hasError,
                });
            }

            /**
             * Live validator for shorter limit
             */
            if (
                isShorterThanLimit(newValue, minLengthRules) &&
                isLiveValidatorEnable(minLengthRules)
            ) {
                const hasErrorLockingSubmitBtn = minLengthRules.shouldLockSubmitBtnWhenControlInvalid
                errorDataHandler(errorData, {
                    ...minLengthRules,
                    hasErrorLockingSubmitBtn,
                    hasError,
                });
            }

            /**
             * Live validator for longer limit
             */
            if (
                isLongerThanLimit(newValue, maxLengthRules) &&
                isLiveValidatorEnable(maxLengthRules)
            ) {
                const hasErrorLockingSubmitBtn = maxLengthRules.shouldLockSubmitBtnWhenControlInvalid
                errorDataHandler(errorData, {
                    ...maxLengthRules,
                    hasErrorLockingSubmitBtn,
                    hasError,
                });
            }

            /**
             * Live validator for valid email
             */
            if (
                emailRules &&
                isMailInvalid(newValue) &&
                isLiveValidatorEnable(emailRules)
            ) {
                const hasErrorLockingSubmitBtn = emailRules.shouldLockSubmitBtnWhenControlInvalid
                errorDataHandler(errorData, {
                    ...emailRules,
                    hasErrorLockingSubmitBtn,
                    hasError,
                });
            }
        }

        /**
         * Live validator for required field
         * (runs even on empty input, unlike the group above)
         */
        if (
            requiredRules &&
            isWrittenValueEmpty(newValue) &&
            isLiveValidatorEnable(requiredRules)
        ) {
            const hasErrorLockingSubmitBtn = requiredRules.shouldLockSubmitBtnWhenControlInvalid
            errorDataHandler(errorData, {
                ...requiredRules,
                hasErrorLockingSubmitBtn,
                hasError,
            });
        }

        /**
         * Live validator for valid number
         */
        if (numberRules && isInputNumberInvalid) {
            const hasErrorLockingSubmitBtn = numberRules.shouldLockSubmitBtnWhenControlInvalid
            errorDataHandler(errorData, {
                ...numberRules,
                hasErrorLockingSubmitBtn,
                hasError,
            });
        }
    }
    return { errorData };
};
|
<reponame>Erwanncf/orange-smart-display-v2
// Global UI/application state for the smart-display dashboard.
// Sensor readings default to 'nc' ("not connected") until the first fetch.
// NOTE(review): myTimer is invoked here without an argument, so setTime is
// undefined inside it and the getSensors branch always runs — confirm intent.
var myVar = setInterval(myTimer, 30000);
var newDataFlag = 1; // presumably a "new data" flag — TODO confirm, unused in this view
var temperatur = 'nc'; // temperature reading
var Humidite='nc'; // humidity reading
var PsAtmo= 'nc'; // atmospheric pressure
var Luminosite = 'nc'; // luminosity
var bruit = 'nc'; // noise level
var Altitude = 'nc'; // altitude
var tempCaf = 'nc'; // coffee-machine temperature
var heure= 'nc'; // hour of the last processed sample
var Message = 'nc'; // free-form message from the device
var IP = 'nc'; // device IP address
var Box = 'nc'; // box identifier
var d= new Date(); // date captured once at page load (never refreshed)
// countdown value (initially defined as 0 in the HTML); reset to 30 on fresh data
var h = 30;
var dateData; // timestamp of the latest sample
var dateDataMemory ; // previous timestamp, used to detect new samples
var etatCaf = "Eteinte"; // coffee-machine state (user-facing, French)
var Occupation = "Non occupée"; // room occupancy label (user-facing, French)
var c = false; // true while the coffee machine is considered on
var timeOut = 0; // consecutive polls without fresh data
var EtatDisplay = "Connecté"; // connectivity status label (user-facing, French)
// WonderPush web-push SDK bootstrap: standard async loader snippet that
// injects the SDK <script> tag once and queues API calls made before it loads.
(function(w, d, s, id, n)
{
w[n] = w[n] || {q: [],
init: function(o) {w[n].initOpts = o;},
ready: function(c) {w[n].q.push(c);}};
var js, fjs = d.getElementsByTagName(s)[0];
if (d.getElementById(id)) {return;}
js = d.createElement(s); js.id = id;
js.src = "https://cdn.by.wonderpush.com/sdk/1.1/wonderpush-loader.min.js";
fjs.parentNode.insertBefore(js, fjs);
}(window, document, "script", "wonderpush-jssdk-loader", "WonderPush"));
// Initialise WonderPush with this site's public web key and opt-in dialog text.
WonderPush.init({
webKey: "f8af2631e9bc3fede55d22dfa04bd264a2cba99f467e7dd3046295ae8a140b4c",
optInOptions: {
// You can modify or translate the following:
modalBoxMessage: "We will send you personalized notifications.<br/>You can always unsubscribe at any time.",
modalBoxButton: "Got it!",
externalBoxProcessingMessage: "Subscribing...",
externalBoxSuccessMessage: "Thanks for subscribing!",
externalBoxFailureMessage: "Sorry, something went wrong.",
externalBoxTooLongHint: "Poor connection or private browsing?",
externalBoxCloseHint: "Close"
}
});
/**
 * Pushes every global sensor reading into its dashboard element.
 * The table below preserves the original assignment order; target12 is the
 * link to the device's own web page. (targetElementId is kept for
 * call-compatibility; it was never used.)
 */
function displayMyVar (targetElementId)
{
    var bindings = {
        target1: temperatur,
        target2: Humidite,
        target3: PsAtmo,
        target5: bruit,
        target6: tempCaf,
        target7: Message,
        target8: IP,
        target9: etatCaf,
        target10: Occupation,
        target11: EtatDisplay,
        target13: Box
    };
    for (var id in bindings) {
        document.getElementById(id).innerHTML = bindings[id];
    }
    document.getElementById('target12').href = "http://" + IP + "/";
}
/**
 * Fetches the newest sample from the Orange Live Objects data stream and
 * refreshes all dashboard globals, then re-renders via displayMyVar().
 * Network + DOM side effects only; no return value.
 */
function getSensors(dateData)
{
    var urlSensors = 'https://liveobjects.orange-business.com/api/v0/data/streams/urn%3Alo%3Ansid%3AsmartDisplay%3A1856745!Data?limit=100';
    var req = new XMLHttpRequest();
    req.onreadystatechange = function (aEvt)
    {
        if (req.readyState == 4)
        {
            if (req.status == 200)
            {
                var ArrData = JSON.parse(req.responseText);
                var LastData = ArrData[0]; // newest sample first
                var AllValue = LastData.value;
                console.log(AllValue);
                dateData = LastData.timestamp;
                document.getElementById("demo").innerHTML = new Date(dateData);
                // NOTE(review): 'd' is built once at page load, so this records
                // the startup hour, not the current one — confirm intent.
                heure = d.getHours();
                if (dateData != dateDataMemory)
                {
                    // Fresh sample: chime when the coffee machine is hot or
                    // noise was detected.
                    // FIX: logical '||' (short-circuit) instead of bitwise '|'.
                    if (tempCaf > 50 || bruit > 0)
                    {
                        document.getElementById('xyz').play();
                    }
                    h = 30; // restart the 30-tick countdown
                    dateDataMemory = dateData;
                }
                temperatur = AllValue.Temp;
                Humidite = AllValue.Hygro;
                PsAtmo = AllValue.PsAt;
                Luminosite = AllValue.Lum;
                bruit = AllValue.Bruit;
                Altitude = AllValue.Alt;
                tempCaf = AllValue.tempCaf;
                Message = AllValue.Message;
                IP = AllValue.IP;
                Box = AllValue.Box;
                // Above 50°C the coffee machine is considered on.
                if (tempCaf > 50)
                {
                    etatCaf = "Allumée"
                    c = true;
                }
                else
                {
                    etatCaf = "Éteinte"
                    c = false;
                }
                if (bruit > 1)
                {
                    Occupation = "Occupée"
                }
                else
                {
                    Occupation = "Non occupée"
                }
                if (c == true)
                {
                    document.getElementById("img").setAttribute("src","café.gif");
                }
                // 30 polls without fresh data => consider the display offline.
                if (timeOut < 30)
                {
                    EtatDisplay = "Connecté";
                }
                else
                {
                    EtatDisplay = "Déconnecté";
                }
                displayMyVar ();
            }
            else
            {
                console.log("Erreur pendant le chargement de la page.\n");
            }
        }
    };
    req.open('GET', urlSensors , true);
    req.setRequestHeader("X-API-KEY", '<KEY>');
    req.send(null);
}
// One-second tick: while the countdown 'h' is above setTime, just decrement
// and display it; once it reaches the floor, poll the server again.
function myTimer(setTime)
{
if ( h>setTime ) {
// decrement the countdown (the original comment said "increment")
h--;
// display the new countdown value
document.getElementById("heure").innerHTML = h;
timeOut = 0;
}
else
{
getSensors();
timeOut=timeOut+1;
}
}
// Trigger the countdown tick every second.
// FIX: pass a real function to setInterval instead of a string (implied eval).
window.setInterval(function () { myTimer(0); }, 1000);
0
|
#!/bin/bash
set -e
########################################
# Wrapper around `cabal` that runs the chosen (sub)command inside the
# environment provided by ./environment.sh.
#
# e.g.
#   ./cabal.sh                      # default: cabal new-build
#   ./cabal.sh new build            # cabal new-build
#   ./cabal.sh old build            # cabal build
#   ./cabal.sh new-repl
#   ./cabal.sh repl
#   ./cabal.sh 802                  # extra args are forwarded to environment.sh
#   ./cabal.sh --argstr compiler ghcjs
#   ./cabal.sh old 841 --arg isProfiled true
#   ./cabal.sh --arg withHoogle true --arg isProfiled true
########################################
DEFAULT_COMMAND="cabal new-build"
COMMAND="$DEFAULT_COMMAND"
########################################
# Aliases for cabal new-versus-old commands; remaining args go to environment.sh.
case "$1" in
new)
    # "new <sub>" => cabal new-<sub>; ${2:?} aborts if the subcommand is missing.
    SUBCOMMAND="${2:?}"
    COMMAND="cabal new-$SUBCOMMAND"
    shift 2
    ;;
old)
    # "old <sub>" => plain cabal <sub>.
    SUBCOMMAND="${2:?}"
    COMMAND="cabal $SUBCOMMAND"
    shift 2
    ;;
build|repl|test|bench|run|haddock)
    SUBCOMMAND="${1}"
    COMMAND="cabal $SUBCOMMAND"
    shift 1
    ;;
new-build|new-repl|new-test|new-bench|new-run|new-haddock)
    SUBCOMMAND="${1}"
    COMMAND="cabal $SUBCOMMAND"
    shift 1
    ;;
*)
    ;;
esac
########################################
echo "$@"
echo
# FIX: quote "$@" so arguments containing spaces survive word splitting.
./environment.sh "$@" --run "$COMMAND"
|
<filename>lib/deepComparator.js
/**
* This algorithm will be used to deep comparison of n-level nested objects.
*
* Created by <NAME> on 1/7/15.
*/
'use strict';
var debug = require('debuglog')('comparator');
/**
* This function will accept a n-level nested object as source and then return the flattened object as an array.
*
* @param sourceObj
* @returns flattendObject
*/
/**
 * Flattens an n-level nested object into an array of { key, value } leaf
 * entries, where each key is the dotted path of the leaf.
 *
 * @param sourceObj nested object to flatten
 * @returns array of { key, value } entries
 */
var flattenObject = function(sourceObj) {
    // Delegate to the recursive helper with a fresh path stack and result list.
    return flattenObjectHelper(sourceObj, [], []);
};
/**
* This is the helper function which is called recursively to find all the nested elements in an object and the object in the form of
* flattened object.
*
* @param obj
* @param path
* @param objectMap
* @returns flattened object
*/
function flattenObjectHelper(obj, path, objectMap) {
if(!obj) {
debug('Undefined object encountered!');
throw new Error('Undefined object encountered');
}
if(typeof obj !== 'object') {
throw new Error('Only object types are permitted for this function');
}
for(var x in obj) {
var dataType = typeof obj[x];
if(dataType === 'function') {
var exception = 'these are data objects, functions are not allowed in data objects';
debug(exception);
throw new Error(exception);
}
if(dataType === 'object') {
debug('Object type encountered!!');
path.push(x);
objectMap = flattenObjectHelper(obj[x], path, objectMap); //recursive function call for nested objects
//pop last path element from array if there are no embedded objects
var hasEmbeddedObject = false;
for(var t in obj) {
if(typeof t === 'object') {
hasEmbeddedObject = true;
return hasEmbeddedObject;
}
}
if(!hasEmbeddedObject) {
path.pop();
}
}
var fullyQualifiedName;
var tempObj = {}; //temporary object to store key-value object key value pairs for pushing it to final array
//create qualified names from elements stored in path array
if(path.length > 0) {
var objectPath = path[0];
for(var p = 1; p < path.length; p++) {
objectPath += '.' + path[p];
}
if(obj[x] !== 'object') {
fullyQualifiedName = objectPath + '.' + x;
debug('Fully Qualified Path of element is: ' + fullyQualifiedName);
tempObj.key = fullyQualifiedName;
}
}
//print the element if it's not an object
if(typeof obj[x] !== 'object') {
debug(x + ' => ' + obj[x]);
tempObj.value = obj[x];
if(tempObj.key === undefined) {
tempObj.key = x;
}
objectMap.push(tempObj); //push tempObj to array for final response
}
}
debug('Here is final object map:');
for(var i = 0; i < objectMap.length; i++) {
debug(objectMap[i]);
}
return objectMap;
}
/**
* This is used to deep compare two flattened objects and return boolean based on comparison result.
*
* @param list1
* @param list2
* @returns {boolean}
*/
/**
 * Deep-compares two flattened entry lists for set equality.
 *
 * @param list1 first flattened list
 * @param list2 second flattened list
 * @returns {boolean} true when each list is a subset of the other
 */
var compareFlattenedArray = function(list1, list2) {
    if (!list1) {
        throw new Error('Can\'t proceed comparison with undefined objects');
    }
    if (!list2) {
        throw new Error('Can\'t proceed comparison with undefined objects');
    }
    // Equality as mutual inclusion.
    var forward = compareFlattenedArrayHelper(list1, list2);
    return forward && compareFlattenedArrayHelper(list2, list1);
};
/**
* Private helper method to determine if list1 is a subset of list2.
*
* @param list1
* @param list2
* @returns {boolean}
*/
var compareFlattenedArrayHelper = function(list1, list2) {
var keyList = [];
var valueList = [];
//explode the object from list2 and store them in 2 separate arrays.
// one containing key and other containing value.
// this is done to do easy comparison of list1 to list2 objects.
for(var x in list2) {
keyList.push(list2[x].key);
valueList.push(list2[x].value);
}
var keyFound = false;
for(var m in list1) {
keyFound = false;
for(var i = 0; i < keyList.length; i++) {
if(list1[m].key === keyList[i]) {
debug('key matched... ' + list1[m].key);
if(list1[m].value === valueList[i]) {
debug('Value matched... ' + list1[m].value);
keyFound = true;
}
}
if(keyFound) {
break;
}
}
if(!keyFound) {
break;
}
}
return keyFound;
};
// Public API: flatten a nested object into { key, value } leaf entries, and
// deep-compare two flattened lists for set equality.
exports.flattenObject = flattenObject;
exports.compareFlattenedArray = compareFlattenedArray;
|
class Circle:
    """Circle with a fixed radius, exposing its area."""

    def __init__(self, radius):
        # Radius of the circle (assumed non-negative — not validated here).
        self._radius = radius

    def area(self):
        """Return the area (pi * r**2).

        FIX: uses math.pi instead of the low-precision literal 3.14.
        """
        import math  # local import keeps this snippet self-contained
        return (self._radius ** 2) * math.pi
# Create a Circle instance with radius 5
circle = Circle(5)
# Calculate and print the area of the circle
print("Area of circle:", circle.area())
|
<gh_stars>1-10
/**
 * Minimal manual driver: prints the decimal digits of a sample value.
 * NOTE(review): the Decimals class is defined elsewhere in the project.
 */
public class DecimalsTest {
    public static void main(String[] args) {
        Decimals.printDecimalDigits(37.2);
    }
}
|
#!/usr/bin/env sh
# generated from dynamic_reconfigure/cmake/setup_custom_pythonpath.sh.in
# FIX: export PYTHONPATH — a plain assignment is not inherited by the exec'd
# process (the upstream dynamic_reconfigure template uses `export`).
export PYTHONPATH=/home/chenz16/Desktop/Rover/devel/lib/python3/dist-packages:$PYTHONPATH
exec "$@"
|
#!/bin/bash
# Forwards the bookstore backend pod's port 80 to localhost:8082.
# Usage: ./script.sh [backend-name]   (defaults to bookstore-v2)
# shellcheck disable=SC1091
source .env

backend="${1:-bookstore-v2}"

# FIX: the old `[ -z "$backend" ]` usage check was dead code — the :- default
# guarantees backend is never empty.  Guard the pod lookup instead, and take
# only the first running pod so multi-replica deployments don't break kubectl.
POD="$(kubectl get pods --selector app="$backend" -n "$BOOKSTORE_NAMESPACE" --no-headers | grep 'Running' | awk 'NR==1 {print $1}')"

if [ -z "$POD" ]; then
    echo "No running pod found for app=$backend in namespace $BOOKSTORE_NAMESPACE" >&2
    exit 1
fi

kubectl describe pod "$POD" -n "$BOOKSTORE_NAMESPACE"
kubectl port-forward "$POD" -n "$BOOKSTORE_NAMESPACE" 8082:80
|
<filename>videoEditSdk/src/main/java/com/video/process/preview/custfilter/GlSoulOutFilter.java<gh_stars>10-100
package com.video.process.preview.custfilter;
import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLES30;
import com.video.process.R;
import com.video.process.preview.filter.FilterType;
import com.video.process.preview.filter.GlFilter;
/**
 * "Soul out" GL filter: each frame uploads a {@code scale} uniform that eases
 * from 1.0 up to 1.5 and then snaps back, producing a pulsing zoom effect.
 */
public class GlSoulOutFilter extends GlFilter {
    // Current value pushed to the shader's "scale" uniform each frame.
    float mScale = 0f;
    // Animation phase in [0, 1], advanced by 0.04 per frame (~25-frame cycle).
    float mOffset = 0f;
    // Handle of the "scale" uniform in the compiled program.
    private int mScaleHandle;

    public GlSoulOutFilter(Context context) {
        // Default vertex shader + dedicated "soul out" fragment shader.
        super(context, R.raw.def_vertext, R.raw.fragment_soulout);
    }

    @Override
    public FilterType getFilterType() {
        return FilterType.SPX_SOULOUT;
    }

    @Override
    public void initProgramHandle() {
        super.initProgramHandle();
        // NOTE(review): GLES30 here vs GLES20 in onDraw — works because GLES30
        // extends GLES20, but is inconsistent; confirm the intended API level.
        mScaleHandle = GLES30.glGetUniformLocation(mProgramHandle, "scale");
    }

    @Override
    public void onDraw() {
        // Ease the scale between 1.0 and 1.5, wrapping the phase after 1.0
        // (the resulting jump back to 1.0 is the intended "snap" of the effect).
        mScale = 1.0f + 0.5f * getInterpolation(mOffset);
        mOffset += 0.04f;
        if (mOffset > 1.0f) {
            mOffset = 0.0f;
        }
        GLES20.glUniform1f(mScaleHandle, mScale);
    }

    // Cosine ease-in-out mapping [0, 1] -> [0, 1].
    private float getInterpolation(float input) {
        return (float) (Math.cos((input + 1) * Math.PI) / 2.0f) + 0.5f;
    }
}
|
/*
* Copyright (c) 2012, <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name "TwelveMonkeys" nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.twelvemonkeys.lang;
import org.junit.Test;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;
import static org.junit.Assert.*;
/**
* ValidateTest
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @author last modified by $Author: haraldk$
* @version $Id: ValidateTest.java,v 1.0 11.04.12 09:06 haraldk Exp$
*/
public class ValidateTest {
// Not null
@Test
public void testNotNull() {
assertEquals("foo", Validate.notNull("foo"));
}
@Test(expected = IllegalArgumentException.class)
public void testNotNullNull() {
Validate.notNull(null);
}
@Test
public void testNotNullWithParameter() {
assertEquals("foo", Validate.notNull("foo", "bar"));
}
@Test(expected = IllegalArgumentException.class)
public void testNotNullWithParameterNull() {
try {
Validate.notNull(null, "xyzzy");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("xyzzy"));
throw e;
}
}
@Test
public void testNotNullWithNullParameter() {
Validate.notNull("foo", null);
}
@Test(expected = IllegalArgumentException.class)
public void testNotNullWithNullParameterNull() {
Validate.notNull(null, null);
}
// Not empty (CharSequence)
@Test
public void testNotEmptyCharSequence() {
assertEquals("foo", Validate.notEmpty("foo"));
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCharSequenceNull() {
Validate.notEmpty((CharSequence) null);
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCharSequenceEmpty() {
Validate.notEmpty("");
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCharSequenceOnlyWS() {
Validate.notEmpty(" \t\r");
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCharSequenceNullWithParameter() {
try {
Validate.notEmpty((CharSequence) null, "xyzzy");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("xyzzy"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCharSequenceEmptyWithParameter() {
try {
Validate.notEmpty("", "xyzzy");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("xyzzy"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCharSequenceOnlyWSWithParameter() {
try {
Validate.notEmpty(" \t", "xyzzy");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("xyzzy"));
throw e;
}
}
@Test
public void testNotEmptyCharSequenceWithParameter() {
assertEquals("foo", Validate.notEmpty("foo", "bar"));
}
@Test
public void testNotEmptyCharSequenceWithParameterNull() {
assertEquals("foo", Validate.notEmpty("foo", null));
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCharSequenceNullWithParameterNull() {
try {
Validate.notEmpty((CharSequence) null, null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("parameter"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCharSequenceEmptyWithParameterNull() {
try {
Validate.notEmpty("", null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("parameter"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCharSequenceOnlyWSWithParameterNull() {
try {
Validate.notEmpty(" \t\t \n", null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("parameter"));
throw e;
}
}
// Not empty (array)
@Test
public void testNotEmptyArray() {
Integer[] array = new Integer[2];
assertSame(array, Validate.notEmpty(array));
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyArrayNull() {
Validate.notEmpty((Object[]) null);
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyArrayEmpty() {
Validate.notEmpty(new String[0]);
}
@Test
public void testNotEmptyArrayParameter() {
Integer[] array = new Integer[2];
assertSame(array, Validate.notEmpty(array, "bar"));
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyArrayNullParameter() {
try {
Validate.notEmpty((Object[]) null, "xyzzy");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("xyzzy"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyArrayEmptyParameter() {
try {
Validate.notEmpty(new Float[0], "xyzzy");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("xyzzy"));
throw e;
}
}
@Test
public void testNotEmptyArrayWithParameterNull() {
Byte[] array = new Byte[1];
assertSame(array, Validate.notEmpty(array, null));
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyArrayNullWithParameterNull() {
try {
Validate.notEmpty((Object[]) null, null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("parameter"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyArrayEmptyWithParameterNull() {
try {
Validate.notEmpty(new Object[0], null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("parameter"));
throw e;
}
}
// Not empty (Collection)
@Test
public void testNotEmptyCollection() {
Collection<Integer> collection = Arrays.asList(new Integer[2]);
assertSame(collection, Validate.notEmpty(collection));
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCollectionNull() {
Validate.notEmpty((Collection<?>) null);
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCollectionEmpty() {
Validate.notEmpty(Collections.emptySet());
}
@Test
public void testNotEmptyCollectionParameter() {
List<Integer> collection = Collections.singletonList(1);
assertSame(collection, Validate.notEmpty(collection, "bar"));
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCollectionNullParameter() {
try {
Validate.notEmpty((Collection<?>) null, "xyzzy");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("xyzzy"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCollectionEmptyParameter() {
try {
Validate.notEmpty(new ArrayList<Object>(), "xyzzy");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("xyzzy"));
throw e;
}
}
@Test
public void testNotEmptyCollectionWithParameterNull() {
Set<Byte> collection = Collections.singleton((byte) 1);
assertSame(collection, Validate.notEmpty(collection, null));
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCollectionNullWithParameterNull() {
try {
Validate.notEmpty((Collection<?>) null, null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("parameter"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyCollectionEmptyWithParameterNull() {
    try {
        // FIX: pass a genuinely EMPTY collection — this test was a copy/paste
        // of the null-collection case and never exercised the empty path.
        Validate.notEmpty(new ArrayList<Object>(), null);
    }
    catch (IllegalArgumentException e) {
        assertTrue(e.getMessage().contains("parameter"));
        throw e;
    }
}
// Not empty (Map)
@Test
public void testNotEmptyMap() {
Map<Integer, ?> map = new HashMap<Integer, Object>() {{
put(1, null);
put(2, null);
}};
assertSame(map, Validate.notEmpty(map));
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyMapNull() {
Validate.notEmpty((Map<?, ?>) null);
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyMapEmpty() {
Validate.notEmpty(Collections.emptyMap());
}
@Test
public void testNotEmptyMapParameter() {
Map<Integer, ?> map = Collections.singletonMap(1, null);
assertSame(map, Validate.notEmpty(map, "bar"));
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyMapNullParameter() {
try {
Validate.notEmpty((Map<?, ?>) null, "xyzzy");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("xyzzy"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyMapEmptyParameter() {
try {
Validate.notEmpty(new HashMap<Object, Object>(), "xyzzy");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("xyzzy"));
throw e;
}
}
@Test
public void testNotEmptyMapWithParameterNull() {
Map<Byte, Object> map = Collections.singletonMap((byte) 1, null);
assertSame(map, Validate.notEmpty(map, null));
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyMapNullWithParameterNull() {
try {
Validate.notEmpty((Map<?, ?>) null, null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("parameter"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNotEmptyMapEmptyWithParameterNull() {
    try {
        // FIX: pass a genuinely EMPTY map — this test was a copy/paste of the
        // null-map case and never exercised the empty path.
        Validate.notEmpty(new HashMap<Object, Object>(), null);
    }
    catch (IllegalArgumentException e) {
        assertTrue(e.getMessage().contains("parameter"));
        throw e;
    }
}
// No null elements (array)
@Test
public void testNoNullElementsArray() {
String[] array = new String[] {"foo", "bar", "baz"};
assertSame(array, Validate.noNullElements(array));
}
@Test
public void testNoNullElementsArrayEmpty() {
Object[] array = new Object[0];
assertSame(array, Validate.noNullElements(array));
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsArrayNull() {
Validate.noNullElements((Object[]) null);
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsArrayNullElements() {
Validate.noNullElements(new Object[3]);
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsArrayMixed() {
String[] array = new String[] {"foo", null, "bar"};
Validate.noNullElements(array);
}
@Test
public void testNoNullElementsArrayParameter() {
String[] array = new String[] {"foo", "bar", "baz"};
assertSame(array, Validate.noNullElements(array, "foo"));
}
@Test
public void testNoNullElementsArrayEmptyParameter() {
Object[] array = new Object[0];
assertSame(array, Validate.noNullElements(array, "foo"));
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsArrayNullParameter() {
try {
Validate.noNullElements((Object[]) null, "foo");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("foo"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsArrayNullElementsParameter() {
try {
Validate.noNullElements(new Object[3], "foo");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("foo"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsArrayMixedParameter() {
try {
Validate.noNullElements(new String[] {"foo", null, "bar"}, "foo");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("foo"));
throw e;
}
}
@Test
public void testNoNullElementsArrayParameterNull() {
String[] array = new String[] {"foo", "bar", "baz"};
assertSame(array, Validate.noNullElements(array, null));
}
@Test
public void testNoNullElementsArrayEmptyParameterNull() {
Object[] array = new Object[0];
assertSame(array, Validate.noNullElements(array, null));
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsArrayNullParameterNull() {
try {
Validate.noNullElements((Object[]) null, null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("method parameter"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsArrayNullElementsParameterNull() {
try {
Validate.noNullElements(new Object[3], null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("method parameter"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsArrayMixedParameterNull() {
try {
Validate.noNullElements(new String[] {"foo", null, "bar"}, null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("method parameter"));
throw e;
}
}
// No null elements (Collection)
@Test
public void testNoNullElementsCollection() {
List<String> collection = Arrays.asList("foo", "bar", "baz");
assertSame(collection, Validate.noNullElements(collection));
}
@Test
public void testNoNullElementsCollectionEmpty() {
Set<?> collection = Collections.emptySet();
assertSame(collection, Validate.noNullElements(collection));
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsCollectionNull() {
Validate.noNullElements((Collection<?>) null);
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsCollectionNullElements() {
Validate.noNullElements(Arrays.asList(null, null, null));
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsCollectionMixed() {
Validate.noNullElements(Arrays.asList("foo", null, "bar"));
}
@Test
public void testNoNullElementsCollectionParameter() {
List<String> collection = Arrays.asList("foo", "bar", "baz");
assertSame(collection, Validate.noNullElements(collection, "foo"));
}
@Test
public void testNoNullElementsCollectionEmptyParameter() {
List<?> collection = new CopyOnWriteArrayList<Object>();
assertSame(collection, Validate.noNullElements(collection, "foo"));
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsCollectionNullParameter() {
try {
Validate.noNullElements((Set<?>) null, "foo");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("foo"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsCollectionNullElementsParameter() {
try {
Validate.noNullElements(Collections.singletonList(null), "foo");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("foo"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsCollectionMixedParameter() {
try {
Validate.noNullElements(Arrays.asList("foo", null, "bar"), "foo");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("foo"));
throw e;
}
}
@Test
public void testNoNullElementsCollectionParameterNull() {
List<String> collection = Arrays.asList("foo", "bar", "baz");
assertSame(collection, Validate.noNullElements(collection, null));
}
@Test
public void testNoNullElementsCollectionEmptyParameterNull() {
Collection<?> collection = Collections.emptySet();
assertSame(collection, Validate.noNullElements(collection, null));
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsCollectionNullParameterNull() {
try {
Validate.noNullElements((ArrayList<?>) null, null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("method parameter"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsCollectionNullElementsParameterNull() {
try {
Validate.noNullElements(Collections.singleton(null), null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("method parameter"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullElementsCollectionMixedParameterNull() {
Collection<?> collection = Arrays.asList("foo", null, "bar");
try {
Validate.noNullElements(collection, null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("method parameter"));
throw e;
}
}
// No null values (Map)
@Test
public void testNoNullValuesMap() {
Map<String, ?> map = new HashMap<String, Object>() {{
put("foo", 1);
put("bar", 2);
put("baz", 3);
}};
assertSame(map, Validate.noNullValues(map));
}
@Test
public void testNoNullValuesEmpty() {
Map<?, ?> map = Collections.emptyMap();
assertSame(map, Validate.noNullValues(map));
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullValuesNull() {
Validate.noNullValues((Map<?, ?>) null);
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullValuesNullElements() {
Validate.noNullValues(Collections.singletonMap("foo", null));
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullValuesMixed() {
Validate.noNullValues(new HashMap<String, Object>() {{
put("foo", 1);
put(null, null);
put("baz", null);
}});
}
@Test
public void testNoNullValuesParameter() {
Map<String, ?> map = new HashMap<String, Object>() {{
put("foo", 1);
put("bar", 2);
put("baz", 3);
}};
assertSame(map, Validate.noNullValues(map, "foo"));
}
@Test
public void testNoNullValuesEmptyParameter() {
Map<?, ?> map = new HashMap<Object, Object>();
assertSame(map, Validate.noNullValues(map, "foo"));
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullValuesNullParameter() {
try {
Validate.noNullValues((Map<?, ?>) null, "foo");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("foo"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullValuesNullElementsParameter() {
try {
Validate.noNullValues(Collections.singletonMap("bar", null), "foo");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("foo"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullValuesMixedParameter() {
try {
Validate.noNullValues(new HashMap<String, Object>() {{
put("foo", 1);
put(null, null);
put("bar", null);
}}, "foo");
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("foo"));
throw e;
}
}
@Test
public void testNoNullValuesParameterNull() {
Map<String, ?> map = new HashMap<String, Object>() {{
put("foo", 1);
put("bar", 2);
put("baz", 3);
}};
assertSame(map, Validate.noNullValues(map, null));
}
@Test
public void testNoNullValuesEmptyParameterNull() {
Map<?, ?> map = Collections.emptyMap();
assertSame(map, Validate.noNullValues(map, null));
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullValuesNullParameterNull() {
try {
Validate.noNullValues((Map<?, ?>) null, null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("method parameter"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullValuesNullElementsParameterNull() {
try {
Validate.noNullValues(Collections.singletonMap(null, null), null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("method parameter"));
throw e;
}
}
@Test(expected = IllegalArgumentException.class)
public void testNoNullValuesMixedParameterNull() {
try {
Validate.noNullValues(new HashMap<String, Object>() {{
put("foo", 1);
put(null, null);
put("bar", null);
}}, null);
}
catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("method parameter"));
throw e;
}
}
    // No null keys (Map)
    // Validate.noNullKeys(Map): same contract as noNullValues, but for keys.
    // Overloads: bare, and with a parameter name (null name -> default wording).

    @Test
    public void testNoNullKeysMap() {
        // All keys non-null: passes, same instance returned.
        Map<String, ?> map = new HashMap<String, Object>() {{
            put("foo", 1);
            put("bar", 2);
            put("baz", 3);
        }};
        assertSame(map, Validate.noNullKeys(map));
    }

    @Test
    public void testNoNullKeysEmpty() {
        // Empty map trivially has no null keys.
        Map<?, ?> map = Collections.emptyMap();
        assertSame(map, Validate.noNullKeys(map));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testNoNullKeysNull() {
        // A null map is itself rejected.
        Validate.noNullKeys((Map<?, ?>) null);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testNoNullKeysNullElements() {
        // A single entry with a null key is rejected even if the value is non-null.
        Validate.noNullKeys(Collections.singletonMap(null, "foo"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testNoNullKeysMixed() {
        // One null key among valid keys is enough to reject.
        Validate.noNullKeys(new HashMap<String, Object>() {{
            put("foo", 1);
            put(null, null);
            put("baz", null);
        }});
    }

    @Test
    public void testNoNullKeysParameter() {
        Map<String, ?> map = new HashMap<String, Object>() {{
            put("foo", 1);
            put("bar", 2);
            put("baz", 3);
        }};
        assertSame(map, Validate.noNullKeys(map, "foo"));
    }

    @Test
    public void testNoNullKeysEmptyParameter() {
        // NOTE(review): uses an empty HashMap where sibling tests use
        // Collections.emptyMap() — presumably equivalent; confirm intent.
        Map<?, ?> map = new HashMap<Object, Object>();
        assertSame(map, Validate.noNullKeys(map, "foo"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testNoNullKeysNullParameter() {
        // Null map: the rejection message must name the parameter "foo".
        try {
            Validate.noNullKeys((Map<?, ?>) null, "foo");
        }
        catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("foo"));
            throw e;
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void testNoNullKeysNullElementsParameter() {
        try {
            Validate.noNullKeys(Collections.singletonMap(null, "bar"), "foo");
        }
        catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("foo"));
            throw e;
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void testNoNullKeysMixedParameter() {
        try {
            Validate.noNullKeys(new HashMap<String, Object>() {{
                put("foo", 1);
                put(null, null);
                put("bar", null);
            }}, "foo");
        }
        catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("foo"));
            throw e;
        }
    }

    @Test
    public void testNoNullKeysParameterNull() {
        // Null parameter name on a valid map: still passes.
        Map<String, ?> map = new HashMap<String, Object>() {{
            put("foo", 1);
            put("bar", 2);
            put("baz", 3);
        }};
        assertSame(map, Validate.noNullKeys(map, null));
    }

    @Test
    public void testNoNullKeysEmptyParameterNull() {
        Map<?, ?> map = Collections.emptyMap();
        assertSame(map, Validate.noNullKeys(map, null));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testNoNullKeysNullParameterNull() {
        // Null map with null name: default "method parameter" wording.
        try {
            Validate.noNullKeys((Map<?, ?>) null, null);
        }
        catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("method parameter"));
            throw e;
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void testNoNullKeysNullElementsParameterNull() {
        try {
            Validate.noNullKeys(Collections.singletonMap(null, null), null);
        }
        catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("method parameter"));
            throw e;
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void testNoNullKeysMixedParameterNull() {
        try {
            Validate.noNullKeys(new HashMap<String, Object>() {{
                put("foo", 1);
                put(null, null);
                put("bar", null);
            }}, null);
        }
        catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("method parameter"));
            throw e;
        }
    }
    // Is true
    // Validate.isTrue(condition, messageFormat[, value]): passes the condition
    // through when true; when false, throws with the condition (or the supplied
    // value) formatted into the message. Null formats fall back to a default
    // message containing the offending value.

    @Test
    public void testIsTrue() {
        // True condition: returned as-is, format string unused.
        assertTrue(Validate.isTrue(true, "%s"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testIsTrueFalse() {
        // False condition: the boolean is formatted into the message.
        try {
            Validate.isTrue(false, "is %s");
        }
        catch (IllegalArgumentException e) {
            assertEquals("is false", e.getMessage());
            throw e;
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void testIsTrueFalseNullParam() {
        // Null format: default message still mentions the value "false".
        try {
            Validate.isTrue(false, null);
        }
        catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("false"));
            throw e;
        }
    }

    @Test
    public void testIsTrueValue() {
        // Value-returning overload: the same instance comes back when true.
        Object object = new Object();
        assertSame(object, Validate.isTrue(true, object, "%s"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testIsTrueFalseValue() {
        // When false, the VALUE (not the condition) is formatted into the message.
        try {
            Validate.isTrue(false, "baz", "foo is '%s'");
        }
        catch (IllegalArgumentException e) {
            assertEquals("foo is 'baz'", e.getMessage());
            throw e;
        }
    }

    @Test
    public void testIsTrueValueParamNull() {
        assertEquals("foo", Validate.isTrue(true, "foo", null));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testIsTrueFalseValueParamNull() {
        // Null format with a value: default message contains the value.
        try {
            Validate.isTrue(false, "foo", null);
        }
        catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("foo"));
            throw e;
        }
    }

    @Test
    public void testIsTrueValueNullParamNull() {
        // A null value is passed through unchanged when the condition holds.
        assertNull(Validate.isTrue(true, null, null));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testIsTrueFalseValueNullParamNull() {
        // Null value and null format: default message mentions "null".
        try {
            Validate.isTrue(false, null, null);
        }
        catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("null"));
            throw e;
        }
    }
}
|
"""Leetcode 160. Intersection of Two Linked Lists
Easy
URL: https://leetcode.com/problems/intersection-of-two-linked-lists/
Write a program to find the node at which the intersection of
two singly linked lists begins.
For example, the following two linked lists:
A1 -> A2
          \ C1 -> C2 -> C3
/
B1 -> B2 -> B3
begin to intersect at node C1.
Example 1:
Input: intersectVal = 8, listA = [4,1,8,4,5], listB = [5,0,1,8,4,5],
skipA = 2, skipB = 3
Output: Reference of the node with value = 8
Input Explanation: The intersected node's value is 8
(note that this must not be 0 if the two lists intersect).
From the head of A, it reads as [4,1,8,4,5].
From the head of B, it reads as [5,0,1,8,4,5].
There are 2 nodes before the intersected node in A;
There are 3 nodes before the intersected node in B.
Example 2:
Input: intersectVal = 2, listA = [0,9,1,2,4], listB = [3,2,4],
skipA = 3, skipB = 1
Output: Reference of the node with value = 2
Input Explanation: The intersected node's value is 2
(note that this must not be 0 if the two lists intersect).
From the head of A, it reads as [0,9,1,2,4].
From the head of B, it reads as [3,2,4].
There are 3 nodes before the intersected node in A;
There are 1 node before the intersected node in B.
Example 3:
Input: intersectVal = 0, listA = [2,6,4], listB = [1,5],
skipA = 3, skipB = 2
Output: null
Input Explanation: From the head of A, it reads as [2,6,4].
From the head of B, it reads as [1,5].
Since the two lists do not intersect, intersectVal must be 0,
while skipA and skipB can be arbitrary values.
Explanation: The two lists do not intersect, so return null.
Notes:
- If the two linked lists have no intersection at all, return null.
- The linked lists must retain their original structure after the function returns.
- You may assume there are no cycles anywhere in the entire linked structure.
- Your code should preferably run in O(n) time and use only O(1) memory.
"""
# Definition for singly-linked list.
class ListNode(object):
    """A node of a singly linked list."""

    def __init__(self, val):
        self.val = val    # node payload
        self.next = None  # next node in the list, or None at the tail
class SolutionSet(object):
    def getIntersectionNode(self, headA, headB):
        """Return the first node shared by both lists, or None.

        :type headA, headB: ListNode
        :rtype: ListNode

        Strategy: record every node of list A in a set, then walk list B
        and return the first node already seen.

        Time complexity: O(m+n), where m and n are the lengths of lists A
        and B.  Space complexity: O(m) for the set of list-A nodes.
        """
        if not headA or not headB:
            return None
        # Collect the identity of every node in list A.
        seen = set()
        node = headA
        while node:
            seen.add(node)
            node = node.next
        # The first list-B node that also lives in list A starts the
        # intersection.
        node = headB
        while node:
            if node in seen:
                return node
            node = node.next
        return None
class SolutionTwoPointersTraverseSwap(object):
    def getIntersectionNode(self, headA, headB):
        """Return the first node shared by both lists, or None.

        :type headA, headB: ListNode
        :rtype: ListNode

        Strategy: advance two pointers in lockstep; when a pointer falls
        off the end of its list it restarts at the head of the OTHER list.
        Each pointer therefore traverses m+n nodes, so both arrive at the
        intersection node together (or at None when there is none).

        Time complexity: O(m+n).  Space complexity: O(1).
        """
        if not headA or not headB:
            return None
        pa, pb = headA, headB
        while pa is not pb:
            # Step forward, swapping to the other list's head at the end.
            pa = pa.next if pa else headB
            pb = pb.next if pb else headA
        # Either the intersection node or None (both pointers exhausted).
        return pa
def main():
    """Smoke-test both solutions against the three LeetCode examples.

    Fixes over the original:
    - uses the print() function so the script runs on Python 3 as well as 2;
    - Example 1 previously overwrote headB.next.next (dropping the '1' node,
      so listB was [5,0,8,4,5] instead of [5,0,1,8,4,5]);
    - Example 3 previously built listA as [2,6,1] instead of [2,6,4].
    """
    # Example 1: listA = [4,1,8,4,5], listB = [5,0,1,8,4,5]; intersect at 8.
    headA = ListNode(4)
    headA.next = ListNode(1)
    headB = ListNode(5)
    headB.next = ListNode(0)
    headB.next.next = ListNode(1)
    tail = ListNode(8)
    tail.next = ListNode(4)
    tail.next.next = ListNode(5)
    headA.next.next = tail            # skipA = 2
    headB.next.next.next = tail       # skipB = 3 (do not overwrite the '1' node)
    print(SolutionSet().getIntersectionNode(headA, headB).val)
    print(SolutionTwoPointersTraverseSwap().getIntersectionNode(headA, headB).val)

    # Example 2: listA = [0,9,1,2,4], listB = [3,2,4]; intersect at 2.
    headA = ListNode(0)
    headA.next = ListNode(9)
    headA.next.next = ListNode(1)
    headB = ListNode(3)
    tail = ListNode(2)
    tail.next = ListNode(4)
    headA.next.next.next = tail       # skipA = 3
    headB.next = tail                 # skipB = 1
    print(SolutionSet().getIntersectionNode(headA, headB).val)
    print(SolutionTwoPointersTraverseSwap().getIntersectionNode(headA, headB).val)

    # Example 3: listA = [2,6,4], listB = [1,5]; no intersection -> None.
    headA = ListNode(2)
    headA.next = ListNode(6)
    headA.next.next = ListNode(4)     # was ListNode(1): comment says [2,6,4]
    headB = ListNode(1)
    headB.next = ListNode(5)
    print(SolutionSet().getIntersectionNode(headA, headB))
    print(SolutionTwoPointersTraverseSwap().getIntersectionNode(headA, headB))


if __name__ == '__main__':
    main()
|
/**
* Copyright 2005 Sakai Foundation Licensed under the
* Educational Community License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.sakaiproject.evaluation.logic.externals;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import org.sakaiproject.evaluation.logic.model.EvalHierarchyNode;
import org.sakaiproject.evaluation.providers.EvalHierarchyProvider;
/**
 * This brings in the hierarchy information and gives us the ability to control the
 * hierarchy (node creation/removal, node metadata, and eval-group association)
 *
 * @author <NAME> (<EMAIL>)
 */
public interface ExternalHierarchyLogic extends EvalHierarchyProvider, ExternalHierarchyPermissions {

    /**
     * Add a new node to a hierarchy
     *
     * @param parentNodeId the unique id for the parent of this node, can be null if this is the root or a top level node
     * @return the object representing the newly added node
     */
    public EvalHierarchyNode addNode(String parentNodeId);

    /**
     * Remove a node from the hierarchy if it is possible,
     * nodes can only be removed if they have no children associations,
     * root nodes can never be removed,
     * nodes which have eval groups assigned to them cannot be removed,
     * exception occurs if these rules are violated
     * <br/>NOTE: this will remove associated template items from their association
     * with the node and place them back in the default level
     *
     * @param nodeId a unique id for a hierarchy node
     * @return the object representing the parent of the removed node
     */
    public EvalHierarchyNode removeNode(String nodeId);

    /**
     * Save meta data on a node, this has to be done separately from creating a node
     *
     * @param nodeId a unique id for a hierarchy node
     * @param title the title of the node
     * @param description a description for this node
     * @return the object representing the updated node
     */
    public EvalHierarchyNode updateNodeData(String nodeId, String title, String description);

    /**
     * Set this set to be the set of eval group ids associated with this node,
     * can also be used to clear the associated ids
     *
     * @param nodeId a unique id for a hierarchy node
     * @param evalGroupIds the unique IDs of eval groups to associate, if this is an
     * empty set then this will clear the associations so there are no groups associated
     */
    public void setEvalGroupsForNode(String nodeId, Set<String> evalGroupIds);

    /**
     * Get all the children node ids which exist under the set of supplied nodes
     *
     * @param nodes a set of eval hierarchy nodes
     * @param includeSuppliedNodeIds include the node ids of the supplied collection of nodes
     * @return a set of all unique child node ids
     */
    public Set<String> getAllChildrenNodes(Collection<EvalHierarchyNode> nodes, boolean includeSuppliedNodeIds);

    /**
     * Create a sorted list of nodes based on a set of input nodes,
     * list goes from root (or highest parent) down to the bottom most node
     *
     * @param nodes a collection of nodes
     * @return a list of {@link EvalHierarchyNode}
     */
    public List<EvalHierarchyNode> getSortedNodes(Collection<EvalHierarchyNode> nodes);

}
|
"use strict";
class Player {
constructor(environment, addMode = "immediate", badStates = {}, badStateCounts = {}, activeBadStateCounts = {}) {
/** 治療回数 */
this.repairCount = 0;
/** ドーピング回数 */
this.dopeCount = 0;
/** 通常ステータス */
this.normalStatus = new PlayerNormalStatus();
/** ベース感度 */
this.baseSensitivity = new PlayerSensitivity();
/** ベース性的快感上限 */
this.baseSensationLimit = 1000;
/** 性的快感 */
this.sensation = 0;
/** 絶頂回数 */
this.orgasmCount = 0;
/** このステージでの絶頂回数 */
this.currentStageOrgasmCount = 0;
/** 失敗となる絶頂回数 */
this.orgasmLimit = 1;
/** 挑戦回数 */
this.challengeCount = 0;
/** 敗北回数 */
this.failedCount = 0;
/** 抵抗値(この%分だけ効果を削る) */
this.resist = 0;
/** 抵抗値(%)減少ステップ */
this.resistStep = 0;
/** 抵抗値最小値 */
this.resistMin = 0;
/** 感度上昇速度バイアス(%) */
this.sensitiveSpeedBias = 100;
/** 精神加速(%) */
this.speedBoost = 100;
/** 精神加速鈍化ステップ(%) */
this.speedBoostStep = 0;
/** 精神加速最小値(%) */
this.speedBoostMin = 100;
/** 我慢値(%) */
this.patience = 100;
/** 我慢値減少ステップ(%) */
this.patienceStep = 0;
/** 我慢最小値(%) */
this.patienceMin = 100;
this.environment = environment;
this.addMode = addMode;
this.badStates = new PlayerBadStates(this, badStates, badStateCounts, activeBadStateCounts);
this.previousChallengeSensitivity = this.previousStageSensitivity = this.baseSensitivity;
this.initialSensitivity = this.baseSensitivity.copy();
this.initialBadStates = this.previousChallengeBadStates = this.previousStageBadStates = this.badStates;
this.logs = new PlayerLogs();
}
static sensitiveSpeed(sensitivity, allSensitivity, sensitiveSpeedBias) {
return PlayerSensitivity.sensitiveSpeed(sensitivity, allSensitivity) * sensitiveSpeedBias / 100;
}
static sensationSpeed(sensitivity, allSensitivity, effectiveRate) {
return PlayerSensitivity.sensationSpeed(sensitivity, allSensitivity) * effectiveRate;
}
get effectiveBadStates() { return this.addMode === "immediate" ? this.badStates : this.previousStageBadStates; }
/** 遅延時間・停止時間・持続時間・快感上昇速度にきく */
get effectiveRate() { return (100 - this.resist) / 100; }
get sensitivity() { return this.badStates.sensitivityBias.sensitivity(this.baseSensitivity, this.patience); }
get sensationLimit() { return this.baseSensationLimit + Math.exp(1 - this.sensitivity.all / 10000) / Math.exp(1); }
get canOrgasm() { return this.sensation >= this.sensationLimit; }
get currentStageCanClear() { return this.currentStageOrgasmCount < this.orgasmLimit; }
get canRepair() { return this.repairCount < this.challengeCount; }
get canDope() { return this.dopeCount < this.failedCount; }
get delay() { return (this.sensitivity.delay + this.badStates.delay) * this.effectiveRate; }
/*
sensitiveSpeed(part: SensitivePart) {
return this.sensitivity.sensitiveSpeed(part) * this.sensitiveSpeedBias / 100;
}
sensationSpeed(part: SensitivePart) {
return this.sensitivity.sensationSpeed(part) * this.effectiveRate;
}
*/
newChallenge(count) {
this.previousChallengeSensitivity = this.sensitivity.copy();
this.previousChallengeBadStates = this.badStates;
++this.challengeCount;
this.logs.newChallenge(count);
}
newStageChallenge(level, repeatCount) {
this.previousStageSensitivity = this.sensitivity.copy();
this.previousStageBadStates = this.badStates;
this.currentStageOrgasmCount = 0;
this.logs.newStageChallenge(level, repeatCount);
}
upSensation(parts, value, badState) {
const all = this.sensitivity.all;
const partUpSensitivity = {};
let upSensation = 0;
for (const part of parts) { // allの計算をキャッシュして同じ時点で計算
const sensitivity = this.sensitivity[part];
partUpSensitivity[part] = Player.sensitiveSpeed(sensitivity, all, this.sensitiveSpeedBias) * value;
this.baseSensitivity[part] += partUpSensitivity[part];
upSensation += Player.sensationSpeed(sensitivity, all, this.effectiveRate) * value;
}
this.sensation += upSensation;
this.logs.upSensation(parts, value, upSensation, badState);
return { sensitivity: partUpSensitivity, sensation: upSensation };
}
toOrgasmCount() {
const count = Math.floor(this.sensation / this.sensationLimit);
return count;
}
orgasm(count) {
this.orgasmCount += count;
this.currentStageOrgasmCount += count;
// 連続絶頂体質なら快感を残す
const 連続絶頂体質 = this.effectiveBadStates.find("連続絶頂体質");
this.sensation = 連続絶頂体質 ? 連続絶頂体質.progress * 20 / 100 * this.sensationLimit : 0;
this.logs.orgasm(count, 連続絶頂体質 ? 連続絶頂体質.progress * 20 : 0);
}
/**
*
* @param setName
* @param upProgress
* @param triggeredBy 誘発由来なら原因のバッドステートか、原因を明示しない場合はtrue
*/
upBadState(setName, triggeredBy, upProgress = 1) {
const previousBadState = this.badStates.find(setName);
const badStates = this.badStates.up(setName, upProgress);
this.logs.upBadState(setName, Boolean(badStates), previousBadState, badStates ? badStates.find(setName) : undefined, triggeredBy);
if (badStates)
this.badStates = badStates;
return badStates;
}
downBadState(setName, downProgress = 1) {
const previousBadState = this.badStates.find(setName);
const badStates = this.badStates.down(setName, downProgress);
this.logs.downBadState(setName, Boolean(badStates), previousBadState, badStates ? badStates.find(setName) : undefined);
if (badStates)
this.badStates = badStates;
return badStates;
}
downBadStatesOnBattleEnd() {
const badStates = this.badStates.downBattleEnd();
this.logs.downBadStatesOnBattleEnd(this.badStates, badStates);
if (badStates)
this.badStates = badStates;
return badStates;
}
repair(repair) {
++this.repairCount;
this.resist = repair.resist;
this.resistStep = repair.resistStep;
this.resistMin = repair.resistMin;
this.sensitiveSpeedBias = repair.sensitiveSpeedBias;
this.logs.repair(this.repairCount, this.resist, this.sensitiveSpeedBias);
}
dope(dope) {
++this.dopeCount;
this.speedBoost = dope.speedBoost;
this.speedBoostStep = dope.speedBoostStep;
this.speedBoostMin = dope.speedBoostMin;
this.patience = dope.patience;
this.patienceStep = dope.patienceStep;
this.patienceMin = dope.patienceMin;
this.logs.dope(this.dopeCount, this.speedBoost, this.patience);
}
downBadStatesOnRetry() {
const badStates = this.badStates.downRetry();
this.logs.downBadStatesOnRetry(this.badStates, badStates);
if (badStates)
this.badStates = badStates;
return badStates;
}
endStage(successRate) {
if (!this.currentStageCanClear)
++this.failedCount;
this.logs.endStage(this.currentStageCanClear, this.currentStageOrgasmCount, successRate);
}
/** ステージ経過効果 */
passStage() {
const previousResist = this.resist;
this.resist -= this.resistStep;
if (this.resist < this.resistMin)
this.resist = this.resistMin;
const previousSpeedBoost = this.speedBoost;
this.speedBoost -= this.speedBoostStep;
if (this.speedBoost < this.speedBoostMin)
this.speedBoost = this.speedBoostMin;
const previousAnestethia = this.patience;
this.patience -= this.patienceStep;
if (this.patience < this.patienceMin)
this.patience = this.patienceMin;
this.logs.passStage(previousResist, this.resist, this.resistStep, this.resistMin, previousSpeedBoost, this.speedBoost, this.speedBoostStep, this.speedBoostMin, previousAnestethia, this.patience, this.patienceStep, this.patienceMin);
}
}
/** 感度 */
class PlayerSensitivity {
constructor(params = {}) {
this.skin = 15;
this.rightNipple = 70;
this.leftNipple = 70;
this.bust = 40;
this.urethra = 10;
this.clitoris = 100;
this.vagina = 40;
this.portio = 9;
this.womb = 8;
this.anal = 20;
this.hip = 20;
for (const name of Object.keys(params)) {
const value = params[name];
if (value != null)
this[name] = value;
}
}
static ja(part, alt = false) {
if (part === "all")
return alt ? this.allPartJaAlt : this.allPartJa;
return this.partsJa[this.parts.indexOf(part)];
}
static sensitiveSpeed(sensitivity, allSensitivity) {
return 1 + Math.pow(sensitivity, 0.7) / 60 * Math.log10(allSensitivity); // TODO: 調整
}
static sensationSpeed(sensitivity, allSensitivity) {
return Math.pow(sensitivity, 0.7) / 8 * Math.log10(allSensitivity); // TODO: 調整
}
get all() {
return PlayerSensitivity.parts.reduce((sum, part) => this[part] + sum, 0);
}
get delay() {
return Math.pow(((this.all - PlayerSensitivity.initialAll) / 3), 0.65) / 1.2; // TODO:
}
copy() {
const params = {};
for (const name of PlayerSensitivity.parts) {
params[name] = this[name];
}
return new PlayerSensitivity(params);
}
}
PlayerSensitivity.parts = [
"skin", "rightNipple", "leftNipple", "bust", "urethra", "clitoris", "vagina", "portio", "womb", "anal", "hip",
];
PlayerSensitivity.partsJa = [
"肌", "右乳首", "左乳首", "乳房", "尿道", "陰核", "膣", "ポルチオ", "子宮", "尻穴", "尻肉",
];
PlayerSensitivity.allPartJa = "全部";
PlayerSensitivity.allPartJaAlt = "全部位";
PlayerSensitivity.initialAll = new PlayerSensitivity().all;
/** Percent bias applied on top of the base per-part sensitivity. */
class PlayerSensitivityBias {
    constructor() {
        // Every part starts at a neutral 100% bias.
        for (const part of PlayerSensitivity.parts) {
            this[part] = 100;
        }
    }
    /**
     * Produce the effective sensitivity: base value scaled by this bias (%)
     * and divided by the patience percentage.
     */
    sensitivity(playerSensitivity, patience) {
        const params = {};
        for (const part of PlayerSensitivity.parts) {
            // Expression kept in this exact order for float-identical results.
            params[part] = playerSensitivity[part] * this[part] / 100 / patience * 100;
        }
        return new PlayerSensitivity(params);
    }
}
/** The set of currently active bad states — immutable: every mutation returns a new instance. */
class PlayerBadStates {
    constructor(player, badStates, badStateCounts, activeBadStateCounts) {
        this.player = player;
        this.badStates = badStates;
        this.badStateCounts = badStateCounts;
        this.activeBadStateCounts = activeBadStateCounts;
        // Derived values, computed once since the instance never changes.
        this.sensitivityBias = this.makeSensitivityBias();
        this.setNames = Object.keys(this.badStates).sort((a, b) => this.badStates[a].setIndex - this.badStates[b].setIndex);
        this.sortedBadStates = this.setNames.map((setName) => this.badStates[setName]);
        this.delay = this.sortedBadStates.reduce((delay, badState) => delay + (badState.delay || 0), 0);
        this.dangers = Array.from(new Set(this.sortedBadStates.reduce((dangers, badState) => dangers.concat(badState.danger || []), [])));
    }
    /** Look up the active bad state of a set, or undefined. */
    find(setName) {
        return this.badStates[setName];
    }
    /**
     * Activate (raise) a bad state.
     *
     * Returns a new PlayerBadStates when the state changed (afflicted or
     * advanced); returns undefined when nothing changed.
     * @param setName bad-state set name
     * @param upProgress number of progress steps to raise
     */
    up(setName, upProgress = 1) {
        const currentBadState = this.badStates[setName];
        const currentProgress = currentBadState ? currentBadState.progress : 0;
        const badStateSet = this.player.environment.badStates.findSet(setName);
        let nextProgress = currentProgress;
        let nextBadState = undefined;
        // NOTE(review): `org` is never read afterwards — presumably a leftover.
        const org = upProgress;
        let finishSearch = false;
        // Walk up one progress level at a time until upProgress is spent or a
        // gate stops the advance.
        while (upProgress) {
            ++nextProgress;
            const nextBadStateCandidate = badStateSet.byProgress(nextProgress);
            if (!nextBadStateCandidate)
                break;
            if (nextBadStateCandidate.countActivate) { // gate on lifetime activation counts
                for (const condition of nextBadStateCandidate.countActivate) {
                    if (condition.count > (this.badStateCounts[condition.name] || 0)) {
                        finishSearch = true;
                        break;
                    }
                }
            }
            if (nextBadStateCandidate.activeCountActivate) { // gate on counts since the state was afflicted
                for (const condition of nextBadStateCandidate.activeCountActivate) {
                    if (condition.count > (this.activeBadStateCounts[condition.name] || 0)) {
                        finishSearch = true;
                        break;
                    }
                }
            }
            if (finishSearch)
                break;
            nextBadState = nextBadStateCandidate;
            --upProgress;
        }
        // NOTE(review): debug output — presumably leftover from development.
        if (nextBadState && nextBadState.countActivate)
            console.warn(nextBadState, this.badStateCounts);
        if (!nextBadState && (!currentBadState || !currentBadState.count))
            return; // no next level and no count to bump -> no change
        const nextBadStates = nextBadState ? Object.assign(Object.assign({}, this.badStates), { [setName]: nextBadState }) : this.badStates;
        const addCount = nextBadState ? nextBadState.count || 0 : currentBadState ? currentBadState.count || 0 : 0;
        const nextBadStateCounts = addCount ? Object.assign(Object.assign({}, this.badStateCounts), { [setName]: (this.badStateCounts[setName] || 0) + addCount }) : this.badStateCounts;
        const nextActiveBadStateCounts = addCount ? Object.assign(Object.assign({}, this.activeBadStateCounts), { [setName]: (this.activeBadStateCounts[setName] || 0) + addCount }) : this.activeBadStateCounts;
        return new PlayerBadStates(this.player, nextBadStates, nextBadStateCounts, nextActiveBadStateCounts);
    }
    /**
     * Relieve (lower) a bad state.
     *
     * Returns a new PlayerBadStates when the state changed, else undefined.
     * @param setName bad-state set name
     * @param downProgress number of steps to lower; true clears the state entirely
     */
    down(setName, downProgress = 1) {
        const currentBadState = this.find(setName);
        if (!currentBadState)
            return;
        const nextProgress = downProgress === true ? 0 : currentBadState.progress - (downProgress || 0);
        if (nextProgress === currentBadState.progress)
            return;
        if (nextProgress > 0) {
            const nextBadState = this.player.environment.badStates.findSet(setName).byProgress(nextProgress);
            return new PlayerBadStates(this.player, Object.assign(Object.assign({}, this.badStates), { [setName]: nextBadState }), this.badStateCounts, this.activeBadStateCounts);
        }
        else {
            // Fully cleared: drop the state and its since-afflicted counter
            // (lifetime counters are kept).
            const nextBadStates = Object.assign({}, this.badStates);
            delete nextBadStates[setName];
            const nextActiveBadStateCounts = Object.assign({}, this.activeBadStateCounts);
            delete nextActiveBadStateCounts[setName];
            return new PlayerBadStates(this.player, nextBadStates, this.badStateCounts, nextActiveBadStateCounts);
        }
    }
    /** Progress recovery applied when a battle (stage) ends; undefined when nothing changed. */
    downBattleEnd() {
        const nextBadStates = Object.assign({}, this.badStates);
        const nextActiveBadStateCounts = Object.assign({}, this.activeBadStateCounts);
        let modified = false;
        for (const setName of Object.keys(this.badStates)) {
            const badState = this.badStates[setName];
            if (badState.stageDown) {
                // stageDown === true clears the state; a number lowers progress.
                const nextProgress = badState.stageDown === true ? 0 : badState.progress - badState.stageDown;
                if (nextProgress > 0) {
                    nextBadStates[setName] = this.player.environment.badStates.findSet(setName).byProgress(nextProgress);
                }
                else {
                    delete nextBadStates[setName];
                    delete nextActiveBadStateCounts[setName];
                }
                modified = true;
            }
        }
        if (modified)
            return new PlayerBadStates(this.player, nextBadStates, this.badStateCounts, nextActiveBadStateCounts);
    }
    /** Progress recovery applied when retrying (withdrawing); undefined when nothing changed. */
    downRetry() {
        const nextBadStates = Object.assign({}, this.badStates);
        const nextActiveBadStateCounts = Object.assign({}, this.activeBadStateCounts);
        let modified = false;
        for (const setName of Object.keys(this.badStates)) {
            const badState = this.badStates[setName];
            if (badState.retryDown) {
                const nextProgress = badState.retryDown === true ? 0 : badState.progress - badState.retryDown;
                if (nextProgress > 0) {
                    nextBadStates[setName] = this.player.environment.badStates.findSet(setName).byProgress(nextProgress);
                }
                else {
                    delete nextBadStates[setName];
                    delete nextActiveBadStateCounts[setName];
                }
                modified = true;
            }
        }
        if (modified)
            return new PlayerBadStates(this.player, nextBadStates, this.badStateCounts, nextActiveBadStateCounts);
    }
    /** Aggregate the sensitivity modifiers of every active bad state into one bias. */
    makeSensitivityBias() {
        const playerSensitivityBias = new PlayerSensitivityBias();
        for (const name of Object.keys(this.badStates)) {
            const sensitivity = this.badStates[name].sensitivity;
            if (typeof sensitivity === "number") {
                // A plain number applies to every part equally.
                if (sensitivity !== 0) {
                    for (const part of PlayerSensitivity.parts) {
                        playerSensitivityBias[part] += sensitivity;
                    }
                }
            }
            else {
                // An object applies per part.
                for (const part of Object.keys(sensitivity)) {
                    const value = sensitivity[part];
                    if (value)
                        playerSensitivityBias[part] += value;
                }
            }
        }
        return playerSensitivityBias;
    }
}
/** Diff of two PlayerBadStates snapshots, ordered by bad-state set index. */
class PlayerBadStateDiff {
    constructor(before, after) {
        this.before = before;
        this.after = after;
        // Union of set names from both snapshots (insertion order preserved).
        const unique = new Set([...this.before.setNames, ...this.after.setNames]);
        const setIndexOf = (setName) =>
            (this.before.badStates[setName] || this.after.badStates[setName]).setIndex;
        this.setNames = [...unique].sort((a, b) => setIndexOf(a) - setIndexOf(b));
        this.sortedBadStateDiffEntries = this.setNames.map(
            (setName) => new PlayerBadStateDiffEntry(this.before.badStates[setName], this.after.badStates[setName])
        );
    }
}
class PlayerBadStateDiffEntry {
constructor(before, after) {
this.before = before;
this.after = after;
}
get type() {
if (!this.before) {
return "add";
}
else if (!this.after) {
return "remove";
}
else if (this.before.progress < this.after.progress) {
return "up";
}
else if (this.before.progress > this.after.progress) {
return "down";
}
else {
return "same";
}
}
get first() {
return (this.before || this.after);
}
}
class PlayerNormalStatus {
constructor() {
this.lv = 120;
this.maxHp = 30000;
this.maxMp = 2500;
this.atk = 800;
this.def = 400;
this.mag = 1200;
this.spd = 500;
}
}
PlayerNormalStatus.names = ["lv", "maxHp", "maxMp", "atk", "def", "mag", "spd"];
PlayerNormalStatus.namesJa = ["Lv.", "最大HP", "最大MP", "攻撃力", "防御力", "魔法力", "素早さ"];
function br() {
return document.createElement("br");
}
function strong(text) {
const node = document.createElement("strong");
node.textContent = text;
return node;
}
function 付与() {
const node = strong("付与");
node.classList.add("add");
return node;
}
function 悪化() {
const node = strong("悪化");
node.classList.add("progress");
return node;
}
function text(text) {
return document.createTextNode(text);
}
class PlayerLogs extends Array {
newChallenge(count) {
this.unshift(this.createElement("newChallenge", [strong(`${count}回目`), text(`の挑戦開始`)]));
}
newStageChallenge(level, repeatCount) {
this.unshift(this.createElement("newStageChallenge", [
strong(`ステージ${level}`), text(`: `),
...(repeatCount === 1 ? [] : [strong(`${repeatCount}`), text(`回目の`)]),
text(`挑戦開始`),
]));
}
upSensation(parts, value, upSensation, badState) {
const partsJa = parts.length === PlayerSensitivity.parts.length ?
PlayerSensitivity.ja("all", true) :
parts.map(part => PlayerSensitivity.ja(part)).join(",");
this.unshift(this.createElement("upSensation", [
strong(`[${badState.displayName}]`), text(" "),
strong(partsJa), text("に"),
...(value === 1 ? [] : [value >= 1 ? strong(`${value}倍`) : text(`${value}倍`), text("の")]),
text("快感! "), strong(`+${float2(upSensation)}`),
]));
}
orgasm(count, restPercent) {
const base = [strong(`${count === 1 ? "" : `${count}回`}絶頂`), text(`してしまった!`)];
const rest = restPercent === 0 ? [] : [br(), strong("連続絶頂体質"), text(`により`), strong(`快感が${restPercent}%残ってしまう!`)];
this.unshift(this.createElement("orgasmLog", base.concat(rest)));
}
upBadState(setName, changed, previousBadState, nextBadState, triggeredBy) {
if (changed && nextBadState) { // count変わったとかでもnextBadStateは返ってくる
const reason = triggeredBy === true ?
[] :
triggeredBy ?
[strong(`[${triggeredBy.displayName}]`), text(`により`)] :
[strong(`[${setName}]`), text(`攻撃で`)];
if (nextBadState.onceLog) {
this.unshift(this.createElement("upBadState", [
...reason,
strong(`[${nextBadState.displayName}]${nextBadState.onceLog}しまった!`),
]));
}
else {
if (previousBadState) {
if (previousBadState.progress === nextBadState.progress)
return;
this.unshift(this.createElement("upBadState", [
...reason,
strong(`[${previousBadState.displayName}]`), text(`が`),
strong(`[${nextBadState.displayName}]に`), 悪化(), text(`してしまった!`),
]));
}
else {
this.unshift(this.createElement("upBadState", [
...reason,
strong(`[${nextBadState.displayName}]が`), 付与(), text(`されてしまった!`),
]));
}
}
}
else if (!triggeredBy) {
this.unshift(this.createElement("upBadState", [
strong(`[${setName}]`), text(`攻撃をうけてしまった!`),
]));
}
}
downBadState(setName, changed, previousBadState, nextBadState) {
if (!changed || previousBadState.onceLog)
return;
if (nextBadState) {
const progressDiff = previousBadState.progress - nextBadState.progress;
if (!progressDiff)
return;
this.unshift(this.createElement("downBadState", [
strong(`[${previousBadState.displayName}]`), text("→"), strong(`[${nextBadState.displayName}]`),
text(` ${progressDiff}段階軽減`),
]));
}
else {
this.unshift(this.createElement("downBadState", [
strong(`[${previousBadState.displayName}]が解消`), text(`した`),
]));
}
}
downBadStatesOnBattleEnd(previousBadStates, currentBadStates) {
if (!currentBadStates)
return;
const summary = this.badStateDiffSummary(previousBadStates, currentBadStates);
if (!summary)
return;
this.unshift(this.createElement("downBadStatesOnBattleEnd", [
text("ステージ終了による軽減・解消:"), br(),
...summary,
]));
}
repair(repairCount, resist, sensitiveSpeedBias) {
this.unshift(this.createElement("repair", [
text(`${repairCount}回目の治療をうけたことで`),
text(`抵抗値が`), strong(`${resist}%`),
...(sensitiveSpeedBias === 100 ? [] : [text(` 感度上昇速度が`), strong(`${float2(sensitiveSpeedBias / 100)}倍`)]),
text(`に`),
]));
}
dope(dopeCount, speedBoost, patience) {
this.unshift(this.createElement("dope", [
text(`${dopeCount}回目のドーピングで`),
text(`精神加速が`), strong(`${speedBoost}%`),
text(` 我慢値が`), strong(`${patience}%`),
text(`に`),
]));
}
downBadStatesOnRetry(previousBadStates, currentBadStates) {
if (!currentBadStates)
return;
const summary = this.badStateDiffSummary(previousBadStates, currentBadStates);
if (!summary)
return;
this.unshift(this.createElement("downBadStatesOnRetry", [
text("治療による軽減・解消:"), br(),
...summary,
]));
}
badStateDiffSummary(previousBadStates, currentBadStates) {
if (!currentBadStates)
return [];
const str = [];
const diff = new PlayerBadStateDiff(previousBadStates, currentBadStates);
for (const entry of diff.sortedBadStateDiffEntries) {
if (entry.type === "down") {
const progressDiff = entry.before.progress - entry.after.progress;
str.push(strong(`・[${entry.before.displayName}]`), text("→"), strong(`[${entry.after.displayName}]`), text(` ${progressDiff}段階軽減`), br());
}
else if (entry.type === "remove") {
str.push(strong(`・[${entry.before.displayName}]が解消`), br());
}
}
return str;
}
    /**
     * Log per-stage decay of resist, speed boost and patience.
     * Skipped entirely when all three step values are zero/falsy.
     * For each stat: show the drop (with a "(下限)" marker when the floor is
     * hit) or, when the value did not move, a "reached the floor" line.
     * NOTE(review): a zero diff is reported as "at the floor" even if the
     * stat simply has no step configured — confirm that wording is intended.
     */
    passStage(previousResist, resist, resistStep, resistMin, previousSpeedBoost, speedBoost, speedBoostStep, speedBoostMin, previousPatience, patience, patienceStep, patienceMin) {
        if (!resistStep && !speedBoostStep && !patienceStep)
            return;
        const str = [text(`ステージ経過で`)];
        // Resist.
        const resistDiff = previousResist - resist;
        if (resistDiff) {
            const isMin = resist === resistMin;
            str.push(br(), text(`抵抗値が${float2(resistDiff)}%減り`), strong(`${float2(resist)}%`), text(`に`), isMin ? strong("(下限)") : text(""));
        }
        else {
            str.push(br(), strong(`抵抗値減少は下限に達しています`));
        }
        // Speed boost.
        const speedBoostDiff = previousSpeedBoost - speedBoost;
        if (speedBoostDiff) {
            const isMin = speedBoost === speedBoostMin;
            str.push(br(), text(`精神加速が${float2(speedBoostDiff)}%鈍化し`), strong(`${float2(speedBoost)}%`), text(`に`), isMin ? strong("(下限)") : text(""));
        }
        else {
            str.push(br(), strong(`精神加速鈍化は下限に達しています`));
        }
        // Patience.
        const patienceDiff = previousPatience - patience;
        if (patienceDiff) {
            const isMin = patience === patienceMin;
            str.push(br(), text(`我慢値が${float2(patienceDiff)}%減少し`), strong(`${float2(patience)}%`), text(`に`), isMin ? strong("(下限)") : text(""));
        }
        else {
            str.push(br(), strong(`我慢値は下限に達しています`));
        }
        this.unshift(this.createElement("passStage", str));
    }
endStage(cleared, orgasmCount, successRate) {
this.unshift(this.createElement(cleared ? "endStageSuccess" : "endStageFailed", [
...(orgasmCount ? [strong(`${orgasmCount}回絶頂`), cleared ? text("してしまいましたが") : text("してしまい")] : [text("絶頂せずに")]),
cleared ? strong("攻略成功") : strong("攻略失敗"),
cleared ? text("しました") : text("してしまいました"), br(),
text("成功率"), strong(`${float2(successRate)}%`),
]));
}
createElement(type, text) {
const li = document.createElement("li");
li.classList.add(type);
for (const elem of text)
li.appendChild(elem);
return li;
}
}
|
def str_to_int(input_string):
    """Parse *input_string* and return the integer it represents.

    Raises ValueError when the string is not a valid integer literal.
    """
    return int(input_string)


print(str_to_int("123"))
|
# Exposes a single search action backed by the Lark client.
class SearchController < ApplicationController
  # Runs the required +q+ query parameter against the Lark index and stores
  # the results for the view.  Raises ActionController::ParameterMissing
  # when +q+ is absent.
  # NOTE(review): assumes +lark_client+ is supplied by ApplicationController
  # or a concern — confirm.
  def search
    @results = lark_client.search(params.require(:q))
  end
end
|
#! /bin/sh
# Render the Redis locust configuration from its template, then run the
# load test.  `set -e` aborts on the first failing step (previously a failed
# envsubst would still let locust start with a stale/missing config).
set -e

# Substitute environment variables into the JSON template.
envsubst < redis_orig.json > redis.json

# Debug aid: show what was generated.
ls -la
cat redis.json

# Start the locust load test.
locust -f redis_get_set.py
|
/**
* Copyright © 2015, University of Washington
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* * Neither the name of the University of Washington nor the names
* of its contributors may be used to endorse or promote products
* derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UNIVERSITY OF
* WASHINGTON BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.uw.apl.tupelo.logging;
import org.apache.log4j.Logger;
import org.apache.log4j.LogManager;
/**
* See src/test/resources/log4j.properties for the set up of this
 * Layout test. It is hard to assert success or failure as a unit test.
* Rather, for each logged message, we expect to see
*
* The time, in ISO8601 format
*
* The host name of the machine on which the JVM is running
*
* The logger name, which always equates to the class producing the message
*
* The log level, e.g. DEBUG, INFO
*
* The message being logged
*/
/**
 * Exercises the LogMon log4j layout by emitting a single DEBUG message;
 * the output is inspected manually (see the class comment above for the
 * expected fields).  Uses the JUnit 3 setUp/tearDown lifecycle.
 */
public class LogMonLayoutTest extends junit.framework.TestCase {

    // Logger named after this test class; configured via
    // src/test/resources/log4j.properties.
    private Logger log;

    protected void setUp() {
        System.out.println("Setting up log test");
        log = Logger.getLogger( getClass() );
    }

    protected void tearDown() {
        System.out.println("Tearing down log test");
        // Flush and close all appenders so the message is actually written.
        LogManager.shutdown();
    }

    /** Sends one DEBUG message through the layout under test. */
    public void test1() {
        System.out.println("Testing sending log message");
        log.debug( "Test Message" );
    }
}
// eof
|
<reponame>otuhs-r/sesame-ruby
require 'spec_helper'

RSpec.describe Sesame do
  # Fixture credentials; HTTP calls are stubbed below, no real API is hit.
  let(:auth_token) { 'd015cf1353d21a14f392835bceb56d53649e447e3aebe440cef9d' }
  let(:device_id) { 'ABCD12345' }
  let(:client) { Sesame::Client.new auth_token: auth_token }

  it 'has a version number' do
    expect(Sesame::VERSION).not_to be nil
  end

  describe Sesame::Client do
    subject { client }

    # The token must never leak through #inspect (e.g. into logs).
    it '#inspect does not expose the auth_token' do
      expect(subject.inspect).not_to match(/auth_token/)
    end
  end

  describe Sesame::Sesame do
    before do
      # Stub the list/control endpoints with canned fixture responses.
      stub_fixtures('sesames', 'control')
    end

    describe 'all sesames' do
      subject { client.sesames }

      it 'should return a list of sesames' do
        expect(subject.length).to eq(2)
        expect(subject).to all(be_a(Sesame::Sesame))
      end
    end

    describe 'single sesame' do
      subject { client.sesame device_id: device_id }

      it { is_expected.to be_a(Sesame::Sesame) }

      it 'knows its details' do
        expect(subject.device_id).to eq(device_id)
        expect(subject.battery).to eq(100)
        expect(subject).to be_locked
        expect(subject.state).to eq('locked')
      end

      it 'locks' do
        expect(subject.lock).to be true
      end

      it 'unlocks' do
        expect(subject.unlock).to be true
      end

      it '#inspect' do
        expect(subject.inspect)
          .to match(/device_id: ABCD12345/)
      end
    end
  end
end
|
#include <iostream>
#include <cmath>

// Normalize n two-dimensional vectors (x[i], y[i]) in place so each has
// unit length.
//
// Fixed: the original used int arrays, so `x[i] /= length` truncated every
// component toward zero (e.g. 2 / 5.39 -> 0) and the program printed zeros.
// Components are now doubles, and zero-length vectors are left untouched to
// avoid dividing 0 by 0.
void normalize(double x[], double y[], int n)
{
    for (int i = 0; i < n; i++) {
        double length = std::sqrt(x[i] * x[i] + y[i] * y[i]);
        if (length != 0.0) {
            x[i] /= length;
            y[i] /= length;
        }
    }
}

int main()
{
    double x[] = { 2, 3, 4 };
    double y[] = { 5, 8, 12 };
    int n = 3;
    normalize(x, y, n);
    for (int i = 0; i < n; i++)
        std::cout << x[i] << " " << y[i] << std::endl;
    return 0;
}
|
# Termux package recipe for patchelf (the TERMUX_PKG_* variables are read by
# the Termux build system).
TERMUX_PKG_HOMEPAGE=https://nixos.org/patchelf.html
TERMUX_PKG_DESCRIPTION="Utility to modify the dynamic linker and RPATH of ELF executables"
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=0.13
TERMUX_PKG_SRCURL=https://github.com/NixOS/patchelf/archive/$TERMUX_PKG_VERSION.tar.gz
TERMUX_PKG_SHA256=60c6aeadb673de9cc1838b630c81f61e31c501de324ef7f1e8094a2431197d09
TERMUX_PKG_AUTO_UPDATE=true
TERMUX_PKG_DEPENDS="libc++"
TERMUX_PKG_BUILD_IN_SRC=true

# Generate the configure script from the autotools sources before the
# standard configure step runs.
termux_step_pre_configure() {
	./bootstrap.sh
}
|
/* $OpenBSD: control.c,v 1.8 2021/03/02 04:10:07 jsg Exp $ */
/*
* Copyright (c) 2003, 2004 <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include <sys/types.h>
#include <sys/queue.h>
#include <sys/stat.h>
#include <sys/socket.h>
#include <sys/uio.h>
#include <sys/un.h>
#include <netinet/in.h>
#include <net/if.h>
#include <errno.h>
#include <event.h>
#include <imsg.h>
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include "amused.h"
#include "log.h"
#include "control.h"
#include "playlist.h"
#define CONTROL_BACKLOG 5

/* Global state for the control socket and the (single) playlist transaction. */
struct {
	struct event ev;	/* read event on the listening socket */
	struct event evt;	/* timer used to pause accept() when out of fds */
	int fd;			/* listening socket; -1 until control_listen() */
	struct playlist play;	/* playlist being assembled by an open transaction */
	int tx;			/* fd of the client owning the transaction, -1 if none */
} control_state = {.fd = -1, .tx = -1};

/* One connected control client. */
struct ctl_conn {
	TAILQ_ENTRY(ctl_conn) entry;
	int monitor; /* 1 if client is in monitor mode */
	struct imsgev iev;
};

TAILQ_HEAD(ctl_conns, ctl_conn) ctl_conns = TAILQ_HEAD_INITIALIZER(ctl_conns);

struct ctl_conn *control_connbyfd(int);
struct ctl_conn *control_connbypid(pid_t);
void control_close(int);
/*
 * Create the unix-domain control socket at `path`: remove any stale
 * socket file, bind under a restrictive umask, then chmod so only
 * user/group can connect.  Returns the socket fd, or -1 on error
 * (the path is unlinked again if chmod fails).
 */
int
control_init(char *path)
{
	struct sockaddr_un sun;
	int fd;
	mode_t old_umask;

	if ((fd = socket(AF_UNIX, SOCK_STREAM | SOCK_CLOEXEC | SOCK_NONBLOCK,
	    0)) == -1) {
		log_warn("%s: socket", __func__);
		return (-1);
	}

	memset(&sun, 0, sizeof(sun));
	sun.sun_family = AF_UNIX;
	strlcpy(sun.sun_path, path, sizeof(sun.sun_path));

	/* A leftover socket from a previous run must be removed before bind. */
	if (unlink(path) == -1)
		if (errno != ENOENT) {
			log_warn("%s: unlink %s", __func__, path);
			close(fd);
			return (-1);
		}

	/* Tighten permissions during bind; final mode is set via chmod below. */
	old_umask = umask(S_IXUSR|S_IXGRP|S_IWOTH|S_IROTH|S_IXOTH);
	if (bind(fd, (struct sockaddr *)&sun, sizeof(sun)) == -1) {
		log_warn("%s: bind: %s", __func__, path);
		close(fd);
		umask(old_umask);
		return (-1);
	}
	umask(old_umask);

	if (chmod(path, S_IRUSR|S_IWUSR|S_IRGRP|S_IWGRP) == -1) {
		log_warn("%s: chmod", __func__);
		close(fd);
		(void)unlink(path);
		return (-1);
	}

	return (fd);
}
/*
 * Take ownership of the bound control socket and start accepting on it.
 * Must be called exactly once; fatal if a socket was already installed.
 * Returns 0 on success, -1 if listen(2) fails.
 */
int
control_listen(int fd)
{
	if (control_state.fd != -1)
		fatalx("%s: received unexpected controlsock", __func__);

	control_state.fd = fd;
	if (listen(control_state.fd, CONTROL_BACKLOG) == -1) {
		log_warn("%s: listen", __func__);
		return (-1);
	}

	event_set(&control_state.ev, control_state.fd, EV_READ,
	    control_accept, NULL);
	event_add(&control_state.ev, NULL);
	/* Timer used to retry accept() after fd exhaustion; see control_accept. */
	evtimer_set(&control_state.evt, control_accept, NULL);

	return (0);
}
/*
 * Accept a new control client and register it in ctl_conns.  Also serves
 * as the timer callback used to resume accepting after running out of
 * file descriptors (in which case `event` has EV_TIMEOUT set).
 */
void
control_accept(int listenfd, short event, void *bula)
{
	int connfd;
	socklen_t len;
	struct sockaddr_un sun;
	struct ctl_conn *c;

	/* EV_READ events are one-shot here; re-arm before doing anything. */
	event_add(&control_state.ev, NULL);
	if ((event & EV_TIMEOUT))
		return;

	len = sizeof(sun);
	if ((connfd = accept4(listenfd, (struct sockaddr *)&sun, &len,
	    SOCK_CLOEXEC | SOCK_NONBLOCK)) == -1) {
		/*
		 * Pause accept if we are out of file descriptors, or
		 * libevent will haunt us here too.
		 */
		if (errno == ENFILE || errno == EMFILE) {
			struct timeval evtpause = { 1, 0 };

			event_del(&control_state.ev);
			evtimer_add(&control_state.evt, &evtpause);
		} else if (errno != EWOULDBLOCK && errno != EINTR &&
		    errno != ECONNABORTED)
			log_warn("%s: accept4", __func__);
		return;
	}

	if ((c = calloc(1, sizeof(struct ctl_conn))) == NULL) {
		log_warn("%s: calloc", __func__);
		close(connfd);
		return;
	}

	imsg_init(&c->iev.ibuf, connfd);
	c->iev.handler = control_dispatch_imsg;
	c->iev.events = EV_READ;
	event_set(&c->iev.ev, c->iev.ibuf.fd, c->iev.events,
	    c->iev.handler, &c->iev);
	event_add(&c->iev.ev, NULL);
	TAILQ_INSERT_TAIL(&ctl_conns, c, entry);
}
struct ctl_conn *
control_connbyfd(int fd)
{
struct ctl_conn *c;
TAILQ_FOREACH(c, &ctl_conns, entry) {
if (c->iev.ibuf.fd == fd)
break;
}
return (c);
}
struct ctl_conn *
control_connbypid(pid_t pid)
{
struct ctl_conn *c;
TAILQ_FOREACH(c, &ctl_conns, entry) {
if (c->iev.ibuf.pid == pid)
break;
}
return (c);
}
/*
 * Tear down the control connection on `fd`: abort its playlist
 * transaction if it owned one, unregister its events, close the socket
 * and resume accepting if accept() had been paused for fd exhaustion.
 */
void
control_close(int fd)
{
	struct ctl_conn *c;

	if ((c = control_connbyfd(fd)) == NULL) {
		log_warnx("%s: fd %d: not found", __func__, fd);
		return;
	}

	/* abort the transaction if running by this user */
	if (control_state.tx != -1 && c->iev.ibuf.fd == control_state.tx) {
		playlist_free(&control_state.play);
		control_state.tx = -1;
	}

	msgbuf_clear(&c->iev.ibuf.w);
	TAILQ_REMOVE(&ctl_conns, c, entry);
	event_del(&c->iev.ev);
	close(c->iev.ibuf.fd);

	/* Some file descriptors are available again. */
	if (evtimer_pending(&control_state.evt, NULL)) {
		evtimer_del(&control_state.evt);
		event_add(&control_state.ev, NULL);
	}

	free(c);
}
/*
 * Broadcast an event type to every client in monitor mode, skipping the
 * client that caused the event.
 */
void
control_notify(struct imsgev *iev, int type)
{
	struct ctl_conn *conn;

	TAILQ_FOREACH(conn, &ctl_conns, entry) {
		if (conn->monitor && &conn->iev != iev)
			imsg_compose_event(&conn->iev, IMSG_CTL_MONITOR, 0, 0,
			    -1, &type, sizeof(type));
	}
}
/*
 * libevent handler for one control client: drain read/write buffers and
 * process each queued imsg.  Any read/write or framing error closes the
 * connection.  Most commands notify monitoring clients afterwards.
 */
void
control_dispatch_imsg(int fd, short event, void *bula)
{
	struct ctl_conn *c;
	struct imsg imsg;
	struct player_repeat rp;
	ssize_t n, off;

	if ((c = control_connbyfd(fd)) == NULL) {
		log_warnx("%s: fd %d: not found", __func__, fd);
		return;
	}

	if (event & EV_READ) {
		if (((n = imsg_read(&c->iev.ibuf)) == -1 && errno != EAGAIN) ||
		    n == 0) {
			control_close(fd);
			return;
		}
	}
	if (event & EV_WRITE) {
		if (msgbuf_write(&c->iev.ibuf.w) <= 0 && errno != EAGAIN) {
			control_close(fd);
			return;
		}
	}

	/* Process every complete imsg currently buffered. */
	for (;;) {
		if ((n = imsg_get(&c->iev.ibuf, &imsg)) == -1) {
			control_close(fd);
			return;
		}

		if (n == 0)
			break;

		switch (imsg.hdr.type) {
		/* --- playback controls --- */
		case IMSG_CTL_PLAY:
			switch (play_state) {
			case STATE_STOPPED:
				main_playlist_resume();
				break;
			case STATE_PLAYING:
				/* do nothing */
				break;
			case STATE_PAUSED:
				play_state = STATE_PLAYING;
				main_send_player(IMSG_RESUME, -1);
				break;
			}
			control_notify(&c->iev, imsg.hdr.type);
			break;
		case IMSG_CTL_TOGGLE_PLAY:
			switch (play_state) {
			case STATE_STOPPED:
				main_playlist_resume();
				break;
			case STATE_PLAYING:
				play_state = STATE_PAUSED;
				main_send_player(IMSG_PAUSE, -1);
				break;
			case STATE_PAUSED:
				play_state = STATE_PLAYING;
				main_send_player(IMSG_RESUME, -1);
				break;
			}
			control_notify(&c->iev, imsg.hdr.type);
			break;
		case IMSG_CTL_PAUSE:
			if (play_state != STATE_PLAYING)
				break;
			play_state = STATE_PAUSED;
			main_send_player(IMSG_PAUSE, -1);
			control_notify(&c->iev, imsg.hdr.type);
			break;
		case IMSG_CTL_STOP:
			if (play_state == STATE_STOPPED)
				break;
			play_state = STATE_STOPPED;
			main_send_player(IMSG_STOP, -1);
			control_notify(&c->iev, imsg.hdr.type);
			break;
		case IMSG_CTL_RESTART:
			main_send_player(IMSG_STOP, -1);
			main_restart_track();
			control_notify(&c->iev, imsg.hdr.type);
			break;
		/* --- playlist queries and navigation --- */
		case IMSG_CTL_FLUSH:
			playlist_truncate();
			control_notify(&c->iev, imsg.hdr.type);
			break;
		case IMSG_CTL_SHOW:
			main_send_playlist(&c->iev);
			break;
		case IMSG_CTL_STATUS:
			main_send_status(&c->iev);
			break;
		case IMSG_CTL_NEXT:
			main_send_player(IMSG_STOP, -1);
			main_playlist_advance();
			control_notify(&c->iev, imsg.hdr.type);
			break;
		case IMSG_CTL_PREV:
			main_send_player(IMSG_STOP, -1);
			main_playlist_previous();
			control_notify(&c->iev, imsg.hdr.type);
			break;
		case IMSG_CTL_JUMP:
			main_playlist_jump(&c->iev, &imsg);
			control_notify(&c->iev, imsg.hdr.type);
			break;
		case IMSG_CTL_REPEAT:
			/* Payload: struct player_repeat; -1 means "leave as is". */
			if (IMSG_DATA_SIZE(imsg) != sizeof(rp)) {
				log_warnx("%s: got wrong size", __func__);
				break;
			}
			memcpy(&rp, imsg.data, sizeof(rp));
			if (rp.repeat_all != -1)
				repeat_all = rp.repeat_all;
			if (rp.repeat_one != -1)
				repeat_one = rp.repeat_one;
			control_notify(&c->iev, imsg.hdr.type);
			break;
		/* --- playlist-editing transaction: BEGIN / ADD / COMMIT --- */
		case IMSG_CTL_BEGIN:
			/* Only one client may hold the transaction at a time. */
			if (control_state.tx != -1) {
				main_senderr(&c->iev, "locked");
				break;
			}
			control_state.tx = c->iev.ibuf.fd;
			imsg_compose_event(&c->iev, IMSG_CTL_BEGIN, 0, 0, -1,
			    NULL, 0);
			break;
		case IMSG_CTL_ADD:
			if (control_state.tx != -1 &&
			    control_state.tx != c->iev.ibuf.fd) {
				main_senderr(&c->iev, "locked");
				break;
			}
			main_enqueue(control_state.tx != -1,
			    &control_state.play, &c->iev, &imsg);
			/* Outside a transaction, an ADD is immediately visible. */
			if (control_state.tx == -1)
				control_notify(&c->iev, imsg.hdr.type);
			break;
		case IMSG_CTL_COMMIT:
			if (control_state.tx != c->iev.ibuf.fd) {
				main_senderr(&c->iev, "locked");
				break;
			}
			/* Payload: play offset into the new playlist. */
			if (IMSG_DATA_SIZE(imsg) != sizeof(off)) {
				main_senderr(&c->iev, "wrong size");
				break;
			}
			memcpy(&off, imsg.data, sizeof(off));
			playlist_swap(&control_state.play, off);
			memset(&control_state.play, 0,
			    sizeof(control_state.play));
			control_state.tx = -1;
			imsg_compose_event(&c->iev, IMSG_CTL_COMMIT, 0, 0, -1,
			    NULL, 0);
			control_notify(&c->iev, imsg.hdr.type);
			break;
		case IMSG_CTL_MONITOR:
			/* From now on this client receives event broadcasts. */
			c->monitor = 1;
			break;
		default:
			log_debug("%s: error handling imsg %d", __func__,
			    imsg.hdr.type);
			break;
		}
		imsg_free(&imsg);
	}

	imsg_event_add(&c->iev);
}
/*
 * Forward an imsg from another process to the control client identified
 * by the message's pid.  Returns 0 when no such client is connected.
 */
int
control_imsg_relay(struct imsg *imsg)
{
	struct ctl_conn *conn;

	conn = control_connbypid(imsg->hdr.pid);
	if (conn == NULL)
		return (0);

	return (imsg_compose_event(&conn->iev, imsg->hdr.type, 0, imsg->hdr.pid,
	    -1, imsg->data, IMSG_DATA_SIZE(*imsg)));
}
|
<filename>build_scripts/copyStatic.js
/*
copyStatic.js
Created by <NAME>. on Dec 20 2020.
Copyright 2020 Vircadia contributors.
Distributed under the Apache License, Version 2.0.
See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
*/
const fse = require('fs-extra');

// Source and destination for the static-asset copy.
const srcDir = `./src/static`;
const destDir = `./dist/static`;

try {
    fse.mkdirSync(destDir);
    fse.copySync(srcDir, destDir, { overwrite: true });
    console.log(`Successfully copied ${srcDir} to ${destDir}!`);
}
catch (err) {
    // Fixed message (previously read "Failed to copy copied ...");
    // also dropped the stray semicolon after the catch block.
    console.log(`Failed to copy ${srcDir} to ${destDir}!: ${err}`);
}
|
const app = getApp();

// Single-field edit page: receives a label/value/key triple via the query
// string, lets the user edit the value, and writes it back into the cached
// `user` object in storage before navigating back.
Page({
    data: {
        value: '',  // current input value
        label: '',  // page title / field label
        key: ''     // property name to update on the stored user
    },
    onLoad(option) {
        const {
            label,
            value,
            key
        } = option;
        wx.setNavigationBarTitle({
            title: label
        });
        this.setData({value, label, key});
    },
    // Keep data.value in sync with the input control.
    inputHandle(e) {
        const that = this;
        that.setData({
            value: e.detail.value
        });
    },
    backInfoPage() {
        wx.navigateBack();
    },
    // Persist the edited field, push it into the previous page's `user`
    // data, then navigate back.
    saveInfoHandle() {
        const that = this;
        const { key, value } = that.data;
        const pages = getCurrentPages();
        const prePage = pages[pages.length - 2];
        wx.pro.getStorage('user')
            .then((user) => {
                const newUser = Object.assign({}, user, { [key]: value });
                return wx.pro.setStorage('user', newUser);
            })
            .then((otherUser) => {
                // NOTE(review): assumes wx.pro.setStorage resolves with the
                // stored user object — confirm the promise wrapper's contract.
                prePage.setData({user: otherUser});
                wx.navigateBack();
            })
            .catch((err) => {
                console.log(err);
            });
    }
})
|
#!/bin/bash
#
# author: J.L. Lanfranchi
#
# Copyright (c) 2014-2017, The IceCube Collaboration
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
#
BASEDIR=$(dirname "$0")
PISA=$BASEDIR/..
TMP=/tmp/pisa_tests
export PISA_RESOURCES=${TMP}/pisa_resources:$PISA_RESOURCES
mkdir -p $TMP
mkdir -p $PISA_RESOURCES
echo "PISA=$PISA"

# Track whether any per-module test fails; checked at the end of the script.
FAILURE=false

echo "=============================================================================="
echo "Generating toy MC for use with test scripts"
echo "=============================================================================="
PISA_FTYPE=float32 python $PISA/pisa/scripts/make_toy_events.py --outdir ${PISA_RESOURCES}/events \
	--num-events 1e5 \
	--energy-range 1 80 \
	--spectral-index 1 \
	--coszen-range -1 1
echo "------------------------------------------------------------------------------"
echo "Finished creating toy MC events to be used with unit tests"
echo "------------------------------------------------------------------------------"
echo ""
echo ""

echo "=============================================================================="
echo "Running test_example_pipelines.py"
echo "=============================================================================="
python $BASEDIR/test_example_pipelines.py -v
echo "------------------------------------------------------------------------------"
echo "Finished Running test_example_pipelines.py"
echo "------------------------------------------------------------------------------"
echo ""
echo ""

# TODO: all files except setup.py and __init__.py that are listed below should
# have a command-line test defined further down in this script (i.e., these are
# scripts that require specific command-line arguments)
for f in `find $PISA/pisa -name "*.py"`
do
	BN=$(basename "$f")
	#if [[ "$BN" == test_* ]];then continue;fi
	if [[ "$f" == *pisa/scripts/* ]];then continue;fi
	if [ "$BN" == "__init__.py" ];then continue;fi
	if [ "$BN" == "setup.py" ];then continue;fi
	if [ "$BN" == pipeline.py ];then continue;fi
	if [ "$BN" == distribution_maker.py ];then continue;fi
	if [ "$BN" == genie.py ];then continue;fi
	echo "=============================================================================="
	echo "Running python $BN at abs path"
	echo "  `realpath $f`"
	echo "=============================================================================="
	python $f || FAILURE=true
	echo "------------------------------------------------------------------------------"
	echo "Finished running python $BN"
	echo "------------------------------------------------------------------------------"
	echo ""
	echo ""
	sleep 1
done

#
# Test CPU vs GPU, both FP64 and FP32 and CPU FP32 vs CPU FP64
#
OUTDIR_CPU64_NH_PIPELINE=$TMP/cpu64nh_pipeline
echo "=============================================================================="
echo "Running pipeline.py with example.cfg, with CPU & fp64 selected."
echo "Storing results to"
echo "  $OUTDIR_CPU64_NH_PIPELINE"
echo "=============================================================================="
PISA_FTYPE=float64 python $PISA/pisa/core/pipeline.py \
	-p settings/pipeline/example.cfg \
	--select "nh" \
	--outdir $OUTDIR_CPU64_NH_PIPELINE \
	--png -v

OUTDIR_CPU32_NH_PIPELINE=$TMP/cpu32nh_pipeline
echo "=============================================================================="
echo "Running pipeline.py with example.cfg, with CPU & fp32 selected."
echo "Storing results to"
echo "  $OUTDIR_CPU32_NH_PIPELINE"
echo "=============================================================================="
PISA_FTYPE=float32 python $PISA/pisa/core/pipeline.py \
	-p settings/pipeline/example.cfg \
	-a stage.aeff param.aeff_events=events/events__vlvnt__toy_1_to_80GeV_spidx1.0_cz-1_to_1_1e5evts_set0__unjoined.hdf5 \
	-a stage.reco param.reco_events=events/events__vlvnt__toy_1_to_80GeV_spidx1.0_cz-1_to_1_1e5evts_set0__unjoined.hdf5 \
	--select "nh" \
	--outdir $OUTDIR_CPU32_NH_PIPELINE \
	--png -v

OUTDIR_GPU64_NH_PIPELINE=$TMP/gpu64nh_pipeline
echo "=============================================================================="
echo "Running pipeline.py with example.cfg, with GPU & fp64 selected."
echo "Storing results to"
echo "  $OUTDIR_GPU64_NH_PIPELINE"
echo "=============================================================================="
PISA_FTYPE=float64 python $PISA/pisa/core/pipeline.py \
	-p settings/pipeline/example.cfg \
	-a stage.aeff param.aeff_events=events/events__vlvnt__toy_1_to_80GeV_spidx1.0_cz-1_to_1_1e5evts_set0__unjoined.hdf5 \
	-a stage.reco param.reco_events=events/events__vlvnt__toy_1_to_80GeV_spidx1.0_cz-1_to_1_1e5evts_set0__unjoined.hdf5 \
	--select "nh" \
	--outdir $OUTDIR_GPU64_NH_PIPELINE \
	--png -v

OUTDIR_GPU32_NH_PIPELINE=$TMP/gpu32nh_pipeline
echo "=============================================================================="
echo "Running pipeline.py with example.cfg, with GPU & fp32 selected."
echo "Storing results to"
echo "  $OUTDIR_GPU32_NH_PIPELINE"
echo "=============================================================================="
PISA_FTYPE=float32 python $PISA/pisa/core/pipeline.py \
	-p settings/pipeline/example.cfg \
	-a stage.aeff param.aeff_events=events/events__vlvnt__toy_1_to_80GeV_spidx1.0_cz-1_to_1_1e5evts_set0__unjoined.hdf5 \
	-a stage.reco param.reco_events=events/events__vlvnt__toy_1_to_80GeV_spidx1.0_cz-1_to_1_1e5evts_set0__unjoined.hdf5 \
	--select "nh" \
	--outdir $OUTDIR_GPU32_NH_PIPELINE \
	--png -v

OUTDIR=$TMP/compare_cpu64nh_pipeline_gpu64nh_pipeline
echo "=============================================================================="
echo "Running compare.py, CPU vs. GPU pipeline settings, FP64."
echo "Storing results to"
echo "  $OUTDIR"
echo "=============================================================================="
PISA_FTYPE=float64 python $PISA/pisa/scripts/compare.py \
	--ref $OUTDIR_CPU64_NH_PIPELINE/*.json* \
	--ref-label 'cpu64nh' \
	--test $OUTDIR_GPU64_NH_PIPELINE/*.json* \
	--test-label 'gpu64nh' \
	--outdir $OUTDIR \
	--png -v

OUTDIR=$TMP/compare_cpu32nh_pipeline_gpu32nh_pipeline
echo "=============================================================================="
echo "Running compare.py, CPU vs. GPU pipeline settings, FP32."
echo "Storing results to"
echo "  $OUTDIR"
echo "=============================================================================="
PISA_FTYPE=float32 python $PISA/pisa/scripts/compare.py \
	--ref $OUTDIR_CPU32_NH_PIPELINE/*.json* \
	--ref-label 'cpu32nh' \
	--test $OUTDIR_GPU32_NH_PIPELINE/*.json* \
	--test-label 'gpu32nh' \
	--outdir $OUTDIR \
	--png -v

OUTDIR=$TMP/compare_cpu32nh_pipeline_cpu64nh_pipeline
echo "=============================================================================="
echo "Running compare.py, CPU32NH vs. CPU64NH"
echo "Storing results to"
echo "  $OUTDIR"
echo "=============================================================================="
PISA_FTYPE=float64 python $PISA/pisa/scripts/compare.py \
	--ref $OUTDIR_CPU64_NH_PIPELINE/*.json* \
	--ref-label 'cpu64nh' \
	--test $OUTDIR_CPU32_NH_PIPELINE/*.json* \
	--test-label 'cpu32nh' \
	--outdir $OUTDIR \
	--png -v

#
# Test hierarchy NH vs IH
#
OUTDIR_CPU64_IH_PIPELINE=$TMP/cpu64ih_pipeline
echo "=============================================================================="
echo "Running pipeline.py with example.cfg, with ih selected."
echo "Storing results to"
echo "  $OUTDIR_CPU64_IH_PIPELINE"
echo "=============================================================================="
PISA_FTYPE=float64 python $PISA/pisa/core/pipeline.py \
	-p settings/pipeline/example.cfg \
	-a stage.aeff param.aeff_events=events/events__vlvnt__toy_1_to_80GeV_spidx1.0_cz-1_to_1_1e5evts_set0__unjoined.hdf5 \
	-a stage.reco param.reco_events=events/events__vlvnt__toy_1_to_80GeV_spidx1.0_cz-1_to_1_1e5evts_set0__unjoined.hdf5 \
	--select "ih" \
	--outdir $OUTDIR_CPU64_IH_PIPELINE \
	--png -v

OUTDIR=$TMP/compare_cpu64nh_pipeline_to_cpu64ih_pipeline
echo "=============================================================================="
echo "Running compare.py, nh vs. ih MapSets produced above with plots."
echo "Storing results to"
echo "  $OUTDIR"
echo "=============================================================================="
python $PISA/pisa/scripts/compare.py \
	--ref $OUTDIR_CPU64_IH_PIPELINE/*.json* \
	--ref-label 'cpu64ih' \
	--test $OUTDIR_CPU64_NH_PIPELINE/*.json* \
	--test-label 'cpu64nh' \
	--outdir $OUTDIR \
	--png -v

#
# Test that DistributionMaker has same result as pipeline
#
# TODO: removed since -a option doesn't work for distmaker
#OUTDIR_CPU64_NH_DISTMAKER=$TMP/cpu64nh_distmaker
#echo "=============================================================================="
#echo "Running distribution_maker.py with example.cfg, with nh selected."
#echo "Storing results to"
#echo "  $OUTDIR_CPU64_NH_DISTMAKER"
#echo "=============================================================================="
#PISA_FTYPE=float64 python $PISA/pisa/core/distribution_maker.py \
#	-p settings/pipeline/example.cfg \
#	-a stage.aeff param.aeff_events=events/events__vlvnt__toy_1_to_80GeV_spidx1.0_cz-1_to_1_1e5evts_set0__unjoined.hdf5 \
#	-a stage.reco param.reco_events=events/events__vlvnt__toy_1_to_80GeV_spidx1.0_cz-1_to_1_1e5evts_set0__unjoined.hdf5 \
#	--select "nh" \
#	--outdir $OUTDIR_CPU64_NH_DISTMAKER \
#	--png -v
#
#OUTDIR=$TMP/compare_cpu64nh_distmaker_to_cpu64nh_pipeline
#echo "=============================================================================="
#echo "Running compare.py, fp64/cpu distmaker vs. fp64/cpu pipeline-produced MapSets."
#echo "Storing results to"
#echo "  $OUTDIR"
#echo "=============================================================================="
#python $PISA/pisa/scripts/compare.py \
#	--ref $OUTDIR_CPU64_NH_PIPELINE/*.json* \
#	--ref-label 'cpu64nh_pipeline' \
#	--test $OUTDIR_CPU64_NH_DISTMAKER/*.json* \
#	--test-label 'cpu64nh_distmaker' \
#	--outdir $OUTDIR \
#	--png -v

# Call script to run hypothesis testing (runs minimizer with a pipeline)
$BASEDIR/test_hypo_testing.sh

# Propagate failures recorded in the per-module loop above.  Previously
# FAILURE was set but never examined, so the script always exited 0 even
# when individual module tests failed.
if [ "$FAILURE" = true ]; then
	echo "One or more per-module tests FAILED"
	exit 1
fi
|
#!/bin/sh
# Run hylagi (5 s timeout, nondeterministic mode) on every .hydla file in
# the current directory, appending each file name followed by its output
# to a single log file.
files="./*.hydla"
log="./log_all.txt"

# -f: do not error out on the first run, when the log does not exist yet
# (the bare `rm` printed a "No such file" complaint).
rm -f "${log}"

# Note: ${files} must stay unquoted here so the glob expands.
for filepath in ${files}
do
	echo "${filepath}" >> "${log}"
	../bin/hylagi -t5 "${filepath}" --nd >> "${log}"
done

# Keep the terminal window open until the user presses enter.
read wait
|
#!/usr/bin/env bash
{{!
  Template adapted from here:
  https://github.com/chriskempson/base16-builder/blob/master/templates/gnome-terminal/dark.sh.erb
}}
# Base16 Edge Light - Gnome Terminal color scheme install script
# cjayross (https://github.com/cjayross)

[[ -z "$PROFILE_NAME" ]] && PROFILE_NAME="Base 16 Edge Light 256"
[[ -z "$PROFILE_SLUG" ]] && PROFILE_SLUG="base-16-edge-light-256"
[[ -z "$DCONF" ]] && DCONF=dconf
[[ -z "$UUIDGEN" ]] && UUIDGEN=uuidgen

# Write one key under the new profile via dconf.
# NOTE(review): "$type" is never set in this function (no local/global
# assignment precedes it), so the string-quoting branch appears dead and
# callers pre-quote values themselves — confirm against upstream template.
dset() {
    local key="$1"; shift
    local val="$1"; shift

    if [[ "$type" == "string" ]]; then
        val="'$val'"
    fi

    "$DCONF" write "$PROFILE_KEY/$key" "$val"
}

# Because dconf still doesn't have "append"
dlist_append() {
    local key="$1"; shift
    local val="$1"; shift

    # Read the existing list, drop any duplicate of $val, then re-append it.
    local entries="$(
        {
            "$DCONF" read "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
            echo "'$val'"
        } | head -c-1 | tr "\n" ,
    )"

    "$DCONF" write "$key" "[$entries]"
}

# Newest versions of gnome-terminal use dconf
if which "$DCONF" > /dev/null 2>&1; then
    # Check that uuidgen is available
    type $UUIDGEN >/dev/null 2>&1 || { echo >&2 "Requires uuidgen but it's not installed.  Aborting!"; exit 1; }

    [[ -z "$BASE_KEY_NEW" ]] && BASE_KEY_NEW=/org/gnome/terminal/legacy/profiles:

    if [[ -n "`$DCONF list $BASE_KEY_NEW/`" ]]; then
        if which "$UUIDGEN" > /dev/null 2>&1; then
            PROFILE_SLUG=`uuidgen`
        fi

        # Clone the current default profile as the starting point.
        if [[ -n "`$DCONF read $BASE_KEY_NEW/default`" ]]; then
            DEFAULT_SLUG=`$DCONF read $BASE_KEY_NEW/default | tr -d \'`
        else
            DEFAULT_SLUG=`$DCONF list $BASE_KEY_NEW/ | grep '^:' | head -n1 | tr -d :/`
        fi

        DEFAULT_KEY="$BASE_KEY_NEW/:$DEFAULT_SLUG"
        PROFILE_KEY="$BASE_KEY_NEW/:$PROFILE_SLUG"

        # Copy existing settings from default profile
        $DCONF dump "$DEFAULT_KEY/" | $DCONF load "$PROFILE_KEY/"

        # Add new copy to list of profiles
        dlist_append $BASE_KEY_NEW/list "$PROFILE_SLUG"

        # Update profile values with theme options
        dset visible-name "'$PROFILE_NAME'"
        dset palette "['#fafafa', '#db7070', '#7c9f4b', '#d69822', '#6587bf', '#b870ce', '#509c93', '#5e646f', '#5e646f', '#db7070', '#7c9f4b', '#d69822', '#6587bf', '#b870ce', '#509c93', '#5e646f']"
        dset background-color "'#fafafa'"
        dset foreground-color "'#5e646f'"
        dset bold-color "'#5e646f'"
        dset bold-color-same-as-fg "true"
        dset cursor-colors-set "true"
        dset cursor-background-color "'#5e646f'"
        dset cursor-foreground-color "'#fafafa'"
        dset use-theme-colors "false"
        dset use-theme-background "false"

        unset PROFILE_NAME
        unset PROFILE_SLUG
        unset DCONF
        unset UUIDGEN
        exit 0
    fi
fi

# Fallback for Gnome 2 and early Gnome 3
[[ -z "$GCONFTOOL" ]] && GCONFTOOL=gconftool
[[ -z "$BASE_KEY" ]] && BASE_KEY=/apps/gnome-terminal/profiles
PROFILE_KEY="$BASE_KEY/$PROFILE_SLUG"

# Write one typed key under the gconf profile.
gset() {
    local type="$1"; shift
    local key="$1"; shift
    local val="$1"; shift

    "$GCONFTOOL" --set --type "$type" "$PROFILE_KEY/$key" -- "$val"
}

# Because gconftool doesn't have "append"
glist_append() {
    local type="$1"; shift
    local key="$1"; shift
    local val="$1"; shift

    local entries="$(
        {
            "$GCONFTOOL" --get "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
            echo "$val"
        } | head -c-1 | tr "\n" ,
    )"

    "$GCONFTOOL" --set --type list --list-type $type "$key" "[$entries]"
}

# Append the Base16 profile to the profile list
glist_append string /apps/gnome-terminal/global/profile_list "$PROFILE_SLUG"

gset string visible_name "$PROFILE_NAME"
gset string palette "#fafafa:#db7070:#7c9f4b:#d69822:#6587bf:#b870ce:#509c93:#5e646f:#5e646f:#db7070:#7c9f4b:#d69822:#6587bf:#b870ce:#509c93:#5e646f"
gset string background_color "#fafafa"
gset string foreground_color "#5e646f"
gset string bold_color "#5e646f"
gset bool bold_color_same_as_fg "true"
# NOTE(review): the next three lines use dconf-style dashed keys and
# pre-quoted values, unlike the underscore keys and bare values used by the
# other gset calls above — this looks like a copy-paste from the dconf
# branch; confirm whether gconf-era gnome-terminal has cursor-color keys.
gset bool cursor-colors-set "true"
gset string cursor-background-color "'#5e646f'"
gset string cursor-foreground-color "'#fafafa'"
gset bool use_theme_colors "false"
gset bool use_theme_background "false"

unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONF
unset UUIDGEN
|
package gui;
import javafx.geometry.Insets;
import javafx.scene.control.DialogPane;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.VBox;
import javafx.scene.text.Font;
import javafx.scene.text.Text;
import javafx.scene.text.TextAlignment;
import statics.VersionInfo;
/**
 * The "about" dialog: project logo with a version line on top, followed by a
 * short description of what Guardian does and how passwords are stored.
 */
public class InfoAlert extends GuardianAlert {

    public InfoAlert() {
        super(AlertType.INFORMATION);
        this.setTitle(VersionInfo.PROJECT_TITLE + " Information");

        VBox root = new VBox();
        root.setPadding(new Insets(30, 30, 30, 30));
        root.setSpacing(30);

        DialogPane dialogPane = this.getDialogPane();
        dialogPane.getChildren().setAll(root);
        dialogPane.setPrefWidth(560);
        dialogPane.setPrefHeight(510);

        // Logo block with right-aligned version caption underneath.
        ImageView logo = new ImageView(new Image(VersionInfo.PROJECT_LOGO));
        Text version = new Text("Version: " + VersionInfo.VERSION_NUMBER + " - " + VersionInfo.VERSION_TITLE);
        version.setWrappingWidth(500);
        version.setTextAlignment(TextAlignment.RIGHT);
        version.setFont(new Font(12));
        VBox logoBox = new VBox(logo, version);
        root.getChildren().add(logoBox);

        // Fixed typo in user-facing text: "reffered" -> "referred".
        Text text = new Text("This tool is there to let you create and manage all your different passwords with one password (further referred to as \"key\") alone. " +
                "All the passwords are stored encrypted on your hard drive. " +
                "The encryption process ensures that the only way to get stored passwords back is by decrypting it with the key it was encrypted with. " +
                "To ensure your passwords are stored safely, only link randomly generated passwords with your key. " +
                "DO NOT store sensitive information inside the text area, considering only the passwords are stored encrypted due to safety reasons.\n\n" +
                "Guardian is developed by <NAME>. The projects source code can be found on " + VersionInfo.URL_GITHUB + ".");
        text.setWrappingWidth(500);
        text.setTextAlignment(TextAlignment.JUSTIFY);
        text.setFont(new Font(17));
        root.getChildren().add(text);
    }
}
|
# Evaluate the 512+0+512-HPMI model on WikiText-103 validation, applying the
# shuffle-sentences + nouns/verbs-only augmentation to the first half of each input.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-HPMI/model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-HPMI/512+0+512-SS-N-VB-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_sentences_remove_all_but_nouns_and_verbs_first_half_full --eval_function last_element_eval
|
#!/usr/bin/env bash
#############################################################################
##
## Integration script for XSB-MT
## Last updated on May 17, 2018
##
## This file is part of Logtalk <https://logtalk.org/>
## Copyright 1998-2020 Paulo Moura <pmoura@logtalk.org>
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
#############################################################################
## As silly as it seems, there is no reliable solution to put the following
## checks in their own file that would be source'd within each individual
## integration script!!!
# Locate the Logtalk installation: honour $LOGTALKHOME when set and valid,
# otherwise probe the usual install prefixes and finally the directory
# containing this script.
if ! [ "$LOGTALKHOME" ]; then
	echo "The environment variable LOGTALKHOME should be defined first, pointing"
	echo "to your Logtalk installation directory!"
	echo "Trying the default locations for the Logtalk installation..."
	if [ -d "/usr/local/share/logtalk" ]; then
		LOGTALKHOME=/usr/local/share/logtalk
		echo "... using Logtalk installation found at /usr/local/share/logtalk"
	elif [ -d "/usr/share/logtalk" ]; then
		LOGTALKHOME=/usr/share/logtalk
		echo "... using Logtalk installation found at /usr/share/logtalk"
	elif [ -d "/opt/local/share/logtalk" ]; then
		LOGTALKHOME=/opt/local/share/logtalk
		echo "... using Logtalk installation found at /opt/local/share/logtalk"
	elif [ -d "/opt/share/logtalk" ]; then
		LOGTALKHOME=/opt/share/logtalk
		echo "... using Logtalk installation found at /opt/share/logtalk"
	elif [ -d "$HOME/share/logtalk" ]; then
		LOGTALKHOME="$HOME/share/logtalk"
		echo "... using Logtalk installation found at $HOME/share/logtalk"
	elif [ -f "$( cd "$( dirname "$0" )" && pwd )/../core/core.pl" ]; then
		# Last resort: assume this script lives in <install>/integration
		LOGTALKHOME="$( cd "$( dirname "$0" )" && pwd )/.."
		echo "... using Logtalk installation found at $( cd "$( dirname "$0" )" && pwd )/.."
	else
		echo "... unable to locate Logtalk installation directory!" >&2
		echo
		exit 1
	fi
	echo
	export LOGTALKHOME=$LOGTALKHOME
elif ! [ -d "$LOGTALKHOME" ]; then
	echo "The environment variable LOGTALKHOME points to a non-existing directory!" >&2
	echo "Its current value is: $LOGTALKHOME" >&2
	echo "The variable must be set to your Logtalk installation directory!" >&2
	echo
	exit 1
fi
# Default the per-user Logtalk directory to ~/logtalk when unset.
if ! [ "$LOGTALKUSER" ]; then
	echo "The environment variable LOGTALKUSER should be defined first, pointing"
	echo "to your Logtalk user directory!"
	echo "Trying the default location for the Logtalk user directory..."
	echo
	export LOGTALKUSER=$HOME/logtalk
fi
# (Re)create the user directory when it is missing, lacks version info, or is
# older than the installed system version.
if [ -d "$LOGTALKUSER" ]; then
	if ! [ -f "$LOGTALKUSER/VERSION.txt" ]; then
		echo "Cannot find version information in the Logtalk user directory at $LOGTALKUSER!"
		echo "Creating an up-to-date Logtalk user directory..."
		logtalk_user_setup
	else
		system_version=$(cat "$LOGTALKHOME/VERSION.txt")
		user_version=$(cat "$LOGTALKUSER/VERSION.txt")
		# Lexicographic comparison of the version strings
		if [ "$user_version" \< "$system_version" ]; then
			echo "Logtalk user directory at $LOGTALKUSER is outdated: "
			echo "	$user_version < $system_version"
			echo "Creating an up-to-date Logtalk user directory..."
			logtalk_user_setup
		fi
	fi
else
	echo "Cannot find \$LOGTALKUSER directory! Creating a new Logtalk user directory"
	echo "by running the \"logtalk_user_setup\" shell script:"
	logtalk_user_setup
fi
# Remember where the user launched us, then hand control over to XSB-MT
# loading the Logtalk integration file.
export LOGTALK_STARTUP_DIRECTORY="$(pwd)"
exec xsb-mt --shared_predicates -l -e "['$LOGTALKHOME/integration/logtalk_xsbmt.pl']." "$@"
|
#!/bin/bash
#####################################################################################
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#####################################################################################
# Parameter "-r" is the robot ID (to determine which Carter is being used).
# Parameter "-m" determines the map.
# Parameter "-o" determines the operator name.
# First two options have default values in case they're not provided.
# Application terminates if operator name is not provided.
# Parse -m/-r/-o; stop at the first unrecognized option.
while getopts :m:r:o: option
do
	case ${option} in
		m) MAP=${OPTARG};;
		r) ROBOT_ID=${OPTARG};;
		o) OPERATOR=${OPTARG};;
		*) break;;
	esac
done
shift $((OPTIND - 1))

# Defaults for robot and map; operator name is mandatory.
# Variables are quoted so empty/unset values cannot break the [ -z ] tests.
if [ -z "$ROBOT_ID" ]
then
	ROBOT_ID="2"
fi

if [ -z "$MAP" ]
then
	MAP="nvidia_R_meeting_rooms_181213b"
fi

if [ -z "$OPERATOR" ]
then
	echo 'Please provide operator name using option -o'
	exit 1
fi

# Please do not indent the continuation strings below: the backslash-newline
# inside the quoted --config value would otherwise embed leading whitespace.
# "$@" (quoted) forwards remaining arguments without word splitting.
engine/alice/tools/main --app apps/carter/carter_delivery/carter_delivery.app.json \
--config "apps/carter/robots/carter_$ROBOT_ID.config.json,apps/assets/maps/$MAP.config.json,\
apps/carter/carter_delivery/operators/operator_$OPERATOR.config.json" \
--graph "apps/assets/maps/$MAP.graph.json" "$@"
|
from django.shortcuts import resolve_url
from django.test import TestCase
class ViewsTest(TestCase):
    """Smoke tests: the admin login page and the site index both render."""

    def test_admin(self):
        res = self.client.get(resolve_url('admin:login'))
        self.assertContains(res, 'Log in | Django site admin', html=True)
        self.assertEqual(res.status_code, 200)

    def test_index(self):
        res = self.client.get(resolve_url('pages:index'))
        self.assertContains(res, 'pages:index', html=True)
        self.assertEqual(res.status_code, 200)
|
# src/bash/qto/funcs/backup-postgres-db.test.sh
# Run the backup-postgres-db action and verify that at least one fresh
# (< 1 minute old) full dump file appears in today's daily sql directory.
doTestBackupPostgresDb(){
	do_log "DEBUG START doTestBackupPostgresDb"

	# Action !!!
	bash src/bash/qto/qto.sh -a backup-postgres-db
	sleep "$sleep_interval"

	# Bug fix: count must start at 0 -- previously a stale value inherited
	# from the environment could mask a missing dump file.
	local count=0
	while read -r f; do
		count=$(( count + 1 ))
	done < <(find "$mix_data_dir/$(date "+%Y")/$(date "+%Y-%m")/$(date "+%Y-%m-%d")/sql/$postgres_app_db" -name '*full.dmp.sql' -mmin -1)

	if [ "$count" -lt 1 ]; then
		msg="db dump files are not found !!!"
		export exit_code=1
		do_exit "$msg"
		exit 1
	fi
	msg="at least one db dump file was created"
	# ${exit_code:-0} guards against an unset exit_code breaking 'test'
	test "${exit_code:-0}" -ne 0 && return
	do_log "DEBUG STOP doTestBackupPostgresDb"
}
# eof func doTestBackupPostgresDb
# eof file: src/bash/qto/funcs/backup-postgres-db.test.sh
|
#include <iostream>
#include <stdexcept>
// Exact rational arithmetic on int numerator/denominator.
// Results are not reduced to lowest terms, and the cross-multiplications in
// the operators can overflow int for large components.
class Rational {
public:
    int numerator;
    int denominator;

    // Throws std::runtime_error when den == 0.
    Rational(int num, int den) : numerator(num), denominator(den) {
        if (denominator == 0) {
            throw std::runtime_error("Denominator cannot be zero");
        }
    }

    // Floating-point approximation of the ratio.
    double to_double() const {
        return static_cast<double>(numerator) / denominator;
    }

    Rational operator+(const Rational& other) const {
        return Rational(numerator * other.denominator + other.numerator * denominator, denominator * other.denominator);
    }

    Rational operator-(const Rational& other) const {
        return Rational(numerator * other.denominator - other.numerator * denominator, denominator * other.denominator);
    }

    Rational operator*(const Rational& other) const {
        return Rational(numerator * other.numerator, denominator * other.denominator);
    }

    Rational operator/(const Rational& other) const {
        if (other.numerator == 0) {
            throw std::runtime_error("Division by zero");
        }
        return Rational(numerator * other.denominator, denominator * other.numerator);
    }

    // Compare by cross-multiplication so 14/8 == 7/4 without reducing.
    bool operator==(const Rational& other) const {
        return (numerator * other.denominator == other.numerator * denominator);
    }

    bool operator!=(const Rational& other) const {
        return !(*this == other);
    }
};

// Bug fix: main() streams Rational objects, but no stream inserter existed,
// so the file did not compile. Prints as "num/den".
inline std::ostream& operator<<(std::ostream& os, const Rational& r) {
    return os << r.numerator << '/' << r.denominator;
}
int main() {
try {
Rational r1(3, 4);
Rational r2(7, 4);
std::cout << r1 / r2 << "==" << r1.to_double() / r2.to_double() << "==" << static_cast<double>(r1.numerator) / r1.denominator / static_cast<double>(r2.numerator) / r2.denominator << '\n';
if (r2 == Rational(14, 8)) std::cout << "equal\n";
if (r2 != Rational(14, 8)) std::cout << "not equal\n";
Rational(3, 0); // this line will throw a runtime error due to division by zero
// keep_window_open("~"); // For some Windows(tm) setups
} catch (std::runtime_error& e) {
std::cout << e.what() << '\n';
}
return 0;
}
|
# Run the bridge-filtering step.
python3 bridgeFiltering.py
|
def render_pos(pos):
    """Format a parser position (object with .line/.column) for error text."""
    return 'line {0}, column {1}'.format(pos.line, pos.column)
class CircuitryException(Exception):
    """Base class for all circuitry errors; catch this to handle the family."""
    pass
class NoSuchComponentType(CircuitryException):
    """Raised when a declaration names an unknown component type."""

    def __init__(self, ttype, pos):
        message = 'No such component type as "{}". '.format(ttype) + render_pos(pos)
        super().__init__(message)
class NoSuchComponentInScope(CircuitryException):
    """Raised when a reference names a component not visible in scope."""

    def __init__(self, name, pos):
        message = 'No such component in scope as "{}". '.format(name) + render_pos(pos)
        super().__init__(message)
class GraphSyntaxError(CircuitryException):
    """Raised when the parser meets a token other than the expected one."""

    def __init__(self, expected, actual, pos):
        message = 'Expected "{}", found "{}" at '.format(expected, actual) + render_pos(pos)
        super().__init__(message)
class NoSuchPlug(CircuitryException):
    """Raised when a connection targets a plug the component type lacks."""

    def __init__(self, jack, name, ttype):
        message = 'No such plug "{}" on "{}" of type "{}"'.format(jack, name, ttype)
        super().__init__(message)
|
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.cli.net;
import org.apache.karaf.shell.commands.Argument;
import org.apache.karaf.shell.commands.Command;
import org.onosproject.cli.AbstractShellCommand;
import org.onosproject.net.region.Region;
import org.onosproject.net.region.RegionService;
import org.onosproject.ui.UiTopoLayoutService;
import org.onosproject.ui.model.topo.UiTopoLayout;
import org.onosproject.ui.model.topo.UiTopoLayoutId;
import static org.onosproject.net.region.RegionId.regionId;
import static org.onosproject.ui.model.topo.UiTopoLayoutId.layoutId;
/**
* Add a new UI layout.
*
* <pre>
* layout-add {layout-id} {bg-ref} \
* [ {region-id} {parent-layout-id} {scale} {offset-x} {offset-y} ]
* </pre>
* Note that if you want to skip a parameter, but set later parameters,
* use dot (".") as a placeholder for null. For example, no associated region
* or parent layout, but setting the scale and offset for the root layout...
* <pre>
* layout-add root @bayareaGEO . . 1.2 0.0 -4.0
* </pre>
*/
@Command(scope = "onos", name = "layout-add",
        description = "Adds a new UI layout.")
public class LayoutAddCommand extends AbstractShellCommand {

    private static final char CODE_GEO = '@';
    private static final char CODE_GRID = '+';
    private static final String NULL_TOKEN = ".";
    private static final String ROOT = "root";

    private static final double DEFAULT_SCALE = 1.0;
    private static final double DEFAULT_OFFSET = 0.0;

    @Argument(index = 0, name = "id", description = "Layout ID",
            required = true, multiValued = false)
    String id = null;

    @Argument(index = 1, name = "bgref", description = "Background Ref",
            required = true, multiValued = false)
    String backgroundRef = null;

    @Argument(index = 2, name = "rid", description = "Region ID (optional)",
            required = false, multiValued = false)
    String regionId = null;

    @Argument(index = 3, name = "plid", description = "Parent layout ID (optional)",
            required = false, multiValued = false)
    String parentId = null;

    @Argument(index = 4, name = "scale", description = "Zoom scale (optional; default 1.0)",
            required = false, multiValued = false)
    String zoomScale = null;

    @Argument(index = 5, name = "offx", description = "Zoom offset-X (optional; default 0.0)",
            required = false, multiValued = false)
    String zoomOffsetX = null;

    @Argument(index = 6, name = "offy", description = "Zoom offset-Y (optional; default 0.0)",
            required = false, multiValued = false)
    String zoomOffsetY = null;

    // Fix: removed the unused private field 'regionService' -- execute()
    // always resolved the service into a local variable that shadowed it.

    @Override
    protected void execute() {
        UiTopoLayoutService service = get(UiTopoLayoutService.class);
        RegionService regionService = get(RegionService.class);

        UiTopoLayout layout;

        if (ROOT.equals(id)) {
            // "root" adjusts the pre-existing root layout in place;
            // it is never registered as a new layout.
            layout = service.getRootLayout();
            setAppropriateBackground(layout);
            setZoomParameters(layout);
            return;
        }

        // Otherwise, it is a user-defined layout...
        Region region = nullToken(regionId) ? null : regionService.getRegion(regionId(regionId));
        UiTopoLayoutId pid = nullToken(parentId) ? UiTopoLayoutId.DEFAULT_ID : layoutId(parentId);

        layout = new UiTopoLayout(layoutId(id)).region(region).parent(pid);
        setAppropriateBackground(layout);
        setZoomParameters(layout);
        service.addLayout(layout);
    }

    /** Returns true if the argument was omitted or given as the "." placeholder. */
    private boolean nullToken(String token) {
        return token == null || token.equals(NULL_TOKEN);
    }

    private void setAppropriateBackground(UiTopoLayout layout) {
        /*
         * A note about the format of bgref.. it should be one of:
         *   "."             - signifies no background
         *   "@{map-id}"     - signifies geo background (map)
         *   "+{sprite-id}"  - signifies grid background (sprite)
         *
         * For example, ".", "@bayareaGEO", "+segmentRouting"
         */
        char type = backgroundRef.charAt(0);
        if (type == CODE_GEO) {
            // GEO (map) reference
            layout.geomap(backgroundRef.substring(1));
        } else if (type == CODE_GRID) {
            // Grid (sprite) reference
            layout.sprites(backgroundRef.substring(1));
        }
        // simply ignore null token (".")
    }

    /**
     * Parses s as a double, falling back to def when s is the "." placeholder,
     * null, or unparsable (parse errors are deliberately swallowed).
     */
    private double parseDouble(String s, double def) {
        if (nullToken(s)) {
            return def;
        }
        double result;
        try {
            result = Double.parseDouble(s);
        } catch (NumberFormatException e) {
            result = def;
        }
        return result;
    }

    /** Applies scale/offset arguments (or their defaults) to the layout. */
    private void setZoomParameters(UiTopoLayout layout) {
        double scale = parseDouble(zoomScale, DEFAULT_SCALE);
        double offsetX = parseDouble(zoomOffsetX, DEFAULT_OFFSET);
        double offsetY = parseDouble(zoomOffsetY, DEFAULT_OFFSET);

        layout.scale(scale).offsetX(offsetX).offsetY(offsetY);
    }
}
|
#!/usr/bin/env bash
# Regression test: four queries sent with the same session_id (two via URL
# parameter, two via POST body) must all be logged as distinct query_ids.
CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. $CURDIR/../shell_config.sh
url="http://${CLICKHOUSE_HOST}:${CLICKHOUSE_PORT_HTTP}/?session_id=test_01194"
# Random marker makes this run's queries uniquely greppable in query_log.
rnd=$RANDOM
${CLICKHOUSE_CURL} -sS "$url&query=SELECT+$rnd,1" > /dev/null
${CLICKHOUSE_CURL} -sS "$url&query=SELECT+$rnd,2" > /dev/null
${CLICKHOUSE_CURL} -sS "$url" --data "SELECT $rnd,3" > /dev/null
${CLICKHOUSE_CURL} -sS "$url" --data "SELECT $rnd,4" > /dev/null
${CLICKHOUSE_CURL} -sS "$url" --data "SYSTEM FLUSH LOGS"
${CLICKHOUSE_CURL} -sS "$url&query=SELECT+count(DISTINCT+query_id)+FROM+system.query_log+WHERE+query+LIKE+'SELECT+$rnd%25'"
|
package com.webcheckers.appl;
import java.util.HashMap;
import java.util.Map;
import com.webcheckers.model.Game;
/**
 * Registry of active games, reachable both by numeric id and by the id's
 * decimal string form. Ids are assigned sequentially starting at 0.
 */
public class GameList {

    /** Games keyed by numeric id. */
    private Map<Integer, Game> games = new HashMap<>();
    /** The same games keyed by the id rendered as a decimal string. */
    private Map<String, Game> gamesString = new HashMap<>();
    /** Next id to hand out. */
    private int currentID = 0;

    /**
     * Adds a game to the map of games
     * @param game
     *      The game to be added
     * @return
     *      The game id associated with that game
     */
    public int addGame(Game game) {
        int assignedID = currentID;
        games.put(assignedID, game);
        gamesString.put(Integer.toString(assignedID), game);
        currentID = assignedID + 1;
        return assignedID;
    }

    /**
     * Get a game given an id
     * @param gameID
     *      the id associated with the game
     * @return
     *      the game
     */
    public Game getGame(int gameID) {
        return games.get(gameID);
    }

    /** Get the map of games keyed by numeric id. */
    public Map<Integer, Game> getGames() {
        return games;
    }

    /** Get the map of games keyed by string id. */
    public Map<String, Game> getGamesString() {
        return gamesString;
    }
}
|
def maxElement(arr):
    """Return the largest element of a non-empty sequence.

    Raises IndexError when arr is empty (same as the original behaviour).
    """
    # Fix: the original used 'max' as the accumulator name, shadowing the
    # builtin max() for the duration of the function.
    largest = arr[0]
    for value in arr:
        if value > largest:
            largest = value
    return largest
# Demo: print the maximum of a sample list (25).
sample = [5, 25, 10, 9]
print(maxElement(sample))
|
/* eslint-disable no-alert */
import { wrapRootElement as wrap } from './wrap-root-element'
export const wrapRootElement = wrap
// Gatsby hook: a new service-worker version is cached -- ask the user
// whether to reload the page to pick it up now.
export const onServiceWorkerUpdateReady = () => {
  const shouldReload = window.confirm(`Cette application a été mise à jour. recharger pour afficher la dernière version?`)
  if (shouldReload) {
    window.location.reload()
  }
}
|
import styled from 'styled-components';
import colors from './index';
// Clickable card container: padded, rounded, white text; dims and darkens
// slightly on hover to signal interactivity.
export const Card = styled.div`
padding: 16px;
border-radius: 8px;
color: ${colors.white};
&:hover {
cursor: pointer;
opacity: 0.9;
background-color: ${colors.darkGrey};
}
`;
|
#!/bin/bash -e
# Copyright 2016-2017 Nitor Creations Oy
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Shared helpers (check_parameters, fail, ...) used by the functions below.
source "$(dirname "${BASH_SOURCE[0]}")/common_tools.sh"
# Provision the jenkins user's SSH identity, known_hosts and .gitconfig and,
# when CF_paramMvnDeployId is set, an encrypted Maven settings pair for
# artifact deployment.
jenkins_setup_dotssh () {
  check_parameters CF_paramDnsName
  DOT_SSH_DIR=/var/lib/jenkins/jenkins-home/.ssh
  mkdir -p $DOT_SSH_DIR
  chmod 700 $DOT_SSH_DIR
  # The private deploy key comes from the secret store; derive the public half.
  fetch-secrets.sh get 600 $DOT_SSH_DIR/${CF_paramDnsName}.rsa
  mv -v $DOT_SSH_DIR/${CF_paramDnsName}.rsa $DOT_SSH_DIR/id_rsa
  ssh-keygen -y -f $DOT_SSH_DIR/id_rsa > $DOT_SSH_DIR/id_rsa.pub
  chmod 600 $DOT_SSH_DIR/*
  # Pre-trust github.com plus any extra hosts so builds never hit an
  # interactive host-key prompt.
  for SCAN_HOST in "github.com" $CF_extraScanHosts; do
    if ! ssh-keygen -f $DOT_SSH_DIR/known_hosts -H -F "$SCAN_HOST" | grep . > /dev/null; then
      ssh-keyscan -t rsa "$SCAN_HOST" >> $DOT_SSH_DIR/known_hosts
    fi
  done
  cat > /var/lib/jenkins/jenkins-home/.gitconfig << MARKER
[user]
email = jenkins@${CF_paramDnsName}
name = Jenkins
[push]
default = simple
[branch]
autosetuprebase = always
[pull]
rebase = true
MARKER
  if [ -n "$CF_paramMvnDeployId" ]; then
    [ -n "$MAVEN_HOME" ] || MAVEN_HOME=/var/lib/jenkins/jenkins-home/.m2
    mkdir -p "$MAVEN_HOME"
    chmod 700 "$MAVEN_HOME"
    if ! [ -r "$MAVEN_HOME/settings-security.xml" ]; then
      # Suspend xtrace so the generated master password never reaches the log.
      if [ "$(set -o | grep xtrace | awk '{ print $2 }')" = "on" ]; then
        set +x
        RESET_XTRACE="true"
      fi
      # Bug fix: '[:alnum:]' must be quoted -- unquoted it is a shell glob
      # that could expand against files in the current directory. Also
      # dropped the useless 'cat /dev/urandom |' in favour of a redirect.
      MASTER_PWD=$(mvn -emp "$(tr -cd '[:alnum:]' < /dev/urandom | head -c 12)")
      cat > "$MAVEN_HOME/settings-security.xml" << MARKER
<settingsSecurity>
<master>$MASTER_PWD</master>
</settingsSecurity>
MARKER
      chmod 600 "$MAVEN_HOME/settings-security.xml"
    fi
    if ! [ -r "$MAVEN_HOME/settings.xml" ]; then
      cat > "$MAVEN_HOME/settings.xml" << MARKER
<settings>
</settings>
MARKER
      chmod 600 "$MAVEN_HOME/settings.xml"
    fi
    # Suspend xtrace again while handling the deployer password.
    if [ "$(set -o | grep xtrace | awk '{ print $2 }')" = "on" ]; then
      set +x
      RESET_XTRACE="true"
    fi
    chown -R jenkins:jenkins $MAVEN_HOME
    DEPLOYER_PWD=$(fetch-secrets.sh show "$CF_paramMvnDeployId")
    export DEPLOYER_PASSWORD=$(sudo -iu jenkins mvn -ep "$DEPLOYER_PWD")
    ndt add-deployer-server "$MAVEN_HOME/settings.xml" "$CF_paramMvnDeployId"
    if [ "$RESET_XTRACE" ]; then
      unset RESET_XTRACE
      set -x
    fi
  fi
}
# Attach/restore the tagged EBS volume at the jenkins home mount point
# (default 32 GB), seed it from the stock install on first boot, and install
# an hourly snapshot cron entry.
jenkins_mount_ebs_home () {
check_parameters CF_paramEBSTag
local SIZE=$1
if [ -z "$SIZE" ]; then
SIZE=32
fi
local MOUNT_PATH=/var/lib/jenkins/jenkins-home
ndt volume-from-snapshot --gp3 ${CF_paramEBSTag} ${CF_paramEBSTag} $MOUNT_PATH $SIZE
usermod -d /var/lib/jenkins/jenkins-home jenkins
mkdir -p /var/lib/jenkins/jenkins-home
# First boot only: the restored volume has no config.xml yet, so copy the
# default installation in as a seed.
if ! [ -e /var/lib/jenkins/jenkins-home/config.xml ]; then
if [ -e /var/lib/jenkins-default/config.xml ]; then
cp -a /var/lib/jenkins-default/* /var/lib/jenkins/jenkins-home/
fi
fi
cat > /etc/cron.d/${CF_paramEBSTag}-snapshot << MARKER
SHELL=/bin/bash
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin
30 * * * * root ndt snapshot-from-volume -w ${CF_paramEBSTag} ${CF_paramEBSTag} $MOUNT_PATH >> /var/log/snapshots.log 2>&1
MARKER
}
# Drop a helper script into jenkins-home that snapshots the home volume;
# the snapshot job installed below invokes it.
jenkins_setup_snapshot_script () {
if [ ! -e /var/lib/jenkins/jenkins-home/snapshot_jenkins_home.sh ]; then
# Unquoted EOF on purpose: ${CF_paramEBSTag} is expanded into the script.
cat > /var/lib/jenkins/jenkins-home/snapshot_jenkins_home.sh << EOF
#!/bin/bash -xe
ndt snapshot-from-volume -w ${CF_paramEBSTag} ${CF_paramEBSTag} /var/lib/jenkins/jenkins-home
EOF
fi
chmod 755 /var/lib/jenkins/jenkins-home/snapshot_jenkins_home.sh
}
# Patch the distro's init.d jenkins script (backup kept as jenkins.orig) so
# a home-volume snapshot is taken right after the service stops.
jenkins_setup_snapshot_on_shutdown () {
# Amend service script to call snapshot_jenkins_home right after stopping the service - original script saved as jenkins.orig
case "$SYSTEM_TYPE" in
ubuntu)
# Inserts the snapshot call after the do_stop invocation.
perl -i.orig -e 'while(<>){print;if(m!^(\s+)do_stop!){print $1.'\''retval="$?"'\''."\n".$1."/usr/local/bin/ndt snapshot-from-volume '${CF_paramEBSTag}' '${CF_paramEBSTag}' /var/lib/jenkins/jenkins-home\n";last;}}$_=<>;s/\$\?/\$retval/;print;while(<>){print}' /etc/init.d/jenkins
;;
centos|fedora|rhel|rocky)
# Same patch, but anchored on killproc for RedHat-style init scripts.
perl -i.orig -e 'while(<>){print;if(m!^(\s+)killproc!){print $1.'\''retval="$?"'\''."\n".$1."/usr/local/bin/ndt snapshot-from-volume '${CF_paramEBSTag}' '${CF_paramEBSTag}' /var/lib/jenkins/jenkins-home\n";last;}}$_=<>;s/\$\?/\$retval/;print;while(<>){print}' /etc/init.d/jenkins
;;
*)
echo "Unkown system type $SYSTEM_TYPE"
;;
esac
}
# Create a (disabled, timer-triggered) "snapshot-jenkins-home" job unless
# some existing job already invokes snapshot_jenkins_home.sh.
jenkins_setup_snapshot_job () {
if ! find /var/lib/jenkins/jenkins-home/jobs -mindepth 2 -maxdepth 4 -name config.xml -print0 | xargs -0 fgrep -q snapshot_jenkins_home.sh ; then
sync_jenkins_conf_job_name="snapshot-jenkins-home"
mkdir -p /var/lib/jenkins/jenkins-home/jobs/${sync_jenkins_conf_job_name}
# Quoted 'EOF': no shell expansion happens inside the job XML below.
cat > /var/lib/jenkins/jenkins-home/jobs/${sync_jenkins_conf_job_name}/config.xml << 'EOF'
<?xml version='1.0' encoding='UTF-8'?>
<project>
<actions/>
<description>Runs the "snapshot_jenkins_home.sh" script that pushes the latest jenkins config to the remote Jenkins repo.</description>
<keepDependencies>false</keepDependencies>
<properties>
<jenkins.model.BuildDiscarderProperty>
<strategy class="hudson.tasks.LogRotator">
<daysToKeep>60</daysToKeep>
<numToKeep>-1</numToKeep>
<artifactDaysToKeep>-1</artifactDaysToKeep>
<artifactNumToKeep>-1</artifactNumToKeep>
</strategy>
</jenkins.model.BuildDiscarderProperty>
</properties>
<scm class="hudson.scm.NullSCM"/>
<canRoam>true</canRoam>
<disabled>true</disabled>
<blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
<blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
<triggers>
<hudson.triggers.TimerTrigger>
<spec>H H(18-19) * * *
H H(4-5) * * *</spec>
</hudson.triggers.TimerTrigger>
</triggers>
<concurrentBuild>false</concurrentBuild>
<builders>
<hudson.tasks.Shell>
<command>/var/lib/jenkins/jenkins-home/snapshot_jenkins_home.sh 2>&1 | tee -a /var/lib/jenkins/snapshot_jenkins_home.log</command>
</hudson.tasks.Shell>
</builders>
<publishers/>
<buildWrappers/>
</project>
EOF
fi
}
# The default install was already copied into the EBS-backed home by
# jenkins_mount_ebs_home; remove the now-redundant seed copy.
jenkins_discard_default_install () {
rm -rf /var/lib/jenkins-default
}
# Fetch extra secret files; anything placed under /var/lib/jenkins is
# handed over to the jenkins user.
jenkins_fetch_additional_files () {
fetch-secrets.sh get 600 ${CF_paramAdditionalFiles}
# Intentionally unquoted: the parameter is a whitespace-separated file list.
for i in ${CF_paramAdditionalFiles} ; do
case "$i" in
/var/lib/jenkins/*)
chown -R jenkins:jenkins "$i"
;;
esac
done
}
# NOTE(review): writing "false" to the kill-switch file appears to keep the
# slave-to-master protections engaged (switch not thrown) -- confirm against
# the Jenkins agent-to-master security documentation.
jenkins_improve_config_security () {
mkdir -p /var/lib/jenkins/jenkins-home/secrets/
echo false > /var/lib/jenkins/jenkins-home/secrets/slave-to-master-security-kill-switch
}
# Point JENKINS_HOME at the EBS-backed directory and adjust the JVM options
# (headless mode, relaxed CSP, legacy parameter handling) in the distro's
# sysconfig/default file; also disables the AJP port.
jenkins_set_home () {
case "$SYSTEM_TYPE" in
ubuntu)
local SYSCONFIG=/etc/default/jenkins
;;
centos|fedora|rocky|rhel)
local SYSCONFIG=/etc/sysconfig/jenkins
;;
*)
echo "Unkown system type $SYSTEM_TYPE"
exit 1
esac
# Three in-place edits: JENKINS_HOME path, extra -D JVM flags, AJP off.
sed -i -e 's/JENKINS_HOME=.*/JENKINS_HOME=\/var\/lib\/jenkins\/jenkins-home/g' \
-e 's/\(JENKINS_JAVA_OPTIONS=\"[^\"]*\)\"/\1 -Dhudson.model.DirectoryBrowserSupport.CSP= -Djava.awt.headless=true -Dhudson.model.User.SECURITY_243_FULL_DEFENSE=false -Dhudson.model.ParametersAction.keepUndefinedParameters=true\"/g' \
-e 's/^JENKINS_AJP_PORT=.*$/JENKINS_AJP_PORT="-1"/g' $SYSCONFIG
}
# Stop jenkins and remove it from boot so provisioning can proceed while
# the service is down.
jenkins_disable_and_shutdown_service () {
  case "$SYSTEM_TYPE" in
    ubuntu)
      update-rc.d jenkins disable
      service jenkins stop
      ;;
    centos|fedora|rocky|rhel)
      systemctl disable jenkins
      systemctl stop jenkins
      ;;
    *)
      # Typo fix: "Unkown" -> "Unknown".
      # NOTE(review): unlike jenkins_set_home this branch does not exit 1 --
      # confirm whether falling through is intended.
      echo "Unknown system type $SYSTEM_TYPE"
      ;;
  esac
}
# Hand the whole home tree to the jenkins user, then enable at boot and start.
jenkins_enable_and_start_service () {
chown -R jenkins:jenkins /var/lib/jenkins/ /var/lib/jenkins/jenkins-home/
systemctl enable jenkins
systemctl start jenkins
}
# Full provisioning sequence. Order matters: the EBS home must be mounted
# before anything writes under jenkins-home, and the service starts last.
jenkins_setup() {
jenkins_mount_ebs_home ${CF_paramEBSSize}
jenkins_discard_default_install
jenkins_setup_dotssh
jenkins_setup_snapshot_script
jenkins_setup_snapshot_on_shutdown
jenkins_setup_snapshot_job
jenkins_improve_config_security
jenkins_fetch_additional_files
jenkins_set_home
jenkins_enable_and_start_service
}
# Poll http://localhost:8080 for up to ~300s until the X-Jenkins response
# header appears; fail the provisioning run otherwise.
jenkins_wait_service_up () {
# Tests to see if everything is OK
COUNT=0
SERVER=""
while [ $COUNT -lt 300 ] && [ "$SERVER" != "Jenkins" ]; do
sleep 1
# NOTE(review): "awk -NF'-|:'" looks like a typo for -F'-|:' -- verify it
# behaves as intended on the awk implementation of the target system.
SERVER="$(curl -sv http://localhost:8080 2>&1 | grep 'X-Jenkins:' | awk -NF'-|:' '{ print $2 }')"
COUNT=$(($COUNT + 1))
done
if [ "$SERVER" != "Jenkins" ]; then
fail "Jenkins server not started"
fi
}
|
#!/bin/bash
# Copyright 2014 Johns Hopkins University (author: Daniel Povey)
# Apache 2.0
# Optional leading flag: delete the .tar.gz after successful extraction.
remove_archive=false

if [ "$1" == --remove-archive ]; then
  remove_archive=true
  shift
fi

if [ $# -ne 3 ]; then
  echo "Usage: $0 [--remove-archive] <data-base> <url-base> <corpus-part>"
  echo "e.g.: $0 /export/a15/vpanayotov/data www.openslr.org/resources/11 dev-clean"
  echo "With --remove-archive it will remove the archive after successfully un-tarring it."
  echo "<corpus-part> can be one of: dev-clean, test-clean, dev-other, test-other,"
  echo " train-clean-100, train-clean-360, train-other-500."
  # Bug fix: previously the script fell through after printing usage and
  # continued with empty $data/$url/$part.
  exit 1
fi

data=$1
url=$2
part=$3

if [ ! -d "$data" ]; then
  echo "$0: no such directory $data"
  exit 1;
fi
# Validate the requested corpus part against the known list.
part_ok=false
list="dev-clean test-clean dev-other test-other train-clean-100 train-clean-360 train-other-500"
for x in $list; do
  if [ "$part" == $x ]; then part_ok=true; fi
done
if ! $part_ok; then
  echo "$0: expected <corpus-part> to be one of $list, but got '$part'"
  exit 1;
fi

if [ -z "$url" ]; then
  echo "$0: empty URL base."
  exit 1;
fi

# A .complete marker is written after successful extraction; nothing to do
# if it is already present.
if [ -f $data/LibriSpeech/$part/.complete ]; then
  echo "$0: data part $part was already successfully extracted, nothing to do."
  exit 0;
fi
# Sizes of the archive files in bytes. These are from some older releases.
sizes_old="371012589 347390293 379743611 361838298 6420417880 23082659865 30626749128"

# sizes_new is the archive file sizes of the final release. Some of these sizes are of
# things we probably won't download.
sizes_new="337926286 314305928 695964615 297279345 87960560420 33373768 346663984 328757843 6387309499 23049477885 30593501606"

# An existing archive whose size matches none of the known releases is
# assumed to be a corrupt or partial download and is removed.
if [ -f $data/$part.tar.gz ]; then
  size=$(/bin/ls -l $data/$part.tar.gz | awk '{print $5}')
  size_ok=false
  for s in $sizes_old $sizes_new; do if [ $s == $size ]; then size_ok=true; fi; done
  if ! $size_ok; then
    echo "$0: removing existing file $data/$part.tar.gz because its size in bytes $size"
    echo "does not equal the size of one of the archives."
    rm $data/$part.tar.gz
  else
    echo "$data/$part.tar.gz exists and appears to be complete."
  fi
fi

if [ ! -f $data/$part.tar.gz ]; then
  if ! which wget >/dev/null; then
    echo "$0: wget is not installed."
    exit 1;
  fi
  full_url=$url/$part.tar.gz
  echo "$0: downloading data from $full_url. This may take some time, please be patient."

  cd $data
  if ! wget --no-check-certificate $full_url; then
    echo "$0: error executing wget $full_url"
    exit 1;
  fi
fi
# NOTE(review): if <data-base> is a relative path and the download branch
# above already cd'd into it, this second cd resolves against the new cwd --
# verify that callers pass an absolute path.
cd $data

if ! tar -xvzf $part.tar.gz; then
  echo "$0: error un-tarring archive $data/$part.tar.gz"
  exit 1;
fi

# Mark the part as fully extracted so re-runs can skip it.
touch $data/LibriSpeech/$part/.complete

echo "$0: Successfully downloaded and un-tarred $data/$part.tar.gz"

if $remove_archive; then
  echo "$0: removing $data/$part.tar.gz file since --remove-archive option was supplied."
  rm $data/$part.tar.gz
fi
|
<filename>ScalaExamples/src/main/scala/com/mblinn/mbfpp/oo/iterator/TheLambdaBarAndGrille.scala
package com.mblinn.mbfpp.oo.iterator
import com.mblinn.oo.iterator.Address;
object TheLambdaBarAndGrille {
  case class Person(name: String, address: Address)
  case class Address(zip: Int)

  /** Greeting lines for every person whose zip code is close to the Grille. */
  def generateGreetings(people: Seq[Person]) =
    people
      .filter(person => isCloseZip(person.address.zip))
      .map(person => "Hello, %s, and welcome to the Lambda Bar And Grille!".format(person.name))

  /** Only two Philadelphia zip codes count as "close". */
  def isCloseZip(zipCode: Int) = zipCode == 19123 || zipCode == 19103

  /** Same filtering as generateGreetings, but prints instead of collecting. */
  def printGreetings(people: Seq[Person]) =
    people
      .filter(person => isCloseZip(person.address.zip))
      .foreach(person => println("Hello, %s, and welcome to the Lambda Bar And Grille!".format(person.name)))
}
|
import { commands, ExtensionContext, workspace } from "vscode";
import { createUserProfile, editUserProfile, getUserProfile } from "./commands";
import { Profile } from "./models";
import { ProfileStatusBar as statusBar } from "./controls";
import * as constants from "./constants";
import { getCurrentConfig, isValidWorkspace } from "./util";
import { Logger } from "./util/logger";
/**
 * Extension entry point: wires up the config-change listener, the status bar
 * and the three profile commands, then refreshes the displayed profile once.
 */
export async function activate(context: ExtensionContext) {
  try {
    Logger.instance.logInfo("Activating extension");

    Logger.instance.logInfo("Registering for config change event");
    workspace.onDidChangeConfiguration(
      async () => await commands.executeCommand(constants.CommandIds.GET_USER_PROFILE, false)
    );

    Logger.instance.logInfo("Initializing status bar");
    statusBar.instance.attachCommand(constants.CommandIds.GET_USER_PROFILE);

    Logger.instance.logInfo("Initializing commands");
    context.subscriptions.push(statusBar.instance.StatusBar);
    context.subscriptions.push(
      commands.registerCommand(constants.CommandIds.CREATE_USER_PROFILE, createUserProfile)
    );
    context.subscriptions.push(commands.registerCommand(constants.CommandIds.EDIT_USER_PROFILE, editUserProfile));
    context.subscriptions.push(
      commands.registerCommand(constants.CommandIds.GET_USER_PROFILE, async (fromStatusBar: boolean = true) => {
        // Idiom fix: never-reassigned locals are now const.
        const selectedProfile: Profile = await getUserProfile(fromStatusBar, true);
        const validWorkspace = await isValidWorkspace();
        // The status bar shows whether the workspace's current git config
        // matches the selected profile (case-insensitive on all three fields).
        let configInSync = false;
        if (validWorkspace.isValid && validWorkspace.folder) {
          const currentConfig = await getCurrentConfig(validWorkspace.folder);
          configInSync =
            currentConfig.email.toLowerCase() === selectedProfile.email.toLowerCase() &&
            currentConfig.userName.toLowerCase() === selectedProfile.userName.toLowerCase() &&
            currentConfig.signingKey.toLowerCase() === selectedProfile.signingKey.toLowerCase();
        }
        statusBar.instance.updateStatus(selectedProfile, configInSync);
      })
    );
    Logger.instance.logInfo("Initializing commands complete.");
    // Populate the status bar on startup without prompting the user.
    await commands.executeCommand(constants.CommandIds.GET_USER_PROFILE, false);
  } catch (error) {
    // Typo fix in the log message: "ocurred" -> "occurred".
    Logger.instance.logError("Error occurred", error);
  }
}
export function deactivate() {}
|
#!/bin/sh
# Remove every local Docker image by piping all image IDs to `docker rmi`.
# NOTE(review): `awk '{print $3}'` also emits the "IMAGE ID" header token,
# which `docker rmi` will reject - presumably harmless noise; confirm.
docker images | awk '{print $3}' | xargs --no-run-if-empty docker rmi
|
<gh_stars>1-10
package com.github.drakepork.opme.Utils;
import com.google.inject.Inject;
import com.github.drakepork.opme.Main;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.configuration.file.YamlConfiguration;
import java.io.File;
import java.io.IOException;
/**
 * Creates the plugin's language file (as configured by {@code lang-file})
 * and seeds it with the default message strings.
 */
public class LangCreator {

    private Main plugin;

    @Inject
    public LangCreator(Main plugin) {
        this.plugin = plugin;
    }

    /**
     * Loads (or creates) the configured language file under
     * {@code <dataFolder>/lang/} and writes any missing default entries.
     */
    public void init() {
        final File langFile = new File(this.plugin.getDataFolder() + File.separator
                + "lang" + File.separator + plugin.getConfig().getString("lang-file"));
        try {
            final FileConfiguration messages = YamlConfiguration.loadConfiguration(langFile);
            // Register defaults; copyDefaults(true) persists only missing keys.
            messages.addDefault("plugin-prefix", "&f[&cOpme&f] ");
            messages.addDefault("opme-message", "");
            messages.addDefault("deopme-message", "");
            messages.addDefault("op-message", "");
            messages.addDefault("deop-message", "");
            messages.addDefault("no-perm", "&4Error: &cYou do not have permission to execute this command...");
            messages.options().copyDefaults(true);
            messages.save(langFile);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
|
<reponame>FredericW/privacy
"""Differentially private version of Keras optimizer v2."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import tensorflow as tf
import tables
import numpy as np
import pandas as pd
import tensorflow.experimental.numpy as tnp
from tensorflow_privacy.privacy.dp_query import gaussian_query
#===================================================================#
from cactus_sampling import cactus_sample
#===================================================================#
def clip_gradients_vmap(g, l2_norm_clip):
  """Clip a (possibly nested) gradient structure to a global L2 norm.

  Written with only element-wise / reduction ops so it stays compatible
  with `tf.vectorized_map`.  The nest structure of `g` is preserved.
  """
  flat = tf.nest.flatten(g)
  sq_norms = [tf.reduce_sum(input_tensor=tf.square(t)) for t in flat]
  total_norm = tf.sqrt(tf.add_n(sq_norms))
  # Scale down only when the global norm exceeds the clip threshold;
  # otherwise divide by 1 (no-op).
  scale = tf.maximum(total_norm / l2_norm_clip, 1.)
  scaled = [t / scale for t in flat]
  return tf.nest.pack_sequence_as(g, scaled)
def make_vectorized_keras_optimizer_class(cls):
  """Given a subclass of `tf.keras.optimizers.Optimizer`, returns a vectorized DP-SGD subclass of it.

  Args:
    cls: Class from which to derive a DP subclass. Should be a subclass of
      `tf.keras.optimizers.Optimizer`.

  Returns:
    A vectorized DP-SGD subclass of `cls`.
  """

  class DPOptimizerClass(cls):  # pylint: disable=empty-docstring
    __doc__ = """Vectorized differentially private subclass of given class
  `{base_class}`.

  You can use this as a differentially private replacement for
  `{base_class}`. This optimizer implements DP-SGD using
  the standard Gaussian mechanism. It differs from `{dp_keras_class}` in that
  it attempts to vectorize the gradient computation and clipping of
  microbatches.

  When instantiating this optimizer, you need to supply several
  DP-related arguments followed by the standard arguments for
  `{short_base_class}`.

  Examples:

  ```python
  # Create optimizer.
  opt = {dp_vectorized_keras_class}(l2_norm_clip=1.0, noise_multiplier=0.5, num_microbatches=1,
          <standard arguments>)
  ```

  When using the optimizer, be sure to pass in the loss as a
  rank-one tensor with one entry for each example.

  The optimizer can be used directly via its `minimize` method, or
  through a Keras `Model`.

  ```python
  # Compute loss as a tensor by using tf.losses.Reduction.NONE.
  # Compute vector of per-example loss rather than its mean over a minibatch.
  # (Side note: Always verify that the output shape when using
  # tf.losses.Reduction.NONE-- it can sometimes be surprising.
  loss = tf.keras.losses.CategoricalCrossentropy(
      from_logits=True, reduction=tf.losses.Reduction.NONE)

  # Use optimizer in a Keras model.
  opt.minimize(loss, var_list=[var])
  ```

  ```python
  # Compute loss as a tensor by using tf.losses.Reduction.NONE.
  # Compute vector of per-example loss rather than its mean over a minibatch.
  loss = tf.keras.losses.CategoricalCrossentropy(
      from_logits=True, reduction=tf.losses.Reduction.NONE)

  # Use optimizer in a Keras model.
  model = tf.keras.Sequential(...)
  model.compile(optimizer=opt, loss=loss, metrics=['accuracy'])
  model.fit(...)
  ```
  """.format(base_class='tf.keras.optimizers.' + cls.__name__,
             dp_keras_class='DPKeras' + cls.__name__,
             short_base_class=cls.__name__,
             dp_vectorized_keras_class='VectorizedDPKeras' + cls.__name__)

    def __init__(
        self,
        dpsgd_type,
        l2_norm_clip,
        noise_multiplier,
        num_microbatches=None,
        logging=False,
        *args,  # pylint: disable=keyword-arg-before-vararg, g-doc-args
        **kwargs):
      """Initialize the DPOptimizerClass.

      Args:
        dpsgd_type: Noise family selector: 0 = Gaussian, 1 = Laplace,
          anything else = pre-computed "cactus" samples loaded from disk.
        l2_norm_clip: Clipping norm (max L2 norm of per microbatch gradients).
        noise_multiplier: Ratio of the standard deviation to the clipping norm.
        num_microbatches: Number of microbatches into which each minibatch
          is split.
        logging: If True, append the norm of one clipped per-example gradient
          to an on-disk .npy file every step (debugging aid).
        *args: These will be passed on to the base class `__init__` method.
        **kwargs: These will be passed on to the base class `__init__` method.
      """
      super(DPOptimizerClass, self).__init__(*args, **kwargs)
      self._l2_norm_clip = l2_norm_clip
      self._noise_multiplier = noise_multiplier
      self._num_microbatches = num_microbatches
      self._dp_sum_query = gaussian_query.GaussianSumQuery(
          l2_norm_clip, l2_norm_clip * noise_multiplier)
      self._global_state = None
      self._was_dp_gradients_called = False
      self._dpsgd_type = dpsgd_type
      self._logging = logging
      print('\n *** DP-optimizer class initialized. ***')

    def _compute_gradients(self, loss, var_list, grad_loss=None, tape=None):
      """DP-SGD version of base class method."""
      self._was_dp_gradients_called = True
      # Compute loss.
      if not callable(loss) and tape is None:
        raise ValueError('`tape` is required when a `Tensor` loss is passed.')
      tape = tape if tape is not None else tf.GradientTape()
      if callable(loss):
        with tape:
          if not callable(var_list):
            tape.watch(var_list)
          loss = loss()
          if self._num_microbatches is None:
            num_microbatches = tf.shape(input=loss)[0]
          else:
            num_microbatches = self._num_microbatches
          microbatch_losses = tf.reduce_mean(
              tf.reshape(loss, [num_microbatches, -1]), axis=1)
          if callable(var_list):
            var_list = var_list()
      else:
        with tape:
          if self._num_microbatches is None:
            num_microbatches = tf.shape(input=loss)[0]
          else:
            num_microbatches = self._num_microbatches
          microbatch_losses = tf.reduce_mean(
              tf.reshape(loss, [num_microbatches, -1]), axis=1)
      var_list = tf.nest.flatten(var_list)
      # Compute the per-microbatch losses using helpful jacobian method.
      with tf.keras.backend.name_scope(self._name + '/gradients'):
        jacobian = tape.jacobian(microbatch_losses, var_list)
        # Clip each per-microbatch gradient to the global L2 norm bound.
        clipped_gradients = tf.vectorized_map(
            lambda g: clip_gradients_vmap(g, self._l2_norm_clip), jacobian)
        if self._logging:
          def my_numpy_func(grad):
            # Append the norm to an existing .npy file (must already exist).
            filename = ('norm_%d_v%1.2f_gradient.npy' %
                        (self._dpsgd_type,
                         (self._l2_norm_clip * self._noise_multiplier)**2))
            buf = np.load(filename)
            buf = np.append(buf, grad)
            np.save(filename, buf)
            return grad.astype(np.float32)

          @tf.function(input_signature=[tf.TensorSpec(None, tf.float32)])
          def save_grad_norm(input):
            return tf.numpy_function(my_numpy_func, [input], tf.float32)

          def compute_norm(gradients):
            # Norm of the leading (index 0) per-microbatch gradient only.
            grads_flat2 = tf.nest.flatten(gradients)
            sq_l2_norms = [
                tf.reduce_sum(input_tensor=tf.square(g[0]))
                for g in grads_flat2
            ]
            norm = tf.sqrt(tf.add_n(sq_l2_norms))
            return norm

          # BUG FIX: was `comupute_norm(...)` (typo), which raised a
          # NameError whenever logging=True.
          norm = compute_norm(clipped_gradients)
          save_grad_norm(norm)

        # Sum gradients over all microbatches (each clipped to
        # l2_norm_clip), add noise to the sum, then divide by the number
        # of microbatches.
        def reduce_noise_normalize_batch(g):
          # Sum gradients over all microbatches.
          summed_gradient = tf.reduce_sum(g, axis=0)
          # Add noise to summed gradients.
          noise_stddev = self._l2_norm_clip * self._noise_multiplier
          if self._dpsgd_type == 0:
            # Standard Gaussian mechanism.
            noise = tf.random.normal(
                tf.shape(input=summed_gradient), stddev=noise_stddev)
          else:
            def my_numpy_func(grad):
              size = np.shape(grad)
              if self._dpsgd_type == 1:
                # Laplace noise with matching stddev (scale = stddev/sqrt(2)).
                result = np.random.laplace(
                    size=size, scale=noise_stddev / np.sqrt(2))
                return np.ndarray.astype(result, np.float32)
              else:
                # Draw from pre-computed "cactus" samples stored on disk.
                filename = ('test_data/cactus_samples_d%d_v%1.2f.npy' %
                            (self._l2_norm_clip, noise_stddev**2))
                samples = np.load(filename)
                result = np.zeros(size).reshape(-1)
                for a in range(len(result)):
                  # NOTE(review): indices 0 and len(samples)-1 can never be
                  # drawn here - presumably intentional; confirm.
                  rand = np.random.uniform(1, len(samples) - 1, 1)
                  result[a] = samples[int(rand)]
                result = result.reshape(size)
                return np.ndarray.astype(result, np.float32)

            @tf.function(input_signature=[tf.TensorSpec(None, tf.float32)])
            def additive_noise(input):
              return tf.numpy_function(my_numpy_func, [input], tf.float32)

            noise = additive_noise(summed_gradient)
          noised_gradient = tf.add(summed_gradient, noise)
          # Normalize by number of microbatches and return.
          return tf.truediv(noised_gradient,
                            tf.cast(num_microbatches, tf.float32))

        final_gradients = tf.nest.map_structure(reduce_noise_normalize_batch,
                                                clipped_gradients)
      return list(zip(final_gradients, var_list))

    def get_gradients(self, loss, params):
      """DP-SGD version of base class method."""
      self._was_dp_gradients_called = True
      if self._global_state is None:
        self._global_state = self._dp_sum_query.initial_global_state()
      if self._num_microbatches is None:
        num_microbatches = tf.shape(input=loss)[0]
      else:
        num_microbatches = self._num_microbatches
      microbatch_losses = tf.reshape(loss, [num_microbatches, -1])

      def process_microbatch(microbatch_loss):
        """Compute clipped grads for one microbatch."""
        mean_loss = tf.reduce_mean(input_tensor=microbatch_loss)
        grads = super(DPOptimizerClass, self).get_gradients(mean_loss, params)
        grads_list = [
            g if g is not None else tf.zeros_like(v)
            for (g, v) in zip(list(grads), params)
        ]
        clipped_grads = clip_gradients_vmap(grads_list, self._l2_norm_clip)
        return clipped_grads

      clipped_grads = tf.vectorized_map(process_microbatch, microbatch_losses)

      def reduce_noise_normalize_batch(stacked_grads):
        summed_grads = tf.reduce_sum(input_tensor=stacked_grads, axis=0)
        noise_stddev = self._l2_norm_clip * self._noise_multiplier
        # NOTE(review): unlike _compute_gradients(), this path always uses
        # Gaussian noise regardless of self._dpsgd_type - confirm intended.
        noise = tf.random.normal(
            tf.shape(input=summed_grads), stddev=noise_stddev)
        noised_grads = summed_grads + noise
        return noised_grads / tf.cast(num_microbatches, tf.float32)

      final_grads = tf.nest.map_structure(reduce_noise_normalize_batch,
                                          clipped_grads)
      return final_grads

    def apply_gradients(self, grads_and_vars, global_step=None, name=None):
      """DP-SGD version of base class method."""
      assert self._was_dp_gradients_called, (
          'Neither _compute_gradients() or get_gradients() on the '
          'differentially private optimizer was called. This means the '
          'training is not differentially private. It may be the case that '
          'you need to upgrade to TF 2.4 or higher to use this particular '
          'optimizer.')
      return super(DPOptimizerClass,
                   self).apply_gradients(grads_and_vars, global_step, name)

  return DPOptimizerClass
# Ready-made vectorized DP variants of the common Keras optimizers.
VectorizedDPKerasAdagradOptimizer = make_vectorized_keras_optimizer_class(
    tf.keras.optimizers.Adagrad)
VectorizedDPKerasAdamOptimizer = make_vectorized_keras_optimizer_class(
    tf.keras.optimizers.Adam)
VectorizedDPKerasSGDOptimizer = make_vectorized_keras_optimizer_class(
    tf.keras.optimizers.SGD)
|
-- Join table linking policies to policy-info records (many-to-many).
-- The composite primary key forbids duplicate (policy, info) pairs.
CREATE TABLE [dbo].[Linking] (
    [PolicyId] INT NOT NULL,
    [PolicyInfoId] INT NOT NULL,
    [Description] VARCHAR (50) NULL,
    CONSTRAINT [PK_Linking] PRIMARY KEY CLUSTERED ([PolicyId] ASC, [PolicyInfoId] ASC)
);
|
#!/bin/sh
#
#  ____        _   _____ _ _
# | _ \ ___ | |_| ___(_) | ___ ___
# | | | |/ _ \| __| |_ | | |/ _ \/ __|
# | |_| | (_) | |_| _| | | | __/\__ \
# |____/ \___/ \__|_| |_|_|\___||___/
#
# DotFiles v0.2.447
# https://dotfiles.io
#
# Description: Setup procedures for DotFiles v0.2.447.
#
# Copyright (c) Sebastien Rousseau 2021. All rights reserved
# Licensed under the MIT license
#
# Load configuration files
# NOTE(review): the two scripts below are *executed* in child shells, not
# sourced, so any variables or functions they define (e.g. `error` used in
# setup) will not persist in this shell - confirm intent.
# shellcheck disable=SC2154
# shellcheck disable=SC2002
# shellcheck disable=SC3000
# shellcheck disable=SC4000
# shellcheck disable=SC1091
./dotfiles-colors-en.sh
./dotfiles-utilities-en.sh
# Create the setup function
# Runs the English installer if present, otherwise reports an error.
setup (){
  if [ -f ./tools/en/dotfiles-installer-en.sh ]; then
    ./tools/en/dotfiles-installer-en.sh
  else
    error "$LINENO: Installer file \"${0}\" not found. Check the file name and try again. "
  fi
}
# Call the setup function
setup
|
# Provisioning script: upgrades the OS, installs build dependencies,
# Ruby via RVM (plus guard gems), and the cc65 6502 cross-toolchain.
# Aborts on the first failing command.
set -e
echo
echo "*****************"
echo Julienning system
echo "*****************"
apt-get update
apt-get -y upgrade
echo
echo "************************"
echo Deep frying dependencies
echo "************************"
apt-get -y install git-core g++ autoconf
echo
echo "***************"
echo Parboiling Ruby
echo "***************"
# Leading backslash bypasses any shell alias for curl.
\curl -sSL https://get.rvm.io | bash
usermod -a -G rvm vagrant
source /etc/profile.d/rvm.sh
rvm install 2.1
# NOTE(review): the documented form is `rvm use 2.1 --default` or
# `rvm --default use 2.1` - confirm this argument order is accepted.
rvm default use 2.1
gem install guard guard-shell
echo
echo "****************"
echo Baking assembler
echo "****************"
# Build cc65 from source and symlink the assembler/linker into PATH.
mkdir tools
cd tools
git clone https://github.com/cc65/cc65.git
cd cc65
make
ln -s /home/vagrant/tools/cc65/bin/ca65 /usr/local/bin
ln -s /home/vagrant/tools/cc65/bin/cl65 /usr/local/bin
ln -s /home/vagrant/tools/cc65/bin/ld65 /usr/local/bin
echo
echo "**************************"
echo THE SHAME PLATE IS SERVED.
echo "**************************"
|
'use strict';
// Spec for NgRepeatNameSource: extracting the collection name from an
// AngularJS ng-repeat attribute.
const expect = require('chai').expect;
const extractName = require('../../../name/name-source-spec-helper');
const NgRepeatNameSource = require('../../../../lib/angular/angular-js/name/ng-repeat-name-source');
describe('NgRepeatNameSource', () => {
  let source = new NgRepeatNameSource();
  it('should extract name', () => {
    // No ng-repeat attribute -> null.
    expect(extractName('<p></p>', source)).to.be.null;
    // Both the bare form and the data- prefixed, filtered "... as results"
    // form yield the collection name ("items").
    expect(extractName('<p ng-repeat="item in items"></p>', source)).to.equal('items');
    expect(extractName('<p data-ng-repeat="item in items | filter:x as results"></p>', source)).to.equal('items');
  });
});
|
#!/bin/bash
# Deployment refresh script for rdbms2s3: syncs the git checkout and
# (re)builds its conda environment, keyed off the host's environment.
# Determine environment from hostname
# ("stg" -> staging, "prd" -> prod, anything else -> dev).
if [[ $HOSTNAME =~ "stg" ]]
then
    environment="staging"
elif [[ $HOSTNAME =~ "prd" ]]
then
    environment="prod"
else
    environment="dev"
fi
# rdbms2s3 deployment path
rdbms2s3_install_dir="/space1/deploy/${environment}/rdbms2s3"
# python3 conda environment for hgsc_rdbms2s3 scripts in rdbms2s3 deployment
conda_install="/hgsc_software/miniconda/miniconda3/bin/conda"
hgsc_rdbms2s3_conda_env="/space1/deploy/${environment}/conda/rdbms2s3"
# Usage function (and exit)
# Prints the help text below and exits with status 5.
usage() {
    cat <<END
Usage: $0 [-h] [-c configfile.sh] [-t refspec] [-k] [-f] [-v]
    -h            Display this help screen
    -t <refspec>  Optional git reference (tag, branch, SHA) to use for reset
                  If omitted, will construct from \$environment and "_cron"
    -k            Keep all previous conda deployment directories
    -f            Force: Perform all operations without (much) checking.
    -v            Verbose: sets bash shopt of -x
Environment:
    conda_install            required path to find conda for env setup
    rdbms2s3_install_dir     required path to find git repo
    hgsc_rdbms2s3_conda_env  required path to create conda environment
END
    exit 5
}
# Reinstall conda environment
# Moves any existing env aside (date-suffixed backup), creates a fresh env
# at $hgsc_rdbms2s3_conda_env, and pip-installs the checked-out project.
# Runs under `set -e` so any failure aborts the script; `set +e` restores
# normal error handling afterwards.
redeploy_conda_env() {
    set -e
    source ${conda_env_setup}
    [[ -d ${hgsc_rdbms2s3_conda_env} ]] && mv ${hgsc_rdbms2s3_conda_env} ${hgsc_rdbms2s3_conda_env}.$(date +%F)
    conda create -y --prefix $hgsc_rdbms2s3_conda_env pip
    conda activate $hgsc_rdbms2s3_conda_env
    pip install -U ${rdbms2s3_install_dir}
    conda deactivate
    set +e
}
# Fetch the remote, then hard-reset the checkout to origin/$git_target_ref
# only if it changed; additionally redeploy the conda env only when the
# python project under src/ changed. FORCE=1 makes both unconditional.
update_git_repo() {
    UPDATE_CONDA=${FORCE:-0}
    UPDATE_REPO=${FORCE:-0}
    GIT="git --git-dir=${rdbms2s3_install_dir}/.git --work-tree=${rdbms2s3_install_dir}"
    ${GIT} fetch
    # Determine if changes happened in python3 project
    if ! ${GIT} diff --quiet origin/${git_target_ref} -- src
    then
        UPDATE_CONDA=1
        UPDATE_REPO=1
    # Determine if changes happened generally
    elif ! ${GIT} diff --quiet origin/${git_target_ref}
    then
        UPDATE_REPO=1
    fi
    # Reset (not checkout or pull) latest in branch
    [ $UPDATE_REPO -eq 1 ] && ${GIT} reset --hard origin/${git_target_ref}
    [ $UPDATE_CONDA -eq 1 ] && redeploy_conda_env
}
# Prune old date-suffixed conda env backups, keeping the newest three.
# Skipped entirely when -k (KEEP_OLD_CONDA_DIRS) was given.
remove_old_conda_dirs()
{
    if [ -z "${KEEP_OLD_CONDA_DIRS}" ]; then
        local conda_install_dir="$(dirname ${hgsc_rdbms2s3_conda_env})"
        local conda_install_base="$(basename ${hgsc_rdbms2s3_conda_env})"
        # Find previous conda deployment directories in $conda_install_dir and
        # remove all but latest 3 directories, ordered numerically by date
        find ${conda_install_dir} -maxdepth 1 -name "${conda_install_base}.*" |
            sort -nr |
            sed -e '1,3d' |
            xargs -r rm -fr
    fi
}
# Process arguments
# NOTE(review): the optstring accepts -n but no case handles it, so -n is
# silently ignored - presumably leftover; confirm and remove or implement.
while getopts "c:fhknt:v" options
do
    case "${options}" in
        c)
            # Source an optional config file that may override the defaults.
            [ -f ${OPTARG} ] && source ${OPTARG}
            CONFIG=1
            ;;
        f)
            FORCE=1
            ;;
        h)
            usage
            ;;
        k)
            KEEP_OLD_CONDA_DIRS=1
            ;;
        t)
            git_target_ref=${OPTARG}
            ;;
        v)
            set -x
            ;;
    esac
done
# Variables and sanity checks
# Make assumptions from environment (but respect overrides)
# Default the git ref per environment when -t was not supplied.
if [[ -z ${git_target_ref} ]]
then
    case "${environment}" in
        prod)
            git_target_ref="production_cron"
            ;;
        staging)
            git_target_ref="staging_cron"
            ;;
        dev)
            git_target_ref="main"
            ;;
    esac
fi
# After two chances to read config file, check for vars we expect to be filled in
[[ ! -d ${rdbms2s3_install_dir}/.git ]] && echo 'ERROR: environment variable "rdbms2s3_install_dir" must point at a git repo clone' && usage
# Only check parent directory of conda environment exists in case we need to make a new one.
[[ ! -d $(dirname ${hgsc_rdbms2s3_conda_env}) ]] && echo 'ERROR: environment variable "hgsc_rdbms2s3_conda_env" must point at a conda environment' && usage
# Artifact of run_common.sh, we want to use the profile setup script, but the only var we have is the conda python script
[[ ! -f ${conda_install} ]] && echo 'ERROR: environment variable "conda_install" must point at a "conda" script' && usage
# Derive .../etc/profile.d/conda.sh from the conda binary's location.
conda_env_setup=$(dirname $(dirname ${conda_install} ))/etc/profile.d/conda.sh
# I could be wrong here....
[[ ! -f ${conda_env_setup} ]] && echo 'ERROR: Bad assumption made in script that conda will provide a profile setup script at ${conda_env_setup}' && usage
update_git_repo
remove_old_conda_dirs
|
<reponame>lvonnied/Dsy_ChallengeTask<filename>backend/routes/index-routes.ts
// Express router that maps the application's routes onto the
// index-controller handlers.
import express from 'express';
const router = express.Router();
import {indexController} from '../controller/index-controller';
router.get("/api", indexController.index);
router.get("/switchTheme", indexController.switchTheme);
// NOTE(review): these GET routes appear to create/edit to-do entries;
// confirm whether POST/PUT would be more appropriate verbs.
router.get("/createtodo", indexController.createToDo);
router.get("/createtodo/:id/", indexController.editEntry);
router.post("/api/newtodo", indexController.createEntry);
router.post("/newtodo/:id/", indexController.updateEntry);
// Exported for mounting by the application entry point.
export const indexRoutes = router;
|
<reponame>catroll/mailx
/*@ S-nail - a mail user agent derived from Berkeley Mail.
*@ Dig message objects. TODO Very very restricted (especially non-compose)
*@ Protocol change: adjust mx-config.h:mx_DIG_MSG_PLUMBING_VERSION + `~^' man.
*@ TODO - a_dmsg_cmd() should generate string lists, not perform real I/O.
*@ TODO I.e., drop FILE* arg, generate stringlist; up to callers...
*@ TODO - With our own I/O there should then be a StringListDevice as the
*@ TODO owner and I/O overlay provider: NO temporary file (sic)!
*@ XXX - Multiple objects per message could be possible (a_dmsg_find()),
*@ XXX except in compose mode
*
* Copyright (c) 2016 - 2020 <NAME>) Nurpmeso <<EMAIL>>.
* SPDX-License-Identifier: ISC
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#undef su_FILE
#define su_FILE dig_msg
#define mx_SOURCE
#define mx_SOURCE_DIG_MSG
#ifndef mx_HAVE_AMALGAMATION
# include "mx/nail.h"
#endif
#include <su/cs.h>
#include <su/icodec.h>
#include <su/mem.h>
#include "mx/attachments.h"
#include "mx/cmd.h"
#include "mx/file-streams.h"
#include "mx/mime-type.h"
#include "mx/names.h"
#include "mx/dig-msg.h"
#include "su/code-in.h"
#define a_DMSG_QUOTE(S) n_shexp_quote_cp(S, FAL0)
struct mx_dig_msg_ctx *mx_dig_msg_read_overlay; /* XXX HACK */
struct mx_dig_msg_ctx *mx_dig_msg_compose_ctx; /* Or NIL XXX HACK*/
/* Try to convert cp into an unsigned number that corresponds to an existing
* message number (or ERR_INVAL), search for an existing object (ERR_EXIST if
* oexcl and exists; ERR_NOENT if not oexcl and does not exist).
* On oexcl success *dmcp will be n_alloc()ated with .dmc_msgno and .dmc_mp
* etc. set; but not linked into mb.mb_digmsg and .dmc_fp not created etc. */
static s32 a_dmsg_find(char const *cp, struct mx_dig_msg_ctx **dmcpp,
boole oexcl);
/* Subcommand drivers */
static boole a_dmsg_cmd(FILE *fp, struct mx_dig_msg_ctx *dmcp,
struct mx_cmd_arg *cmd, struct mx_cmd_arg *args);
static boole a_dmsg__header(FILE *fp, struct mx_dig_msg_ctx *dmcp,
struct mx_cmd_arg *args);
static boole a_dmsg__attach(FILE *fp, struct mx_dig_msg_ctx *dmcp,
struct mx_cmd_arg *args);
/* Resolve cp to a dig-msg context.  "-" selects the compose-mode context;
 * anything else must decode to an existing message number.  Error-code
 * contract (ERR_INVAL / ERR_EXIST / ERR_NOENT, oexcl allocation) is
 * described at the prototype above */
static s32
a_dmsg_find(char const *cp, struct mx_dig_msg_ctx **dmcpp, boole oexcl){
   struct mx_dig_msg_ctx *dmcp;
   s32 rv;
   u32 msgno;
   NYD2_IN;
   /* "-" is the special compose-mode object */
   if(cp[0] == '-' && cp[1] == '\0'){
      if((dmcp = mx_dig_msg_compose_ctx) != NIL){
         *dmcpp = dmcp;
         if(dmcp->dmc_flags & mx_DIG_MSG_COMPOSE_DIGGED)
            rv = oexcl ? su_ERR_EXIST : su_ERR_NONE;
         else
            rv = oexcl ? su_ERR_NONE : su_ERR_NOENT;
      }else
         rv = su_ERR_INVAL;
      goto jleave;
   }
   /* Decode a message number; must consume the entire string and lie in
    * [1, msgCount] */
   if((su_idec_u32_cp(&msgno, cp, 0, NIL
            ) & (su_IDEC_STATE_EMASK | su_IDEC_STATE_CONSUMED)
         ) != su_IDEC_STATE_CONSUMED ||
         msgno == 0 || UCMP(z, msgno, >, msgCount)){
      rv = su_ERR_INVAL;
      goto jleave;
   }
   /* Linear search of the mailbox's existing dig-msg objects */
   for(dmcp = mb.mb_digmsg; dmcp != NIL; dmcp = dmcp->dmc_next)
      if(dmcp->dmc_msgno == msgno){
         *dmcpp = dmcp;
         rv = oexcl ? su_ERR_EXIST : su_ERR_NONE;
         goto jleave;
      }
   if(!oexcl){
      rv = su_ERR_NOENT;
      goto jleave;
   }
   /* Allocate context plus trailing struct header in one chunk; dmc_hp
    * points just behind the context itself */
   *dmcpp = dmcp = n_calloc(1, Z_ALIGN(sizeof *dmcp) + sizeof(struct header));
   dmcp->dmc_mp = &message[msgno - 1];
   dmcp->dmc_flags = mx_DIG_MSG_OWN_MEMBAG |
         ((TRU1/*TODO*/ || !(mb.mb_perm & MB_DELE))
          ? mx_DIG_MSG_RDONLY : mx_DIG_MSG_NONE);
   dmcp->dmc_msgno = msgno;
   dmcp->dmc_hp = R(struct header*,P2UZ(&dmcp[1]));
   dmcp->dmc_membag = su_mem_bag_create(&dmcp->dmc__membag_buf[0], 0);
   /* Rest done by caller */
   rv = su_ERR_NONE;
jleave:
   NYD2_OU;
   return rv;
}
/* Dispatch one `digmsg' subcommand (header / attachment / version / help)
 * for context dmcp, writing protocol responses to fp.  Returns TRU1 on
 * success, FAL0 on error or unknown command (500 response) */
static boole
a_dmsg_cmd(FILE *fp, struct mx_dig_msg_ctx *dmcp, struct mx_cmd_arg *cmd,
      struct mx_cmd_arg *args){
   union {struct mx_cmd_arg *ca; char *c; struct str const *s; boole rv;} p;
   NYD2_IN;
   if(cmd == NIL)
      goto jecmd;
   /* Subcommand names are matched as case-insensitive prefixes */
   p.s = &cmd->ca_arg.ca_str;
   if(su_cs_starts_with_case_n("header", p.s->s, p.s->l))
      p.rv = a_dmsg__header(fp, dmcp, args);
   else if(su_cs_starts_with_case_n("attachment", p.s->s, p.s->l)){
      if(!(dmcp->dmc_flags & mx_DIG_MSG_COMPOSE)) /* TODO attachment support */
         p.rv = (fprintf(fp,
               "505 `digmsg attachment' only in compose mode (yet)\n") > 0);
      else
         p.rv = a_dmsg__attach(fp, dmcp, args);
   }else if(su_cs_starts_with_case_n("version", p.s->s, p.s->l)){
      if(args != NIL)
         goto jecmd;
      p.rv = (fputs("210 " mx_DIG_MSG_PLUMBING_VERSION "\n", fp) != EOF);
   }else if((p.s->l == 1 && p.s->s[0] == '?') ||
         su_cs_starts_with_case_n("help", p.s->s, p.s->l)){
      if(args != NIL)
         goto jecmd;
      /* Without mx_HAVE_UISTRINGS only the summary line is available */
      p.rv = (fputs(_("211 (Arguments undergo shell-style evaluation)\n"),
            fp) != EOF &&
#ifdef mx_HAVE_UISTRINGS
         fputs(_(
            "attachment:\n"
            "   attribute name (212; 501)\n"
            "   attribute-at position\n"
            "   attribute-set name key value (210; 505/501)\n"
            "   attribute-set-at position key value\n"
            "   insert file[=input-charset[#output-charset]] "
               "(210; 501/505/506)\n"
            "   insert #message-number\n"
            "   list (212; 501)\n"
            "   remove name (210; 501/506)\n"
            "   remove-at position (210; 501/505)\n"), fp) != EOF &&
         fputs(_(
            "header\n"
            "   insert field content (210; 501/505/506)\n"
            "   list [field] (210; [501]);\n"
            "   remove field (210; 501/505)\n"
            "   remove-at field position (210; 501/505)\n"
            "   show field (211/212; 501)\n"
            "help (211)\n"
            "version (210)\n"), fp) != EOF &&
#endif
         putc('\n', fp) != EOF);
   }else{
jecmd:
      fputs("500\n", fp);
      p.rv = FAL0;
   }
   fflush(fp);
   NYD2_OU;
   return p.rv;
}
static boole
a_dmsg__header(FILE *fp, struct mx_dig_msg_ctx *dmcp,
struct mx_cmd_arg *args){
struct n_header_field *hfp;
struct mx_name *np, **npp;
uz i;
struct mx_cmd_arg *a3p;
char const *cp;
struct header *hp;
NYD2_IN;
hp = dmcp->dmc_hp;
UNINIT(a3p, NIL);
if(args == NIL){
cp = su_empty; /* xxx not NIL anyway */
goto jdefault;
}
cp = args->ca_arg.ca_str.s;
args = args->ca_next;
/* Strip the optional colon from header names */
if((a3p = args) != NIL){
char *xp;
a3p = a3p->ca_next;
for(xp = args->ca_arg.ca_str.s;; ++xp)
if(*xp == '\0')
break;
else if(*xp == ':'){
*xp = '\0';
break;
}
}
/* TODO ERR_2BIG should happen on the cmd_arg parser side */
if(a3p != NIL && a3p->ca_next != NIL)
goto jecmd;
if(su_cs_starts_with_case("insert", cp)){ /* TODO LOGIC BELONGS head.c
* TODO That is: Header::factory(string) -> object (blahblah).
* TODO I.e., as long as we don't have regular RFC compliant parsers
* TODO which differentiate in between structured and unstructured
* TODO header fields etc., a little workaround */
struct mx_name *xnp;
s8 aerr;
char const *mod_suff;
enum expand_addr_check_mode eacm;
enum gfield ntype;
boole mult_ok;
if(args == NIL || a3p == NIL)
goto jecmd;
if(dmcp->dmc_flags & mx_DIG_MSG_RDONLY)
goto j505r;
/* Strip [\r\n] which would render a body invalid XXX all controls? */
/* C99 */{
char c;
for(cp = a3p->ca_arg.ca_str.s; (c = *cp) != '\0'; ++cp)
if(c == '\n' || c == '\r')
*UNCONST(char*,cp) = ' ';
}
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = "Subject")){
if(a3p->ca_arg.ca_str.l == 0)
goto j501cp;
if(hp->h_subject != NIL)
hp->h_subject = savecatsep(hp->h_subject, ' ',
a3p->ca_arg.ca_str.s);
else
hp->h_subject = a3p->ca_arg.ca_str.s;
if(fprintf(fp, "210 %s 1\n", cp) < 0)
cp = NIL;
goto jleave;
}
mult_ok = TRU1;
ntype = GEXTRA | GFULL | GFULLEXTRA;
eacm = EACM_STRICT;
mod_suff = NIL;
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = "From")){
npp = &hp->h_from;
jins:
aerr = 0;
/* todo As said above, this should be table driven etc., but.. */
if(ntype & GBCC_IS_FCC){
np = nalloc_fcc(a3p->ca_arg.ca_str.s);
if(is_addr_invalid(np, eacm))
goto jins_505;
}else{
if((np = (mult_ok > FAL0 ? lextract : n_extract_single
)(a3p->ca_arg.ca_str.s, ntype | GNULL_OK)) == NIL)
goto j501cp;
if((np = checkaddrs(np, eacm, &aerr), aerr != 0)){
jins_505:
if(fprintf(fp, "505 %s\n", cp) < 0)
cp = NIL;
goto jleave;
}
}
/* Go to the end of the list, track whether it contains any
* non-deleted entries */
i = 0;
if((xnp = *npp) != NIL)
for(;; xnp = xnp->n_flink){
if(!(xnp->n_type & GDEL))
++i;
if(xnp->n_flink == NIL)
break;
}
if(!mult_ok && (i != 0 || np->n_flink != NIL)){
if(fprintf(fp, "506 %s\n", cp) < 0)
cp = NIL;
}else{
if(xnp == NIL)
*npp = np;
else
xnp->n_flink = np;
np->n_blink = xnp;
if(fprintf(fp, "210 %s %" PRIuZ "\n", cp, ++i) < 0)
cp = NIL;
}
goto jleave;
}
#undef a_X
#define a_X(F,H,INS) \
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = F)) \
{npp = &hp->H; INS; goto jins;}
if((cp = su_cs_find_c(args->ca_arg.ca_str.s, '?')) != NIL){
mod_suff = cp;
args->ca_arg.ca_str.s[P2UZ(cp - args->ca_arg.ca_str.s)] = '\0';
if(*++cp != '\0' && !su_cs_starts_with_case("single", cp)){
cp = mod_suff;
goto j501cp;
}
mult_ok = TRUM1;
}
/* Just like with ~t,~c,~b, immediately test *expandaddr* compliance */
a_X("To", h_to, ntype = GTO|GFULL su_COMMA eacm = EACM_NORMAL);
a_X("Cc", h_cc, ntype = GCC|GFULL su_COMMA eacm = EACM_NORMAL);
a_X("Bcc", h_bcc, ntype = GBCC|GFULL su_COMMA eacm = EACM_NORMAL);
if((cp = mod_suff) != NIL)
goto j501cp;
/* Not | EAF_FILE, depend on *expandaddr*! */
a_X("Fcc", h_fcc, ntype = GBCC|GBCC_IS_FCC su_COMMA eacm = EACM_NORMAL);
a_X("Sender", h_sender, mult_ok = FAL0);
a_X("Reply-To", h_reply_to, eacm = EACM_NONAME);
a_X("Mail-Followup-To", h_mft, eacm = EACM_NONAME);
a_X("Message-ID", h_message_id,
mult_ok = FAL0 su_COMMA ntype = GREF su_COMMA eacm = EACM_NONAME);
a_X("References", h_ref, ntype = GREF su_COMMA eacm = EACM_NONAME);
a_X("In-Reply-To", h_in_reply_to, ntype = GREF su_COMMA
eacm = EACM_NONAME);
#undef a_X
if((cp = n_header_is_known(args->ca_arg.ca_str.s, UZ_MAX)) != NIL)
goto j505r;
/* Free-form header fields */
/* C99 */{
uz nl, bl;
struct n_header_field **hfpp;
for(cp = args->ca_arg.ca_str.s; *cp != '\0'; ++cp)
if(!fieldnamechar(*cp)){
cp = args->ca_arg.ca_str.s;
goto j501cp;
}
for(i = 0, hfpp = &hp->h_user_headers; *hfpp != NIL; ++i)
hfpp = &(*hfpp)->hf_next;
nl = su_cs_len(cp = args->ca_arg.ca_str.s) +1;
bl = su_cs_len(a3p->ca_arg.ca_str.s) +1;
*hfpp = hfp = n_autorec_alloc(VSTRUCT_SIZEOF(struct n_header_field,
hf_dat) + nl + bl);
hfp->hf_next = NIL;
hfp->hf_nl = nl - 1;
hfp->hf_bl = bl - 1;
su_mem_copy(&hfp->hf_dat[0], cp, nl);
su_mem_copy(&hfp->hf_dat[nl], a3p->ca_arg.ca_str.s, bl);
if(fprintf(fp, "210 %s %" PRIuZ "\n", &hfp->hf_dat[0], ++i) < 0)
cp = NIL;
}
}else if(su_cs_starts_with_case("list", cp)){
jdefault:
if(args == NIL){
if(fputs("210", fp) == EOF){
cp = NIL;
goto jleave;
}
#undef a_X
#define a_X(F,S) \
if(su_CONCAT(hp->h_, F) != NIL && fputs(" " su_STRING(S), fp) == EOF){\
cp = NIL;\
goto jleave;\
}
a_X(subject, Subject);
a_X(from, From);
a_X(sender, Sender);
a_X(to, To);
a_X(cc, Cc);
a_X(bcc, Bcc);
a_X(fcc, Fcc);
a_X(reply_to, Reply-To);
a_X(mft, Mail-Followup-To);
a_X(message_id, Message-ID);
a_X(ref, References);
a_X(in_reply_to, In-Reply-To);
a_X(mailx_command, Mailx-Command);
a_X(mailx_raw_to, Mailx-Raw-To);
a_X(mailx_raw_cc, Mailx-Raw-Cc);
a_X(mailx_raw_bcc, Mailx-Raw-Bcc);
a_X(mailx_orig_sender, Mailx-Orig-Sender);
a_X(mailx_orig_from, Mailx-Orig-From);
a_X(mailx_orig_to, Mailx-Orig-To);
a_X(mailx_orig_cc, Mailx-Orig-Cc);
a_X(mailx_orig_bcc, Mailx-Orig-Bcc);
#undef a_X
/* Print only one instance of each free-form header */
for(hfp = hp->h_user_headers; hfp != NIL; hfp = hfp->hf_next){
struct n_header_field *hfpx;
for(hfpx = hp->h_user_headers;; hfpx = hfpx->hf_next)
if(hfpx == hfp){
putc(' ', fp);
fputs(&hfp->hf_dat[0], fp);
break;
}else if(!su_cs_cmp_case(&hfpx->hf_dat[0], &hfp->hf_dat[0]))
break;
}
if(putc('\n', fp) == EOF)
cp = NIL;
goto jleave;
}
if(a3p != NIL)
goto jecmd;
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = "Subject")){
np = (hp->h_subject != NIL) ? R(struct mx_name*,-1) : NIL;
goto jlist;
}
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = "From")){
np = hp->h_from;
jlist:
fprintf(fp, "%s %s\n", (np == NIL ? "501" : "210"), cp);
goto jleave;
}
#undef a_X
#define a_X(F,H) \
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = su_STRING(F))){\
np = hp->su_CONCAT(h_,H);\
goto jlist;\
}
a_X(Sender, sender);
a_X(To, to);
a_X(Cc, cc);
a_X(Bcc, bcc);
a_X(Fcc, fcc);
a_X(Reply-To, reply_to);
a_X(Mail-Followup-To, mft);
a_X(Message-ID, message_id);
a_X(References, ref);
a_X(In-Reply-To, in_reply_to);
a_X(Mailx-Raw-To, mailx_raw_to);
a_X(Mailx-Raw-Cc, mailx_raw_cc);
a_X(Mailx-Raw-Bcc, mailx_raw_bcc);
a_X(Mailx-Orig-Sender, mailx_orig_sender);
a_X(Mailx-Orig-From, mailx_orig_from);
a_X(Mailx-Orig-To, mailx_orig_to);
a_X(Mailx-Orig-Cc, mailx_orig_cc);
a_X(Mailx-Orig-Bcc, mailx_orig_bcc);
#undef a_X
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = "Mailx-Command")){
np = (hp->h_mailx_command != NIL) ? R(struct mx_name*,-1) : NIL;
goto jlist;
}
/* Free-form header fields */
for(cp = args->ca_arg.ca_str.s; *cp != '\0'; ++cp)
if(!fieldnamechar(*cp)){
cp = args->ca_arg.ca_str.s;
goto j501cp;
}
cp = args->ca_arg.ca_str.s;
for(hfp = hp->h_user_headers;; hfp = hfp->hf_next){
if(hfp == NIL)
goto j501cp;
else if(!su_cs_cmp_case(cp, &hfp->hf_dat[0])){
if(fprintf(fp, "210 %s\n", &hfp->hf_dat[0]) < 0)
cp = NIL;
break;
}
}
}else if(su_cs_starts_with_case("remove", cp)){
if(args == NIL || a3p != NIL)
goto jecmd;
if(dmcp->dmc_flags & mx_DIG_MSG_RDONLY)
goto j505r;
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = "Subject")){
if(hp->h_subject == NIL)
goto j501cp;
hp->h_subject = NIL;
if(fprintf(fp, "210 %s\n", cp) < 0)
cp = NIL;
goto jleave;
}
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = "From")){
npp = &hp->h_from;
jrem:
if(*npp != NIL){
*npp = NIL;
if(fprintf(fp, "210 %s\n", cp) < 0)
cp = NIL;
goto jleave;
}else
goto j501cp;
}
#undef a_X
#define a_X(F,H) \
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = su_STRING(F))){\
npp = &hp->su_CONCAT(h_,H);\
goto jrem;\
}
a_X(Sender, sender);
a_X(To, to);
a_X(Cc, cc);
a_X(Bcc, bcc);
a_X(Fcc, fcc);
a_X(Reply-To, reply_to);
a_X(Mail-Followup-To, mft);
a_X(Message-ID, message_id);
a_X(References, ref);
a_X(In-Reply-To, in_reply_to);
#undef a_X
if((cp = n_header_is_known(args->ca_arg.ca_str.s, UZ_MAX)) != NIL)
goto j505r;
/* Free-form header fields (note j501cp may print non-normalized name) */
/* C99 */{
struct n_header_field **hfpp;
boole any;
for(cp = args->ca_arg.ca_str.s; *cp != '\0'; ++cp)
if(!fieldnamechar(*cp)){
cp = args->ca_arg.ca_str.s;
goto j501cp;
}
cp = args->ca_arg.ca_str.s;
for(any = FAL0, hfpp = &hp->h_user_headers; (hfp = *hfpp) != NIL;){
if(!su_cs_cmp_case(cp, &hfp->hf_dat[0])){
*hfpp = hfp->hf_next;
if(!any){
if(fprintf(fp, "210 %s\n", &hfp->hf_dat[0]) < 0){
cp = NIL;
goto jleave;
}
}
any = TRU1;
}else
hfpp = &hfp->hf_next;
}
if(!any)
goto j501cp;
}
}else if(su_cs_starts_with_case("remove-at", cp)){
if(args == NIL || a3p == NIL)
goto jecmd;
if(dmcp->dmc_flags & mx_DIG_MSG_RDONLY)
goto j505r;
if((su_idec_uz_cp(&i, a3p->ca_arg.ca_str.s, 0, NIL
) & (su_IDEC_STATE_EMASK | su_IDEC_STATE_CONSUMED)
) != su_IDEC_STATE_CONSUMED || i == 0){
if(fprintf(fp, "505 invalid position: %s\n",
a3p->ca_arg.ca_str.s) < 0)
cp = NIL;
goto jleave;
}
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = "Subject")){
if(hp->h_subject != NIL && i == 1){
hp->h_subject = NIL;
if(fprintf(fp, "210 %s 1\n", cp) < 0)
cp = NIL;
goto jleave;
}else
goto j501cp;
}
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = "From")){
npp = &hp->h_from;
jremat:
if((np = *npp) == NIL)
goto j501cp;
while(--i != 0 && np != NIL)
np = np->n_flink;
if(np == NIL)
goto j501cp;
if(np->n_blink != NIL)
np->n_blink->n_flink = np->n_flink;
else
*npp = np->n_flink;
if(np->n_flink != NIL)
np->n_flink->n_blink = np->n_blink;
if(fprintf(fp, "210 %s\n", cp) < 0)
cp = NIL;
goto jleave;
}
#undef a_X
#define a_X(F,H) \
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = su_STRING(F))){\
npp = &hp->su_CONCAT(h_,H);\
goto jremat;\
}
a_X(Sender, sender);
a_X(To, to);
a_X(Cc, cc);
a_X(Bcc, bcc);
a_X(Fcc, fcc);
a_X(Reply-To, reply_to);
a_X(Mail-Followup-To, mft);
a_X(Message-ID, message_id);
a_X(References, ref);
a_X(In-Reply-To, in_reply_to);
#undef a_X
if((cp = n_header_is_known(args->ca_arg.ca_str.s, UZ_MAX)) != NIL)
goto j505r;
/* Free-form header fields */
/* C99 */{
struct n_header_field **hfpp;
for(cp = args->ca_arg.ca_str.s; *cp != '\0'; ++cp)
if(!fieldnamechar(*cp)){
cp = args->ca_arg.ca_str.s;
goto j501cp;
}
cp = args->ca_arg.ca_str.s;
for(hfpp = &hp->h_user_headers; (hfp = *hfpp) != NIL;){
if(--i == 0){
*hfpp = hfp->hf_next;
if(fprintf(fp, "210 %s %" PRIuZ "\n", &hfp->hf_dat[0], i) < 0){
cp = NIL;
goto jleave;
}
break;
}else
hfpp = &hfp->hf_next;
}
if(hfp == NIL)
goto j501cp;
}
}else if(su_cs_starts_with_case("show", cp)){
if(args == NIL || a3p != NIL)
goto jecmd;
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = "Subject")){
if(hp->h_subject == NIL)
goto j501cp;
if(fprintf(fp, "212 %s\n%s\n\n", cp, a_DMSG_QUOTE(hp->h_subject)) < 0)
cp = NIL;
goto jleave;
}
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = "From")){
np = hp->h_from;
jshow:
if(np == NIL)
goto j501cp;
fprintf(fp, "211 %s\n", cp);
do if(!(np->n_type & GDEL)){
switch(np->n_flags & mx_NAME_ADDRSPEC_ISMASK){
case mx_NAME_ADDRSPEC_ISFILE: cp = n_hy; break;
case mx_NAME_ADDRSPEC_ISPIPE: cp = "|"; break;
case mx_NAME_ADDRSPEC_ISNAME: cp = n_ns; break;
default: cp = np->n_name; break;
}
fprintf(fp, "%s %s\n", cp, a_DMSG_QUOTE(np->n_fullname));
}while((np = np->n_flink) != NIL);
if(putc('\n', fp) == EOF)
cp = NIL;
goto jleave;
}
#undef a_X
#define a_X(F,H) \
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = su_STRING(F))){\
np = hp->su_CONCAT(h_,H);\
goto jshow;\
}
a_X(Sender, sender);
a_X(To, to);
a_X(Cc, cc);
a_X(Bcc, bcc);
a_X(Fcc, fcc);
a_X(Reply-To, reply_to);
a_X(Mail-Followup-To, mft);
a_X(Message-ID, message_id);
a_X(References, ref);
a_X(In-Reply-To, in_reply_to);
a_X(Mailx-Raw-To, mailx_raw_to);
a_X(Mailx-Raw-Cc, mailx_raw_cc);
a_X(Mailx-Raw-Bcc, mailx_raw_bcc);
a_X(Mailx-Orig-Sender, mailx_orig_sender);
a_X(Mailx-Orig-From, mailx_orig_from);
a_X(Mailx-Orig-To, mailx_orig_to);
a_X(Mailx-Orig-Cc, mailx_orig_cc);
a_X(Mailx-Orig-Bcc, mailx_orig_bcc);
#undef a_X
if(!su_cs_cmp_case(args->ca_arg.ca_str.s, cp = "Mailx-Command")){
if(hp->h_mailx_command == NIL)
goto j501cp;
if(fprintf(fp, "212 %s\n%s\n\n", cp, hp->h_mailx_command) < 0)
cp = NIL;
goto jleave;
}
/* Free-form header fields */
/* C99 */{
boole any;
for(cp = args->ca_arg.ca_str.s; *cp != '\0'; ++cp)
if(!fieldnamechar(*cp)){
cp = args->ca_arg.ca_str.s;
goto j501cp;
}
cp = args->ca_arg.ca_str.s;
for(any = FAL0, hfp = hp->h_user_headers; hfp != NIL;
hfp = hfp->hf_next){
if(!su_cs_cmp_case(cp, &hfp->hf_dat[0])){
if(!any)
fprintf(fp, "212 %s\n", &hfp->hf_dat[0]);
any = TRU1;
fprintf(fp, "%s\n", a_DMSG_QUOTE(&hfp->hf_dat[hfp->hf_nl +1]));
}
}
if(!any)
goto j501cp;
if(putc('\n', fp) == EOF)
cp = NIL;
}
}else
goto jecmd;
jleave:
NYD2_OU;
return (cp != NIL);
jecmd:
if(fputs("500\n", fp) == EOF)
cp = NIL;
cp = NIL;
goto jleave;
j505r:
if(fprintf(fp, "505 read-only: %s\n", cp) < 0)
cp = NIL;
goto jleave;
j501cp:
if(fprintf(fp, "501 %s\n", cp) < 0)
cp = NIL;
goto jleave;
}
/* `digmsg' "attachment" subcommand dispatcher: list, query, modify and
 * remove attachments of the message context DMCP, writing responses in the
 * "NNN ..." protocol format to FP (210 ok, 212 data follows, 500 bad
 * command, 501 no such object, 505/506 errors).
 * Returns TRU1 on success; on write error CP is set to NIL and FAL0 is
 * returned */
static boole
a_dmsg__attach(FILE *fp, struct mx_dig_msg_ctx *dmcp,
      struct mx_cmd_arg *args){
   boole status;
   struct mx_attachment *ap;
   char const *cp;
   struct header *hp;
   NYD2_IN;

   hp = dmcp->dmc_hp;

   if(args == NIL){
      cp = su_empty; /* xxx not NIL anyway */
      goto jdefault;
   }

   cp = args->ca_arg.ca_str.s;
   args = args->ca_next;

   if(su_cs_starts_with_case("attribute", cp)){
      /* attribute ATT-ID: dump all attributes of the named attachment */
      if(args == NIL || args->ca_next != NIL)
         goto jecmd;

      cp = args->ca_arg.ca_str.s;
      if((ap = mx_attachments_find(hp->h_attach, cp, NIL)) == NIL)
         goto j501;
jatt_att:
      fprintf(fp, "212 %s\n", a_DMSG_QUOTE(cp));
      if(ap->a_msgno > 0){
         /* Attachment refers to a message of the mailbox, not a file */
         if(fprintf(fp, "message-number %d\n\n", ap->a_msgno) < 0)
            cp = NIL;
      }else{
         fprintf(fp, "creation-name %s\nopen-path %s\nfilename %s\n",
            a_DMSG_QUOTE(ap->a_path_user), a_DMSG_QUOTE(ap->a_path),
            a_DMSG_QUOTE(ap->a_name));
         if((cp = ap->a_content_description) != NIL)
            fprintf(fp, "content-description %s\n", a_DMSG_QUOTE(cp));
         if(ap->a_content_id != NIL)
            fprintf(fp, "content-id %s\n", ap->a_content_id->n_name);
         if((cp = ap->a_content_type) != NIL)
            fprintf(fp, "content-type %s\n", a_DMSG_QUOTE(cp));
         if((cp = ap->a_content_disposition) != NIL)
            fprintf(fp, "content-disposition %s\n", a_DMSG_QUOTE(cp));
         cp = (putc('\n', fp) != EOF) ? su_empty : NIL;
      }
   }else if(su_cs_starts_with_case("attribute-at", cp)){
      uz i;

      /* attribute-at POSITION: as "attribute", via 1-based list index */
      if(args == NIL || args->ca_next != NIL)
         goto jecmd;

      if((su_idec_uz_cp(&i, cp = args->ca_arg.ca_str.s, 0, NIL
               ) & (su_IDEC_STATE_EMASK | su_IDEC_STATE_CONSUMED)
            ) != su_IDEC_STATE_CONSUMED || i == 0)
         goto j505invpos;

      for(ap = hp->h_attach; ap != NIL && --i != 0; ap = ap->a_flink)
         ;
      if(ap != NIL)
         goto jatt_att;
      goto j501;
   }else if(su_cs_starts_with_case("attribute-set", cp)){
      /* ATT-ID KEYWORD VALUE */
      if(args == NIL)
         goto jecmd;
      cp = args->ca_arg.ca_str.s;
      args = args->ca_next;
      if(args == NIL || args->ca_next == NIL || args->ca_next->ca_next != NIL)
         goto jecmd;
      if(dmcp->dmc_flags & mx_DIG_MSG_RDONLY)
         goto j505r;

      if((ap = mx_attachments_find(hp->h_attach, cp, NIL)) == NIL)
         goto j501;
jatt_attset:
      if(ap->a_msgno > 0){
         /* Message attachments carry no modifiable attributes */
         if(fprintf(fp, "505 RFC822 message attachment: %s\n", cp) < 0)
            cp = NIL;
      }else{
         char c;
         char const *keyw, *xcp;

         keyw = args->ca_arg.ca_str.s;
         cp = args->ca_next->ca_arg.ca_str.s;

         /* Refuse control characters in the new value */
         for(xcp = cp; (c = *xcp) != '\0'; ++xcp)
            if(su_cs_is_cntrl(c))
               goto j505;
         c = *cp;

         if(!su_cs_cmp_case(keyw, "filename"))
            ap->a_name = (c == '\0') ? ap->a_path_bname : cp;
         else if(!su_cs_cmp_case(keyw, "content-description"))
            ap->a_content_description = (c == '\0') ? NIL : cp;
         else if(!su_cs_cmp_case(keyw, "content-id")){
            ap->a_content_id = NIL;

            if(c != '\0'){
               struct mx_name *np;

               /* XXX lextract->extract_single() */
               np = checkaddrs(lextract(cp, GREF),
                     /*EACM_STRICT | TODO '/' valid!! */ EACM_NOLOG |
                     EACM_NONAME, NIL);
               if(np != NIL && np->n_flink == NIL)
                  ap->a_content_id = np;
               else
                  cp = NIL;
            }
         }else if(!su_cs_cmp_case(keyw, "content-type")){
            if((ap->a_content_type = (c == '\0') ? NIL : cp) != NIL){
               char *cp2;

               /* Lowercase in place, then validate as a MIME type */
               for(cp2 = UNCONST(char*,cp); (c = *cp++) != '\0';)
                  *cp2++ = su_cs_to_lower(c);

               if(!mx_mimetype_is_valid(ap->a_content_type, TRU1, FAL0)){
                  ap->a_content_type = NIL;
                  goto j505;
               }
            }
         }else if(!su_cs_cmp_case(keyw, "content-disposition"))
            ap->a_content_disposition = (c == '\0') ? NIL : cp;
         else
            cp = NIL;

         if(cp != NIL){
            uz i;

            /* Report the 1-based position of the modified attachment */
            for(i = 0; ap != NIL; ++i, ap = ap->a_blink)
               ;
            if(fprintf(fp, "210 %" PRIuZ "\n", i) < 0)
               cp = NIL;
         }else{
            cp = xcp;
            goto j505; /* xxx jecmd; */
         }
      }
   }else if(su_cs_starts_with_case("attribute-set-at", cp)){
      uz i;

      /* POSITION KEYWORD VALUE: as "attribute-set", via 1-based index.
       * Note: check args before dereferencing (mirrors "attribute-set");
       * a sole "attribute-set-at" word used to dereference NIL here */
      if(args == NIL)
         goto jecmd;
      cp = args->ca_arg.ca_str.s;
      args = args->ca_next;
      if(args == NIL || args->ca_next == NIL || args->ca_next->ca_next != NIL)
         goto jecmd;
      if(dmcp->dmc_flags & mx_DIG_MSG_RDONLY)
         goto j505r;

      if((su_idec_uz_cp(&i, cp, 0, NIL
               ) & (su_IDEC_STATE_EMASK | su_IDEC_STATE_CONSUMED)
            ) != su_IDEC_STATE_CONSUMED || i == 0)
         goto j505invpos;

      for(ap = hp->h_attach; ap != NIL && --i != 0; ap = ap->a_flink)
         ;
      if(ap != NIL)
         goto jatt_attset;
      goto j501;
   }else if(su_cs_starts_with_case("insert", cp)){
      BITENUM_IS(u32,mx_attach_error) aerr;

      /* insert FILE: append a new attachment */
      if(args == NIL || args->ca_next != NIL)
         goto jecmd;
      if(dmcp->dmc_flags & mx_DIG_MSG_RDONLY)
         goto j505r;

      hp->h_attach = mx_attachments_append(hp->h_attach, args->ca_arg.ca_str.s,
            &aerr, &ap);
      switch(aerr){
      case mx_ATTACHMENTS_ERR_FILE_OPEN: cp = "505"; goto jatt__ins;
      case mx_ATTACHMENTS_ERR_ICONV_FAILED: cp = "506"; goto jatt__ins;
      case mx_ATTACHMENTS_ERR_ICONV_NAVAIL: /* FALLTHRU */
      case mx_ATTACHMENTS_ERR_OTHER: /* FALLTHRU */
      default:
         cp = "501";
jatt__ins:
         if(fprintf(fp, "%s %s\n", cp, a_DMSG_QUOTE(args->ca_arg.ca_str.s)
               ) < 0)
            cp = NIL;
         break;
      case mx_ATTACHMENTS_ERR_NONE:{
         uz i;

         /* Success: report the 1-based position of the new attachment */
         for(i = 0; ap != NIL; ++i, ap = ap->a_blink)
            ;
         if(fprintf(fp, "210 %" PRIuZ "\n", i) < 0)
            cp = NIL;
         }break;
      }
   }else if(su_cs_starts_with_case("list", cp)){
jdefault:
      /* list (also the no-argument default): dump all attachment paths */
      if(args != NIL)
         goto jecmd;

      if((ap = hp->h_attach) == NIL)
         goto j501;

      fputs("212\n", fp);
      do
         fprintf(fp, "%s\n", a_DMSG_QUOTE(ap->a_path_user));
      while((ap = ap->a_flink) != NIL);
      if(putc('\n', fp) == EOF)
         cp = NIL;
   }else if(su_cs_starts_with_case("remove", cp)){
      /* remove NAME: drop the attachment if NAME matches unambiguously */
      if(args == NIL || args->ca_next != NIL)
         goto jecmd;
      if(dmcp->dmc_flags & mx_DIG_MSG_RDONLY)
         goto j505r;

      cp = args->ca_arg.ca_str.s;
      if((ap = mx_attachments_find(hp->h_attach, cp, &status)) == NIL)
         goto j501;
      if(status == TRUM1)
         goto j506;

      hp->h_attach = mx_attachments_remove(hp->h_attach, ap);
      if(fprintf(fp, "210 %s\n", a_DMSG_QUOTE(cp)) < 0)
         cp = NIL;
   }else if(su_cs_starts_with_case("remove-at", cp)){
      uz i;

      /* remove-at POSITION: drop the attachment at the 1-based index */
      if(args == NIL || args->ca_next != NIL)
         goto jecmd;
      if(dmcp->dmc_flags & mx_DIG_MSG_RDONLY)
         goto j505r;

      if((su_idec_uz_cp(&i, cp = args->ca_arg.ca_str.s, 0, NIL
               ) & (su_IDEC_STATE_EMASK | su_IDEC_STATE_CONSUMED)
            ) != su_IDEC_STATE_CONSUMED || i == 0)
         goto j505invpos;

      for(ap = hp->h_attach; ap != NIL && --i != 0; ap = ap->a_flink)
         ;
      if(ap != NIL){
         hp->h_attach = mx_attachments_remove(hp->h_attach, ap);
         if(fprintf(fp, "210 %s\n", cp) < 0)
            cp = NIL;
      }else
         goto j501;
   }else
      goto jecmd;

jleave:
   NYD2_OU;
   return (cp != NIL);

jecmd:
   if(fputs("500\n", fp) == EOF)
      cp = NIL;
   cp = NIL;
   goto jleave;
j501:
   if(fputs("501\n", fp) == EOF)
      cp = NIL;
   goto jleave;
j505:
   if(fputs("505\n", fp) == EOF)
      cp = NIL;
   goto jleave;
j505r:
   if(fprintf(fp, "505 read-only: %s\n", cp) < 0)
      cp = NIL;
   goto jleave;
j505invpos:
   if(fprintf(fp, "505 invalid position: %s\n", cp) < 0)
      cp = NIL;
   goto jleave;
j506:
   if(fputs("506\n", fp) == EOF)
      cp = NIL;
   goto jleave;
}
/* Tear down every `digmsg' object still chained to the mailbox MBP:
 * close any privately owned temporary file and release the private
 * memory bag before freeing the context itself */
void
mx_dig_msg_on_mailbox_close(struct mailbox *mbp){ /* XXX HACK <- event! */
   struct mx_dig_msg_ctx *dmcp;
   NYD_IN;

   /* Keep popping the list head until the chain is drained */
   while(mbp->mb_digmsg != NIL){
      dmcp = mbp->mb_digmsg;
      mbp->mb_digmsg = dmcp->dmc_next;

      if(dmcp->dmc_flags & mx_DIG_MSG_FCLOSE)
         fclose(dmcp->dmc_fp);
      if(dmcp->dmc_flags & mx_DIG_MSG_OWN_MEMBAG)
         su_mem_bag_gut(dmcp->dmc_membag);
      n_free(dmcp);
   }
   NYD_OU;
}
/* The `digmsg' command: manage message digging objects.
 * Subcommands visible below: "create" (set up a context, optionally with
 * "-" to use standard output), "remove" (tear a context down), and
 * anything else is forwarded to a_dmsg_cmd() for the named object.
 * Returns non-zero on error (see the final "vp == NIL" conversion) */
int
c_digmsg(void *vp){
   char const *cp, *emsg;
   struct mx_dig_msg_ctx *dmcp;
   struct mx_cmd_arg *cap;
   struct mx_cmd_arg_ctx *cacp;
   NYD_IN;

   n_pstate_err_no = su_ERR_NONE;
   cacp = vp;
   cap = cacp->cac_arg;

   if(su_cs_starts_with_case("create", cp = cap->ca_arg.ca_str.s)){
      if(cacp->cac_no < 2 || cacp->cac_no > 3) /* XXX argparse is stupid */
         goto jesynopsis;
      cap = cap->ca_next;

      /* Request to use STDOUT? */
      if(cacp->cac_no == 3){
         cp = cap->ca_next->ca_arg.ca_str.s;
         if(*cp != '-' || cp[1] != '\0'){
            emsg = N_("digmsg: create: invalid I/O channel: %s\n");
            goto jeinval_quote;
         }
      }

      /* First of all, our context object */
      switch(a_dmsg_find(cp = cap->ca_arg.ca_str.s, &dmcp, TRU1)){
      case su_ERR_INVAL:
         emsg = N_("digmsg: create: message number invalid: %s\n");
         goto jeinval_quote;
      case su_ERR_EXIST:
         emsg = N_("digmsg: create: message object already exists: %s\n");
         goto jeinval_quote;
      default:
         break;
      }

      if(dmcp->dmc_flags & mx_DIG_MSG_COMPOSE)
         dmcp->dmc_flags = mx_DIG_MSG_COMPOSE | mx_DIG_MSG_COMPOSE_DIGGED;
      else{
         FILE *fp;

         if((fp = setinput(&mb, dmcp->dmc_mp, NEED_HEADER)) == NIL){
            /* XXX Should have panicked before.. */
            n_free(dmcp);
            emsg = N_("digmsg: create: mailbox I/O error for message: %s\n");
            goto jeinval_quote;
         }

         /* Extract the message headers into the context, allocating on
          * its private memory bag */
         su_mem_bag_push(n_go_data->gdc_membag, dmcp->dmc_membag);
         /* XXX n_header_extract error!! */
         n_header_extract((n_HEADER_EXTRACT_FULL |
            n_HEADER_EXTRACT_PREFILL_RECEIVERS |
            n_HEADER_EXTRACT_IGNORE_FROM_), fp, dmcp->dmc_hp, NIL);
         su_mem_bag_pop(n_go_data->gdc_membag, dmcp->dmc_membag);
      }

      if(cacp->cac_no == 3)
         dmcp->dmc_fp = n_stdout;
      /* For compose mode simply use FS_O_REGISTER, the number of dangling
       * deleted files with open descriptors until next fs_close_all()
       * should be very small; if this paradigm is changed
       * DIG_MSG_COMPOSE_GUT() needs to be adjusted */
      else if((dmcp->dmc_fp = mx_fs_tmp_open("digmsg", (mx_FS_O_RDWR |
            mx_FS_O_UNLINK | (dmcp->dmc_flags & mx_DIG_MSG_COMPOSE
               ? mx_FS_O_REGISTER : 0)),
            NIL)) != NIL)
         dmcp->dmc_flags |= mx_DIG_MSG_HAVE_FP |
            (dmcp->dmc_flags & mx_DIG_MSG_COMPOSE ? 0 : mx_DIG_MSG_FCLOSE);
      else{
         n_err(_("digmsg: create: cannot create temporary file: %s\n"),
            su_err_doc(n_pstate_err_no = su_err_no()));
         vp = NIL;
         goto jeremove;
      }

      /* Non-compose objects are linked into the mailbox chain so that
       * mx_dig_msg_on_mailbox_close() can clean them up */
      if(!(dmcp->dmc_flags & mx_DIG_MSG_COMPOSE)){
         dmcp->dmc_last = NIL;
         if((dmcp->dmc_next = mb.mb_digmsg) != NIL)
            dmcp->dmc_next->dmc_last = dmcp;
         mb.mb_digmsg = dmcp;
      }
   }else if(su_cs_starts_with_case("remove", cp)){
      if(cacp->cac_no != 2)
         goto jesynopsis;
      cap = cap->ca_next;

      switch(a_dmsg_find(cp = cap->ca_arg.ca_str.s, &dmcp, FAL0)){
      case su_ERR_INVAL:
         emsg = N_("digmsg: remove: message number invalid: %s\n");
         goto jeinval_quote;
      default:
         /* Compose-mode contexts only count once they have been dug */
         if(!(dmcp->dmc_flags & mx_DIG_MSG_COMPOSE) ||
               (dmcp->dmc_flags & mx_DIG_MSG_COMPOSE_DIGGED))
            break;
         /* FALLTHRU */
      case su_ERR_NOENT:
         emsg = N_("digmsg: remove: no such message object: %s\n");
         goto jeinval_quote;
      }

      /* Unlink from the mailbox chain (head or interior position) */
      if(!(dmcp->dmc_flags & mx_DIG_MSG_COMPOSE)){
         if(dmcp->dmc_last != NIL)
            dmcp->dmc_last->dmc_next = dmcp->dmc_next;
         else{
            ASSERT(dmcp == mb.mb_digmsg);
            mb.mb_digmsg = dmcp->dmc_next;
         }
         if(dmcp->dmc_next != NIL)
            dmcp->dmc_next->dmc_last = dmcp->dmc_last;
      }

      /* Drop a possibly still pending read overlay of this object */
      if((dmcp->dmc_flags & mx_DIG_MSG_HAVE_FP) &&
            mx_dig_msg_read_overlay == dmcp)
         mx_dig_msg_read_overlay = NIL;

      if(dmcp->dmc_flags & mx_DIG_MSG_FCLOSE)
         fclose(dmcp->dmc_fp);
jeremove:
      if(dmcp->dmc_flags & mx_DIG_MSG_OWN_MEMBAG)
         su_mem_bag_gut(dmcp->dmc_membag);

      /* Compose-mode contexts are embedded, only reset their flags */
      if(dmcp->dmc_flags & mx_DIG_MSG_COMPOSE)
         dmcp->dmc_flags = mx_DIG_MSG_COMPOSE;
      else
         n_free(dmcp);
   }else{
      /* Any other word: look up the object and dispatch the subcommand */
      switch(a_dmsg_find(cp, &dmcp, FAL0)){
      case su_ERR_INVAL:
         emsg = N_("digmsg: message number invalid: %s\n");
         goto jeinval_quote;
      case su_ERR_NOENT:
         emsg = N_("digmsg: no such message object: %s\n");
         goto jeinval_quote;
      default:
         break;
      }
      cap = cap->ca_next;

      /* Reset the output buffer file before producing a new response */
      if(dmcp->dmc_flags & mx_DIG_MSG_HAVE_FP){
         rewind(dmcp->dmc_fp);
         ftruncate(fileno(dmcp->dmc_fp), 0);
      }

      su_mem_bag_push(n_go_data->gdc_membag, dmcp->dmc_membag);
      if(!a_dmsg_cmd(dmcp->dmc_fp, dmcp, cap,
            ((cap != NIL) ? cap->ca_next : NIL)))
         vp = NIL;
      su_mem_bag_pop(n_go_data->gdc_membag, dmcp->dmc_membag);

      if(dmcp->dmc_flags & mx_DIG_MSG_HAVE_FP){
         rewind(dmcp->dmc_fp);
         /* This will be reset by go_input() _if_ we read to EOF */
         mx_dig_msg_read_overlay = dmcp;
      }
   }

jleave:
   NYD_OU;
   return (vp == NIL);

jesynopsis:
   mx_cmd_print_synopsis(mx_cmd_firstfit("digmsg"), NIL);
   goto jeinval;
jeinval_quote:
   emsg = V_(emsg);
   n_err(emsg, n_shexp_quote_cp(cp, FAL0));
jeinval:
   n_pstate_err_no = su_ERR_INVAL;
   vp = NIL;
   goto jleave;
}
/* Entry point for the compose-mode `^' command: parse CMD with a private
 * argument descriptor (matching the `digmsg' command table layout) and
 * dispatch it against the given context DMCP, writing the response to FP.
 * Returns the result of argument parsing resp. of a_dmsg_cmd() */
boole
mx_dig_msg_circumflex(struct mx_dig_msg_ctx *dmcp, FILE *fp, char const *cmd){
   /* Identical to (subset of) c_digmsg() cmd-tab */
   mx_CMD_ARG_DESC_SUBCLASS_DEF_NAME(dm, "digmsg", 5, pseudo_cad){
      {mx_CMD_ARG_DESC_SHEXP | mx_CMD_ARG_DESC_HONOUR_STOP,
         n_SHEXP_PARSE_IGNORE_EMPTY | n_SHEXP_PARSE_TRIM_IFSSPACE},
      {mx_CMD_ARG_DESC_SHEXP | mx_CMD_ARG_DESC_OPTION |
            mx_CMD_ARG_DESC_HONOUR_STOP,
         n_SHEXP_PARSE_TRIM_IFSSPACE}, /* arg1 */
      {mx_CMD_ARG_DESC_SHEXP | mx_CMD_ARG_DESC_OPTION |
            mx_CMD_ARG_DESC_HONOUR_STOP,
         n_SHEXP_PARSE_TRIM_IFSSPACE}, /* arg2 */
      {mx_CMD_ARG_DESC_SHEXP | mx_CMD_ARG_DESC_OPTION |
            mx_CMD_ARG_DESC_HONOUR_STOP,
         n_SHEXP_PARSE_TRIM_IFSSPACE}, /* arg3 */
      {mx_CMD_ARG_DESC_SHEXP | mx_CMD_ARG_DESC_OPTION |
            mx_CMD_ARG_DESC_HONOUR_STOP |
            mx_CMD_ARG_DESC_GREEDY | mx_CMD_ARG_DESC_GREEDY_JOIN,
         n_SHEXP_PARSE_TRIM_IFSSPACE} /* arg4 */
   }mx_CMD_ARG_DESC_SUBCLASS_DEF_END;

   struct mx_cmd_arg_ctx cac;
   boole rv;
   NYD_IN;

   cac.cac_desc = mx_CMD_ARG_DESC_SUBCLASS_CAST(&pseudo_cad);
   cac.cac_indat = cmd;
   cac.cac_inlen = UZ_MAX; /* CMD is NUL terminated, length is computed */
   cac.cac_msgflag = cac.cac_msgmask = 0;

   if((rv = mx_cmd_arg_parse(&cac)))
      rv = a_dmsg_cmd(fp, dmcp, cac.cac_arg, cac.cac_arg->ca_next);

   NYD_OU;
   return rv;
}
#include "su/code-ou.h"
/* s-it-mode */
|
<reponame>akoss/instapipe<filename>database.rb
require "json"
require "sequel"

require "google/cloud/storage"
# Persistence helpers: a lazily-connected Sequel database handle and a
# Google Cloud Storage bucket used for media files.
class Database
  # Connects to the database named by DATABASE_URL (memoized in @_db) and
  # bootstraps the schema on first use: creates the `stories` and `views`
  # tables when they do not exist yet.
  def self.database
    @_db ||= Sequel.connect(ENV["DATABASE_URL"])
    unless @_db.table_exists?("stories")
      @_db.create_table :stories do
        primary_key :id
        String :ig_id
        String :signed_url
        String :bucket_path
        String :user_id
        Integer :height
        Integer :width
        Integer :timestamp
        Boolean :is_video
        String :location
      end
    end
    unless @_db.table_exists?("views")
      @_db.create_table :views do
        primary_key :id
        Date :date
        Integer :prefetches
        Integer :count
      end
    end
    return @_db
  end

  # Returns the GCS bucket named by GC_BUCKET_NAME.
  # Credentials come from the GC_KEYS environment variable (a JSON blob,
  # parsed via the stdlib JSON module - note the `require "json"` at the
  # top of the file) when set, otherwise from the local ./gc_keys.json
  # key file.
  def self.file_storage_bucket
    credentials = "./gc_keys.json"
    credentials = JSON.parse(ENV["GC_KEYS"]) if ENV["GC_KEYS"]
    storage = Google::Cloud::Storage.new(
      project_id: ENV["GC_PROJECT_ID"],
      credentials: credentials
    )
    return storage.bucket(ENV["GC_BUCKET_NAME"])
  end
end
|
#!/usr/bin/env bash
# Cross-language smoke test: start each Asherah gRPC sidecar (first the Go
# implementation, then the Java one) listening on a UNIX domain socket,
# and run the Python and Node sample clients against it.
set -e

# Make sure the Go toolchain from GOROOT is used
export PATH=${GOROOT}/bin:${PATH}
go version

# Asherah configuration shared by both sidecars
export ASHERAH_SERVICE_NAME=service
export ASHERAH_PRODUCT_NAME=product
export ASHERAH_KMS_MODE=static
export ASHERAH_METASTORE_MODE=memory

# Test Go server
echo "------------Testing clients using go server------------"
cd ../../go
# Go sidecar expects duration strings (e.g. "60m")
export ASHERAH_EXPIRE_AFTER=60m
export ASHERAH_CHECK_INTERVAL=10m
# Launch in the background and remember the PID so it can be killed later
go run main.go -s /tmp/appencryption.sock &
ASHERAH_GO_SIDECAR_PID=$!
cd -
# Give the sidecar time to come up before connecting clients
sleep 10
cd python
pip3.7 install -r requirements.txt
python3.7 appencryption_client.py
cd ..
cd node
npm install
node appencryption_client.js
cd ..
kill $ASHERAH_GO_SIDECAR_PID
rm -rf /tmp/appencryption.sock

# Test Java server
echo "------------Testing clients using java server----------"
# TODO : Remove this after unifying configurations
# https://github.com/godaddy/asherah/issues/143
export ASHERAH_EXPIRE_AFTER=90
export ASHERAH_CHECK_INTERVAL=10
# Locate the shaded server jar in the local Maven repository and run it
find ~/.m2 -name '*grpc-server*dependencies.jar' | xargs java -jar &
ASHERAH_JAVA_SIDECAR_PID=$!
sleep 10
cd python
python3.7 appencryption_client.py
cd ..
cd node
node appencryption_client.js
cd ..
kill $ASHERAH_JAVA_SIDECAR_PID
rm -rf /tmp/appencryption.sock
|
# Generated by Django 3.2.7 on 2021-09-08 06:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``invoice_status`` choice field to the ``invoice`` model.

    Auto-generated by ``makemigrations`` (Django 3.2.7); applies on top of
    the app's initial migration.
    """

    dependencies = [
        ('invoice2', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='invoice',
            name='invoice_status',
            # Two-state payment workflow with French display labels:
            # 'waiting' ("En cours") until paid, then 'done' ("Payée").
            # Existing and new rows default to 'waiting'.
            field=models.CharField(choices=[('waiting', 'En cours'), ('done', 'Payée')], default='waiting', max_length=25, verbose_name='Statut'),
        ),
    ]
|
<filename>src/com/twu/biblioteca/BibliotecaApp.java
package com.twu.biblioteca;
import java.util.Scanner;
public class BibliotecaApp {

    /**
     * Application entry point: wires the main menu to standard output and
     * a scanner over standard input, then hands control to its loop.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        final Scanner stdinScanner = new Scanner(System.in);
        final NewMainMenu mainMenu = new NewMainMenu(System.out, stdinScanner);
        mainMenu.run();
    }
}
|
// Import packages
const express = require('express');
const app = express();

// Set up body-parser (parses JSON request bodies into req.body)
const bodyParser = require('body-parser');
app.use(bodyParser.json());

// Set up the database
const mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/users_db');

// Create the model
const User = mongoose.model('User', {
  name: String,
  email: String,
});

// Create the routes

// POST /users: create a user from the JSON request body.
app.post('/users', (req, res) => {
  const user = new User(req.body);
  user.save()
    .then(user => {
      res.send(user);
    })
    .catch(err => {
      // Without this handler a failed save left the request hanging and
      // produced an unhandled promise rejection; report it instead.
      res.status(500).send(err);
    });
});

// GET /users: list all users.
app.get('/users', (req, res) => {
  User.find((err, users) => {
    if (err) {
      // Respond as well as log, so the client is not left waiting forever.
      console.error(err);
      return res.status(500).send(err);
    }
    res.send(users);
  });
});

// Listen
app.listen(3000);
|
<gh_stars>0
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import pytest
from datadog_checks.checks import AgentCheck
@pytest.fixture
def aggregator():
    """Provide the shared aggregator stub, reset to a clean state."""
    from datadog_checks.stubs import aggregator as stub

    stub.reset()
    return stub
def test_instance():
    """
    Simply assert the class can be instantiated
    """
    AgentCheck()
class TestTags:
    """Behaviour of ``AgentCheck._normalize_tags`` per tag string type."""

    def test_default_string(self):
        # A plain (native) string tag is encoded to utf-8 bytes.
        check = AgentCheck()
        original = 'default:string'
        source = [original]

        result = check._normalize_tags(source, None)

        assert result is not source
        assert result[0] == original.encode('utf-8')

    def test_bytes_string(self):
        # A tag that is already bytes passes through untouched.
        check = AgentCheck()
        original = b'bytes:string'
        source = [original]

        result = check._normalize_tags(source, None)

        assert result is not source
        # Ensure no new allocation occurs
        assert result[0] is original

    def test_unicode_string(self):
        # An explicit unicode literal is encoded to utf-8 bytes as well.
        check = AgentCheck()
        original = u'unicode:string'
        source = [original]

        result = check._normalize_tags(source, None)

        assert result is not source
        assert result[0] == original.encode('utf-8')
class LimitedCheck(AgentCheck):
    # Check subclass with a small metric-context limit so the limiting
    # behaviour can be exercised quickly in the tests below.
    DEFAULT_METRIC_LIMIT = 10
class TestLimits():
    """Metric-context limiting: one warning once the limit is exceeded,
    and no metrics beyond the configured maximum are accepted."""

    def test_metric_limit_gauges(self, aggregator):
        # Each gauge() call creates a distinct context here, so exactly
        # DEFAULT_METRIC_LIMIT (10) submissions are accepted.
        check = LimitedCheck()
        assert check.get_warnings() == []

        for i in range(0, 10):
            check.gauge("metric", 0)
        assert len(check.get_warnings()) == 0
        assert len(aggregator.metrics("metric")) == 10

        # Beyond the limit: one warning, no additional metrics stored.
        for i in range(0, 10):
            check.gauge("metric", 0)
        assert len(check.get_warnings()) == 1
        assert len(aggregator.metrics("metric")) == 10

    def test_metric_limit_count(self, aggregator):
        check = LimitedCheck()
        assert check.get_warnings() == []

        # Multiple calls for a single context should not trigger
        for i in range(0, 20):
            check.count("metric", 0, hostname="host-single")
        assert len(check.get_warnings()) == 0
        assert len(aggregator.metrics("metric")) == 20

        # Multiple contexts should trigger
        # Only 9 new contexts should pass through
        for i in range(0, 20):
            check.count("metric", 0, hostname="host-{}".format(i))
        assert len(check.get_warnings()) == 1
        assert len(aggregator.metrics("metric")) == 29

    def test_metric_limit_instance_config(self, aggregator):
        # An explicit max_returned_metrics in the instance config
        # overrides the class default.
        instances = [
            {
                "max_returned_metrics": 42,
            }
        ]
        check = AgentCheck("test", {}, instances)
        assert check.get_warnings() == []

        for i in range(0, 42):
            check.gauge("metric", 0)
        assert len(check.get_warnings()) == 0
        assert len(aggregator.metrics("metric")) == 42

        check.gauge("metric", 0)
        assert len(check.get_warnings()) == 1
        assert len(aggregator.metrics("metric")) == 42

    def test_metric_limit_instance_config_zero(self, aggregator):
        # A zero limit is rejected with a warning; the check then falls
        # back to the class default of 10 (LimitedCheck).
        instances = [
            {
                "max_returned_metrics": 0,
            }
        ]
        check = LimitedCheck("test", {}, instances)
        assert len(check.get_warnings()) == 1

        for i in range(0, 42):
            check.gauge("metric", 0)
        assert len(check.get_warnings()) == 1  # get_warnings resets the array
        assert len(aggregator.metrics("metric")) == 10
|
/*
* Copyright (c) 2013, 2016 Oracle and/or its affiliates. All rights reserved. This
* code is released under a tri EPL/GPL/LGPL license. You can use it,
* redistribute it and/or modify it under the terms of the:
*
* Eclipse Public License version 1.0
* GNU General Public License version 2
* GNU Lesser General Public License version 2.1
*/
package org.jruby.truffle.core.proc;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.Frame;
import com.oracle.truffle.api.frame.FrameInstance.FrameAccess;
import com.oracle.truffle.api.frame.MaterializedFrame;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.object.DynamicObject;
import com.oracle.truffle.api.object.Shape;
import com.oracle.truffle.api.source.SourceSection;
import org.jcodings.specific.UTF8Encoding;
import org.jruby.runtime.ArgumentDescriptor;
import org.jruby.truffle.RubyContext;
import org.jruby.truffle.core.CoreClass;
import org.jruby.truffle.core.CoreMethod;
import org.jruby.truffle.core.CoreMethodArrayArgumentsNode;
import org.jruby.truffle.core.Layouts;
import org.jruby.truffle.core.UnaryCoreMethodNode;
import org.jruby.truffle.core.YieldingCoreMethodNode;
import org.jruby.truffle.core.binding.BindingNodes;
import org.jruby.truffle.core.string.StringOperations;
import org.jruby.truffle.language.NotProvided;
import org.jruby.truffle.language.arguments.ArgumentDescriptorUtils;
import org.jruby.truffle.language.arguments.RubyArguments;
import org.jruby.truffle.language.control.RaiseException;
import org.jruby.truffle.language.dispatch.CallDispatchHeadNode;
import org.jruby.truffle.language.dispatch.DispatchHeadNodeFactory;
import org.jruby.truffle.language.objects.AllocateObjectNode;
import org.jruby.truffle.language.objects.AllocateObjectNodeGen;
import org.jruby.truffle.language.yield.YieldNode;
@CoreClass(name = "Proc")
public abstract class ProcNodes {
    @CoreMethod(names = "allocate", constructor = true)
    public abstract static class AllocateNode extends UnaryCoreMethodNode {

        public AllocateNode(RubyContext context, SourceSection sourceSection) {
            super(context, sourceSection);
        }

        /** {@code Proc.allocate} is not supported: always raises a TypeError. */
        @Specialization
        public DynamicObject allocate(DynamicObject rubyClass) {
            throw new RaiseException(coreLibrary().typeErrorAllocatorUndefinedFor(rubyClass, this));
        }

    }
    /**
     * Implements {@code Proc.new}: reuses the given (or the caller's implicit)
     * block when its class already matches, and otherwise allocates a fresh
     * instance of the requested subclass and calls {@code initialize} on it.
     */
    @CoreMethod(names = "new", constructor = true, needsBlock = true, rest = true)
    public abstract static class ProcNewNode extends CoreMethodArrayArgumentsNode {

        @Child private CallDispatchHeadNode initializeNode;
        @Child private AllocateObjectNode allocateObjectNode;

        // Cached identities for the fast-path guard in procNormalOptimized()
        protected final DynamicObject PROC_CLASS = coreLibrary().getProcClass();
        protected final Shape PROC_SHAPE = coreLibrary().getProcFactory().getShape();

        public ProcNewNode(RubyContext context, SourceSection sourceSection) {
            super(context, sourceSection);
        }

        public abstract DynamicObject executeProcNew(
                VirtualFrame frame,
                DynamicObject procClass,
                Object[] args,
                Object block);

        /**
         * No block literal given: fall back to the block of the calling frame,
         * raising an ArgumentError when the caller has none either.
         */
        @Specialization
        public DynamicObject proc(VirtualFrame frame, DynamicObject procClass, Object[] args, NotProvided block) {
            final Frame parentFrame = getContext().getCallStack().getCallerFrameIgnoringSend()
                    .getFrame(FrameAccess.READ_ONLY, true);
            final DynamicObject parentBlock = RubyArguments.getBlock(parentFrame.getArguments());

            if (parentBlock == null) {
                throw new RaiseException(coreLibrary().argumentErrorProcWithoutBlock(this));
            }

            return executeProcNew(frame, procClass, args, parentBlock);
        }

        /** Fast path: exactly Proc with the canonical shape, reuse the block. */
        @Specialization(guards = { "procClass == PROC_CLASS", "block.getShape() == PROC_SHAPE" })
        public DynamicObject procNormalOptimized(DynamicObject procClass, Object[] args, DynamicObject block) {
            return block;
        }

        /** Requested class matches the block's metaclass, reuse the block. */
        @Specialization(guards = "procClass == metaClass(block)")
        public DynamicObject procNormal(DynamicObject procClass, Object[] args, DynamicObject block) {
            return block;
        }

        @Specialization(guards = "procClass != metaClass(block)")
        public DynamicObject procSpecial(VirtualFrame frame, DynamicObject procClass, Object[] args, DynamicObject block) {
            // Instantiate a new instance of procClass as classes do not correspond
            final DynamicObject proc = getAllocateObjectNode().allocate(
                    procClass,
                    Layouts.PROC.getType(block),
                    Layouts.PROC.getSharedMethodInfo(block),
                    Layouts.PROC.getCallTargetForType(block),
                    Layouts.PROC.getCallTargetForLambdas(block),
                    Layouts.PROC.getDeclarationFrame(block),
                    Layouts.PROC.getMethod(block),
                    Layouts.PROC.getSelf(block),
                    Layouts.PROC.getBlock(block),
                    Layouts.PROC.getFrameOnStackMarker(block));

            getInitializeNode().call(frame, proc, "initialize", block, args);

            return proc;
        }

        protected DynamicObject metaClass(DynamicObject object) {
            return Layouts.BASIC_OBJECT.getMetaClass(object);
        }

        // Lazily created child node (insert() outside of partial evaluation)
        private AllocateObjectNode getAllocateObjectNode() {
            if (allocateObjectNode == null) {
                CompilerDirectives.transferToInterpreter();
                allocateObjectNode = insert(AllocateObjectNodeGen.create(getContext(), null, null, null));
            }

            return allocateObjectNode;
        }

        // Lazily created child node for dispatching "initialize" on self
        private CallDispatchHeadNode getInitializeNode() {
            if (initializeNode == null) {
                CompilerDirectives.transferToInterpreter();
                initializeNode = insert(DispatchHeadNodeFactory.createMethodCallOnSelf(getContext()));
            }

            return initializeNode;
        }

    }
    /**
     * Implements {@code Proc#dup} and {@code Proc#clone}: allocates a new proc
     * carrying the very same layout properties (type, method info, call
     * targets, declaration frame, self, block, marker) as the original.
     */
    @CoreMethod(names = { "dup", "clone" })
    public abstract static class DupNode extends UnaryCoreMethodNode {

        @Child private AllocateObjectNode allocateObjectNode;

        public DupNode(RubyContext context, SourceSection sourceSection) {
            super(context, sourceSection);
        }

        @Specialization
        public DynamicObject dup(DynamicObject proc) {
            final DynamicObject copy = getAllocateObjectNode().allocate(
                    Layouts.BASIC_OBJECT.getLogicalClass(proc),
                    Layouts.PROC.getType(proc),
                    Layouts.PROC.getSharedMethodInfo(proc),
                    Layouts.PROC.getCallTargetForType(proc),
                    Layouts.PROC.getCallTargetForLambdas(proc),
                    Layouts.PROC.getDeclarationFrame(proc),
                    Layouts.PROC.getMethod(proc),
                    Layouts.PROC.getSelf(proc),
                    Layouts.PROC.getBlock(proc),
                    Layouts.PROC.getFrameOnStackMarker(proc));

            return copy;
        }

        // Lazily created child node (insert() outside of partial evaluation)
        private AllocateObjectNode getAllocateObjectNode() {
            if (allocateObjectNode == null) {
                CompilerDirectives.transferToInterpreter();
                allocateObjectNode = insert(AllocateObjectNodeGen.create(getContext(), null, null, null));
            }

            return allocateObjectNode;
        }

    }
    /** Implements {@code Proc#arity} via the shared method info's arity. */
    @CoreMethod(names = "arity")
    public abstract static class ArityNode extends CoreMethodArrayArgumentsNode {

        public ArityNode(RubyContext context, SourceSection sourceSection) {
            super(context, sourceSection);
        }

        @Specialization
        public int arity(DynamicObject proc) {
            return Layouts.PROC.getSharedMethodInfo(proc).getArity().getArityNumber();
        }

    }
    /**
     * Implements {@code Proc#binding}: wraps the proc's declaration frame in a
     * new Binding object.
     */
    @CoreMethod(names = "binding")
    public abstract static class BindingNode extends CoreMethodArrayArgumentsNode {

        public BindingNode(RubyContext context, SourceSection sourceSection) {
            super(context, sourceSection);
        }

        @Specialization
        public DynamicObject binding(DynamicObject proc) {
            final MaterializedFrame frame = Layouts.PROC.getDeclarationFrame(proc);
            return BindingNodes.createBinding(getContext(), frame);
        }

    }
@CoreMethod(names = {"call", "[]", "yield"}, rest = true, needsBlock = true)
public abstract static class CallNode extends YieldingCoreMethodNode {

    public CallNode(RubyContext context, SourceSection sourceSection) {
        super(context, sourceSection);
    }

    /**
     * Invokes the proc with the given arguments when no block literal is
     * supplied at the call site.
     */
    @Specialization
    public Object call(VirtualFrame frame, DynamicObject proc, Object[] arguments, NotProvided noBlock) {
        return yield(frame, proc, arguments);
    }

    /**
     * Invokes the proc with the given arguments, substituting the supplied
     * block for the duration of the call.
     */
    @Specialization
    public Object call(VirtualFrame frame, DynamicObject proc, Object[] arguments, DynamicObject blockArgument) {
        return yieldWithModifiedBlock(frame, proc, blockArgument, arguments);
    }

}
@CoreMethod(names = "lambda?")
public abstract static class LambdaNode extends CoreMethodArrayArgumentsNode {

    public LambdaNode(RubyContext context, SourceSection sourceSection) {
        super(context, sourceSection);
    }

    /**
     * Reports whether this proc was created as a lambda.
     */
    @Specialization
    public boolean lambda(DynamicObject proc) {
        return ProcType.LAMBDA == Layouts.PROC.getType(proc);
    }

}
@CoreMethod(names = "parameters")
public abstract static class ParametersNode extends CoreMethodArrayArgumentsNode {

    public ParametersNode(RubyContext context, SourceSection sourceSection) {
        super(context, sourceSection);
    }

    /**
     * Builds the Ruby representation of the proc's parameter list from its
     * recorded argument descriptors, passing along whether the proc is a
     * lambda.
     */
    @TruffleBoundary
    @Specialization
    public DynamicObject parameters(DynamicObject proc) {
        final boolean lambda = Layouts.PROC.getType(proc) == ProcType.LAMBDA;
        final ArgumentDescriptor[] descriptors = Layouts.PROC.getSharedMethodInfo(proc).getArgumentDescriptors();
        return ArgumentDescriptorUtils.argumentDescriptorsToParameters(getContext(), descriptors, lambda);
    }

}
@CoreMethod(names = "source_location")
public abstract static class SourceLocationNode extends CoreMethodArrayArgumentsNode {

    public SourceLocationNode(RubyContext context, SourceSection sourceSection) {
        super(context, sourceSection);
    }

    /**
     * Returns a [file name, start line] array describing where the proc was
     * defined, or nil when its source section has no source attached.
     */
    @TruffleBoundary
    @Specialization
    public Object sourceLocation(DynamicObject proc) {
        final SourceSection section = Layouts.PROC.getSharedMethodInfo(proc).getSourceSection();
        if (section.getSource() == null) {
            return nil();
        }

        final DynamicObject fileName = createString(StringOperations.encodeRope(
                section.getSource().getName(), UTF8Encoding.INSTANCE));
        final Object[] store = new Object[]{ fileName, section.getStartLine() };
        return Layouts.ARRAY.createArray(coreLibrary().getArrayFactory(), store, store.length);
    }

}
}
|
import logo from '@/images/logo.png'
import logoFold from '@/images/logo_fold.png'

// Branding configuration for the admin backend: page title plus the
// expanded and collapsed (folded) sidebar logos.
export const sysInfo = {
  title: '欧蕾克后台',
  logo,
  logoFold
}
|
# frozen_string_literal: true
##
# Check a solr ping via `Blacklight.default_index`
#
# This check should fail if the solr server is unreachable
class Starlight::SolrPingCheck < OkComputer::Check
  # Ping Solr via Blacklight's default index connection, recording a
  # failure on this check when the ping does not succeed.
  def check
    unless Blacklight.default_index.ping
      mark_failure
      mark_message "Solr connection failed (ping)"
      return
    end

    mark_message "Solr connection OK (ping)"
  end
end
|
<reponame>elernal-tree/elernal-tree
import { Component, Input, OnInit } from '@angular/core';
/**
 * Presentational component displaying a set of attack stat values,
 * all supplied by the parent component via inputs.
 */
@Component({
  selector: 'app-atk',
  templateUrl: './atk.component.html',
  styleUrls: ['./atk.component.scss'],
})
export class AtkComponent {
  /** "Maguna" attack value — domain term, confirm exact meaning with game data. */
  @Input() magunaAtk: number;

  /** Normal attack value. */
  @Input() normalAtk: number;

  /** EX attack value. */
  @Input() exAtk: number;
}
|
# Merge the 'master' branch of the yadm-managed dotfiles repo into the
# current worktree, stashing files that would conflict (sparse-checkout
# excluded files and staged changes) around the merge and restoring them
# afterwards.
# NOTE(review): relies on $master (master worktree path) and $repo (git
# directory path) being set by the caller — confirm.
manage_repository() {
if [ ! -d "$master" ]; then
# %F{red}...%f are zsh prompt color escapes embedded in the message.
echo ":: %F{red}ERROR%f: master worktree does not exist"
return 1
fi
# Tracked-but-ignored files per the sparse-checkout spec, plus anything staged.
files=( $(git -C $master ls-files -ci -X $repo/info/sparse-checkout) $(yadm diff --name-only --cached) )
# ':/:' anchors each entry as a top-level pathspec; zsh's ${^files} expansion
# distributes that prefix over every array element. Bail out if the stash fails.
yadm stash push -- ':/:'${^files} || return 1
yadm merge --no-edit master
yadm stash pop
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.