text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Provision a TrainTicket experiment instance and notify the slice owner
# by email when setup starts and finishes.
#
# FIX: `mail` reads the message body from stdin; without a piped body the
# command can block forever when the script runs non-interactively, so an
# explicit body is provided. The command substitution is also quoted so an
# unexpected multi-word value cannot word-split.
echo "TrainTicket setup starting on $(hostname)" | mail -s "TrainTicket instance is setting up!" "$(geni-get slice_email)"
source /local/repository/aptSetup.sh
source /local/repository/shcSetup.sh
source /local/repository/dockerSetup.sh
source /local/repository/setupTrainTicket.sh
echo "TrainTicket setup finished on $(hostname)" | mail -s "TrainTicket instance finished setting up!" "$(geni-get slice_email)"
|
#!/usr/bin/env bash
# Index a fixed set of repository revisions in parallel for the
# precise-code-intel tester: clone each repository once, then run the Go
# indexer on every (repo, revision) pair.
set -eu

# Run from the repository root regardless of where the script is invoked.
cd "$(dirname "${BASH_SOURCE[0]}")/../../../.."
SCRIPTDIR=$(realpath './internal/cmd/precise-code-intel-tester/scripts')

# Map of repository name -> space-separated list of commit SHAs to index.
declare -A REVS=(
[etcd]='1044a8b07c56f3d32a1f3fe91c8ec849a8b17b5e dfb0a405096af39e694a501de5b0a46962b3050e fb77f9b1d56391318823c434f586ffe371750321'
[tidb]='2f9a487ebbd2f1a46b5f2c2262ae8f0ef4c4d42f 43764a59b7dcb846dc1e9754e8f125818c69a96f b4f42abc36d893ec3f443af78fc62705a2e54236'
[titan]='0ad2e75d529bda74472a1dbb5e488ec095b07fe7 33623cc32f8d9f999fd69189d29124d4368c20ab aef232fbec9089d4468ff06705a3a7f84ee50ea6'
[zap]='2aa9fa25da83bdfff756c36a91442edc9a84576c a6015e13fab9b744d96085308ce4e8f11bad1996'
)

# Flatten REVS into two parallel arrays so each (repo, rev) pair can be
# zipped together by parallel_run.sh's :::+ operator below.
# NOTE: ${REVS[$k]} is intentionally unquoted so the SHA list word-splits.
KEYS=()
VALS=()
for k in "${!REVS[@]}"; do
for v in ${REVS[$k]}; do
KEYS+=("${k}")
VALS+=("${v}")
done
done

# Clone all repositories in parallel, then index each (repo, rev) pair.
./dev/ci/parallel_run.sh "${SCRIPTDIR}/clone.sh" {} ::: "${!REVS[@]}"
./dev/ci/parallel_run.sh "${SCRIPTDIR}/go-index.sh" {} {} ::: "${KEYS[@]}" :::+ "${VALS[@]}"
|
package pers.ruikai.pwms.formatter;
import java.util.ArrayList;
import java.util.Formattable;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.jakewharton.fliptables.FlipTable;
import pers.ruikai.pwms.models.Category;
import pers.ruikai.pwms.models.Transaction;
import pers.ruikai.pwms.models.Transaction.InOrOut;
import pers.ruikai.pwms.utils.DateConverter;
import pers.ruikai.pwms.warehouse.Warehouse;
/**
 * Formats a {@link Warehouse} and the transactions of its {@link Category}
 * entries as human-readable text tables (rendered with FlipTable).
 */
public class TextFormatter {

    /**
     * Marker inserted before every non-ASCII character so FlipTable's
     * char-count based column sizing reserves an extra cell for double-width
     * glyphs; the markers are stripped from the rendered table afterwards.
     * FIX: a control character is used instead of the previous "." marker,
     * because stripping the markers with {@code replace(".", "")} also
     * destroyed every legitimate dot in the table data.
     */
    private static final String WIDE_MARKER = "\u0001";

    /** Matches any single character outside the ASCII range. */
    private static final Pattern NON_ASCII = Pattern.compile("([^\u0000-\u007F])");

    /**
     * Net object count of a category: IN transactions add their quantity,
     * OUT transactions subtract it.
     */
    private static int numObject(Category category) {
        int ret = 0;
        for (Transaction tx : category.getTransactions()) {
            int cnt = tx.getNumber();
            if (tx.getInOrOut() == InOrOut.OUT) {
                cnt = -cnt;
            }
            ret += cnt;
        }
        return ret;
    }

    /**
     * Renders one transaction as a table row: date, IN/OUT flag, quantity,
     * then every attribute value in declaration order.
     */
    private static List<String> format(Transaction tx) {
        List<String> ret = new ArrayList<>();
        ret.add(DateConverter.date2String(tx.getDate()));
        ret.add(tx.getInOrOut() == InOrOut.IN ? "IN" : "OUT");
        ret.add(String.valueOf(tx.getNumber()));
        for (String value : tx.getValues()) {
            ret.add(value);
        }
        return ret;
    }

    /** Prefixes every non-ASCII character in {@code s} with WIDE_MARKER. */
    private static String markWideChars(String s) {
        return NON_ASCII.matcher(s).replaceAll(WIDE_MARKER + "$1");
    }

    /**
     * Renders header + cells as an ASCII table. Double-width characters are
     * padded with markers before rendering so columns line up, and the
     * markers are removed from the finished table.
     * The pattern is compiled once as a constant instead of per element.
     */
    private static String formatTable(String[] header, String[][] cells) {
        for (int i = 0; i < header.length; i++) {
            header[i] = markWideChars(header[i]);
        }
        for (int i = 0; i < cells.length; i++) {
            for (int j = 0; j < cells[i].length; j++) {
                cells[i][j] = markWideChars(cells[i][j]);
            }
        }
        // Strip only the inserted markers; genuine '.' characters survive.
        return FlipTable.of(header, cells).replace(WIDE_MARKER, "");
    }

    /**
     * Renders one category: a "[name] netCount txCount" summary line
     * followed by a table of all its transactions.
     */
    private static String format(Category category) {
        List<String> lines = new ArrayList<>();
        lines.add(String.format("[%s] %d %d", category.getName(), numObject(category),
                category.getTransactions().size()));

        List<String> header = new ArrayList<>();
        header.add("Date");
        header.add("IN/OUT");
        header.add("Number");
        for (String name : category.getAttrNames()) {
            header.add(name);
        }

        List<String[]> rows = new ArrayList<>();
        for (Transaction tx : category.getTransactions()) {
            List<String> row = format(tx);
            rows.add(row.toArray(new String[row.size()]));
        }

        String[] headerArray = header.toArray(new String[header.size()]);
        String[][] rowsArray = rows.toArray(new String[rows.size()][header.size()]);
        lines.add(formatTable(headerArray, rowsArray));
        return String.join("\n", lines);
    }

    /**
     * Formats the whole warehouse: each category's block separated (and
     * followed) by a blank line.
     *
     * @param warehouse Warehouse object
     * @return the formatted string
     */
    public static String format(Warehouse warehouse) {
        StringBuilder ret = new StringBuilder();
        for (Category category : warehouse.getCategories()) {
            ret.append(format(category)).append("\n\n");
        }
        return ret.toString();
    }
}
|
def sum_of_squares(n):
    """Return the sum of i**2 for every integer i from 0 through n inclusive."""
    return sum(i * i for i in range(n + 1))
package clientAPI.impl.OncardAPI;
import clientAPI.impl.CommandHeader;
import clientAPI.impl.CommandHeader.CmdType;
/**
 * Low-level interface exposing the instructions and error codes of the
 * bonus-credit (Bonuspunkte) applet.
 */
public interface BonusCreditStoreOncard {

    /**
     * OnCard applet identifier (AID).
     */
    public final static byte[] AID = { (byte) 0xFD, 'u', 'B', 'a', 'y', 'B', 'o', 'n', 'u', 's', 'C', 'r', 'e', 'd', 'i', 't' };

    /* -------------------- OnCard instructions ------------------------------------ */

    /**
     * APDU for adding bonus credits. The payload is always the fixed size
     * of a short (2 bytes).
     */
    public final static CommandHeader ADD_CREDITS = new CommandHeader((byte) 0xE0, (byte) 0x10, (byte) 0x00, (byte) 0x00, (short) 2, CmdType.LC_NoLE);

    /**
     * APDU for removing bonus credits. The payload is always the fixed size
     * of a short (2 bytes).
     */
    public final static CommandHeader SUB_CREDITS = new CommandHeader((byte) 0xE0, (byte) 0x20, (byte) 0x00, (byte) 0x00, (short) 2, CmdType.LC_NoLE);

    /**
     * APDU for querying the bonus-credit balance. The response is always
     * the fixed size of a short (2 bytes).
     */
    public final static CommandHeader CHECK_BALANCE = new CommandHeader((byte) 0xE0, (byte) 0x30, (byte) 0x00, (byte) 0x00, CmdType.NoLC_LE, (short) 2);

    /* -------------------- OnCard error codes ------------------------------------ */

    /**
     * Signals that the balance is too low for the requested payment amount.
     */
    public final static short ERROR_INSUFFICIENT_BALANCE = 0x6A20;

    /**
     * Signals that adding the bonus credits would exceed the maximum balance.
     */
    public final static short ERROR_TRANS_EXCEED_MAXIMUM_BALANCE = 0x6A21;
}
|
#include<bits/stdc++.h>
using namespace std;
int main () {
    // Read the array and build an inclusive prefix-sum table so that
    // prefix[i] = a[0] + ... + a[i].
    int n;
    cin >> n;
    vector<long long> prefix(n, 0);
    for (int i = 0; i < n; ++i) {
        int value;
        cin >> value;
        prefix[i] = value;
        if (i > 0) {
            prefix[i] += prefix[i - 1];
        }
    }
    // Answer q range-sum queries over 1-based inclusive intervals [l, r]:
    // sum = prefix[r-1] - prefix[l-2] (second term only when l > 1).
    int q;
    cin >> q;
    while (q-- > 0) {
        int l, r;
        cin >> l >> r;
        long long sum = prefix[r - 1];
        if (l > 1) {
            sum -= prefix[l - 2];
        }
        cout << sum << endl;
    }
    return 0;
}
|
# models.py
from django.db import models
from main.models import Listing # Assuming the Listing model is defined in main app
class Labourer(models.Model):
    # Define other fields for the Labourer model here
    # Listings (projects) this labourer is linked to. blank=True makes the
    # relation optional in forms/admin; an M2M needs no null=True.
    allproj = models.ManyToManyField(Listing, blank=True)
// Helpers for reading new-project input from the DOM and generating ids.
const projectInput = (() => {
  /**
   * Generate a random non-negative integer project id.
   * FIX: the previous version passed a second argument to Math.round
   * (which is silently ignored) and drew from a range far above
   * Number.MAX_SAFE_INTEGER, where integers lose precision and distinct
   * draws can collide. Ids are now kept within the safe integer range.
   */
  const createProjectId = () =>
    Math.floor(Math.random() * Number.MAX_SAFE_INTEGER);

  /** Read the project name typed into the #project-name input field. */
  const getProjectName = () => document.querySelector('#project-name').value;

  return {
    createProjectId,
    getProjectName,
  };
})();

export default projectInput;
<reponame>midasplatform/MidasClient
/******************************************************************************
* Copyright 2011 Kitware Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "DeleteThread.h"
#include "MidasTreeItem.h"
#include "Midas3TreeItem.h"
#include "Midas3FolderTreeItem.h"
#include "Midas3ItemTreeItem.h"
#include "Midas3BitstreamTreeItem.h"
#include "m3doFolder.h"
#include "m3dsFolder.h"
#include "m3doItem.h"
#include "m3dsItem.h"
#include "m3doBitstream.h"
#include "m3dsBitstream.h"
// Worker thread that deletes either a Midas 2.x resource (m_Resource) or a
// Midas 3 folder/item/bitstream (m_Resource3), optionally also removing the
// underlying data on disk.

DeleteThread::DeleteThread()
{
  // Default: remove only the database record, not the file(s) on disk.
  m_DeleteOnDisk = false;
  m_Resource = NULL;
  m_Resource3 = NULL;
}

DeleteThread::~DeleteThread()
{
}

// Select a Midas 2.x tree item to delete (alternative to SetResource3).
void DeleteThread::SetResource(MidasTreeItem* resource)
{
  m_Resource = resource;
}

// Select a Midas 3 tree item (folder, item or bitstream) to delete.
void DeleteThread::SetResource3(Midas3TreeItem* resource)
{
  m_Resource3 = resource;
}

// When true, run() also removes the resource's data from disk.
void DeleteThread::SetDeleteOnDisk(bool val)
{
  m_DeleteOnDisk = val;
}

// Thread entry point: disables UI actions, performs the delete for
// whichever resource pointer is set, emits ErrorMessage on failure, and
// re-enables UI actions before returning.
void DeleteThread::run()
{
  emit EnableActions(false);
  if( m_Resource )
  {
    // Midas 2.x path: delete by UUID through the database API.
    mds::DatabaseAPI db;
    if( !db.DeleteResource(m_Resource->GetUuid(), m_DeleteOnDisk) )
    {
      emit ErrorMessage("Error deleting resource " + m_Resource->GetData(0).toString() );
    }
  }
  else if( m_Resource3 )
  {
    // Midas 3 path: the concrete subtype determines which data-access
    // object performs the delete, so probe with dynamic_cast (at most one
    // of the three casts succeeds).
    Midas3FolderTreeItem* folderTreeItem = dynamic_cast<Midas3FolderTreeItem *>(m_Resource3);
    Midas3ItemTreeItem* itemTreeItem = dynamic_cast<Midas3ItemTreeItem *>(m_Resource3);
    Midas3BitstreamTreeItem* bitstreamTreeItem = dynamic_cast<Midas3BitstreamTreeItem *>(m_Resource3);
    if( folderTreeItem )
    {
      m3ds::Folder folder;
      folder.SetObject(folderTreeItem->GetFolder() );
      if( !folder.Delete(m_DeleteOnDisk) )
      {
        emit ErrorMessage("Error deleting folder " + QString(folderTreeItem->GetFolder()->GetName().c_str() ) );
      }
    }
    else if( itemTreeItem )
    {
      m3ds::Item item;
      item.SetObject(itemTreeItem->GetItem() );
      if( !item.Delete(m_DeleteOnDisk) )
      {
        emit ErrorMessage("Error deleting item " + QString(itemTreeItem->GetItem()->GetName().c_str() ) );
      }
    }
    else if( bitstreamTreeItem )
    {
      m3ds::Bitstream bitstream;
      bitstream.SetObject(bitstreamTreeItem->GetBitstream() );
      if( !bitstream.Delete(m_DeleteOnDisk) )
      {
        emit ErrorMessage("Error deleting bitstream " +
                          QString(bitstreamTreeItem->GetBitstream()->GetName().c_str() ) );
      }
    }
  }
  emit EnableActions(true);
}
|
#!/bin/bash
# Filesystem write-benchmark driver: sweeps concurrency level, access
# pattern and write size, invoking `make bench` once per combination and
# logging iostat output under $TMP_DIR.
TOP_DIR=${1:-tdir}                     # target directory (optional arg 1)
TMP_DIR="/tmp"                         # where iostat logs are written
DURATION="2m"                          # duration of each benchmark run
CONCURRENTS=(32 64 128 256 512 1024)   # concurrent writers to test
LOCATIONS=("sequential" "random")      # access patterns
SIZES=(1 10 100)                       # write sizes in KB

echo "top test directory: $TOP_DIR"
if [ ! -d "$TOP_DIR" ]; then
    mkdir -p "$TOP_DIR"
fi

start=$(date +"%Y-%m-%d %H:%M:%S")
echo "start benchmark: $start"
for concurrent in "${CONCURRENTS[@]}"; do
    for loc in "${LOCATIONS[@]}"; do
        for size in "${SIZES[@]}"; do
            prefix="${loc}-con${concurrent}-${size}KB"
            echo "running $prefix ..."
            now=$(date +"%Y%m%d%H%M%S")
            iostat_logfile="${TMP_DIR}/iostat-${prefix}-${now}.log"
            dir_fadv="${TOP_DIR}/fadv-${prefix}"
            dir_fsync="${TOP_DIR}/fsync-${prefix}"
            dir_nosync="${TOP_DIR}/nosync-${prefix}"
            # FIX: quote every variable so values containing whitespace
            # cannot word-split the make invocation.
            make IOSTAT_LOGFILE="$iostat_logfile" DIR_FADV="$dir_fadv" \
                 DIR_FSYNC="$dir_fsync" DIR_NOSYNC="$dir_nosync" \
                 CONCURRENT="$concurrent" DURATION="$DURATION" \
                 SIZE="$size" DIR_MAKER="$loc" bench
            echo
            echo
        done
    done
done
end=$(date +"%Y-%m-%d %H:%M:%S")
echo "end benchmark: $end"
|
<filename>src/components/common/Navbar/Navbar.js
import React, { Component } from 'react';
import AnchorLink from 'react-anchor-link-smooth-scroll';
import Scrollspy from 'react-scrollspy';
import styled from 'styled-components';
import { Container } from '@components/global';
import {
Nav,
NavItem,
Brand,
StyledContainer,
NavListWrapper,
MobileMenu,
Mobile,
} from './style';
import { ReactComponent as MenuIcon } from '@static/icons/menu.svg';
import GithubIcon from '@static/icons/github.svg';
import LinkedInIcon from '@static/icons/linkedin.svg';
import ExternalLink from '@common/ExternalLink';
// Section names rendered in the nav; anchors use the lowercased name.
const NAV_ITEMS = ['About', 'Features', 'Team', 'FAQ'];

// External social links rendered as buttons next to the nav items.
const SOCIAL = [
  {
    icon: GithubIcon,
    name: 'Star',
    link: 'https://github.com/oslabs-beta/react-fetch-tree',
  },
  {
    icon: LinkedInIcon,
    name: 'Follow',
    link: 'https://www.linkedin.com/company/react-fetch-tree/',
  },
];
class Navbar extends Component {
state = {
mobileMenuOpen: false,
};
toggleMobileMenu = () => {
this.setState(prevState => ({ mobileMenuOpen: !prevState.mobileMenuOpen }));
};
closeMobileMenu = () => {
if (this.state.mobileMenuOpen) {
this.setState({ mobileMenuOpen: false });
}
};
getNavAnchorLink = item => (
<AnchorLink href={`#${item.toLowerCase()}`} onClick={this.closeMobileMenu}>
{item}
</AnchorLink>
);
getNavList = ({ mobile = false }) => (
<NavListWrapper mobile={mobile}>
<StyledContainer>
<SocialIcons>
{SOCIAL.map(({ icon, link, name }) => (
<ExternalLink
style={{ textDecoration: 'none', color: 'black' }}
key={link}
href={link}
>
<div
style={{
display: 'flex',
flexDirection: 'row',
flexWrap: 'nowrap',
alignItems: 'flex-end',
backgroundColor: '#7bc19c',
borderRadius: '20px',
padding: `7px ${name.length + 7}px 7px ${name.length}px`,
marginLeft: '0.75em',
':hover': { backgroundColor: '#BCDBCA' },
}}
>
<img src={icon} alt="link" />
<p
style={{
textDecoration: 'none',
textAlign: 'center',
alignSelf: 'flex-end',
fontFamily: `${props => props.theme.font.secondary}`,
fontSize: '18px',
lineHeight: '25px',
margin: '-6px 0 0 0',
color: 'black',
}}
>
{name}
</p>
</div>
</ExternalLink>
))}
</SocialIcons>
<Scrollspy
items={NAV_ITEMS.map(item => item.toLowerCase())}
currentClassName="active"
mobile={mobile}
offset={-64}
>
{NAV_ITEMS.map(navItem => (
<NavItem key={navItem}>{this.getNavAnchorLink(navItem)}</NavItem>
))}
</Scrollspy>
</StyledContainer>
</NavListWrapper>
);
render() {
const { mobileMenuOpen } = this.state;
return (
<Nav {...this.props}>
<StyledContainer>
<Brand>React Fetch Tree</Brand>
<Mobile>
<button onClick={this.toggleMobileMenu} style={{ color: 'black' }}>
<MenuIcon />
</button>
</Mobile>
<Mobile hide>{this.getNavList({})}</Mobile>
</StyledContainer>
<Mobile>
{mobileMenuOpen && (
<MobileMenu>
<Container>{this.getNavList({ mobile: true })}</Container>
</MobileMenu>
)}
</Mobile>
</Nav>
);
}
}
// Horizontal row of social link buttons; icons are fixed at 24px and the
// whole block gains top margin when it wraps below the nav on small screens.
const SocialIcons = styled.div`
  display: flex;

  img {
    margin: 0 8px;
    width: 24px;
    height: 24px;
  }

  @media (max-width: ${props => props.theme.screen.sm}) {
    margin-top: 40px;
  }
`;
export default Navbar;
|
-- Highest salary in each department.
-- The aggregate is aliased so the result column has a stable, portable
-- name instead of a driver-dependent default like "MAX(salary)".
SELECT department, MAX(salary) AS max_salary
FROM employees
GROUP BY department;
from sklearn.ensemble import RandomForestClassifier
import joblib
def load_model(MODEL_NAME: str) -> RandomForestClassifier:
    """Load a pre-trained RandomForestClassifier from disk.

    Args:
        MODEL_NAME (str): Path of the serialized (joblib) model file.

    Returns:
        RandomForestClassifier: The deserialized model.

    Raises:
        TypeError: If the file does not contain a RandomForestClassifier.
    """
    model = joblib.load(MODEL_NAME)
    if isinstance(model, RandomForestClassifier):
        return model
    raise TypeError("The loaded model is not a RandomForestClassifier.")
import {Injectable} from 'angular2/core';
import {BackendService} from './backend';
import {User} from '../model/user';
@Injectable()
export class AuthenticationService {
    /** Parsed auth payload ({id, name, token, ...}) or {} when logged out. */
    private auth: {};

    constructor(private backend: BackendService){}

    /**
     * POST credentials to the identity callback; on success cache the auth
     * payload, persist it and load the current user.
     */
    login(username: string, password: string){
        return this.backend.post({
            "username": username,
            "password": password
        }, "auth/identity/callback",[], false)
        .then((auth) => {
            this.auth = auth;
            this.setAuth();
            this.currentUser();
        });
    }

    loginWith(provider: string){
        // call get auth/:provider
    }

    /**
     * Synchronize auth state between memory, localStorage and the
     * backend's Authorization header.
     */
    setAuth(){
        if (!this.userLoaded()){
            var stored = localStorage.getItem("auth");
            if (stored){
                // FIX: localStorage holds a JSON *string*; it must be parsed
                // before reading the token. The previous code read
                // `auth.token` off the raw string, which is always undefined,
                // so the Authorization header was never restored.
                this.auth = JSON.parse(stored);
                this.backend.setHeader("Authorization", this.auth["token"]);
            }
        }
        else {
            this.backend.setHeader("Authorization", this.auth["token"]);
        }
        if (this.auth) localStorage.setItem("auth", JSON.stringify(this.auth));
    }

    /** Forget the session: clear memory, header and persisted copy. */
    clearAuth(){
        this.auth = {};
        this.backend.removeHeader("Authorization");
        localStorage.removeItem("auth");
    }

    /** Return {id, name} of the logged-in user, or undefined. */
    currentUser(){
        this.setAuth();
        if (!this.userLoaded()) return undefined;
        return {
            id: this.auth["id"],
            name: this.auth["name"]
        }
    }

    /**
     * True when an auth payload with an id is loaded.
     * (The former `this.auth === {}` comparison was removed: an object
     * literal is a fresh object, so that test could never be true.)
     */
    private userLoaded(): boolean {
        return (!!this.auth && this.auth["id"] !== undefined);
    }
}
|
/*
* Activiti Modeler component part of the Activiti project
* Copyright 2005-2014 Alfresco Software, Ltd. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
'use strict';

var KISBPM = KISBPM || {};

// Maps each property-editor type to the Angular templates used to render
// it. Simple types ("boolean") use a single template; all other types pair
// a read-mode display template with a write-mode editing template.
KISBPM.PROPERTY_CONFIG =
{
    "string": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/default-value-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/string-property-write-mode-template.html"
    },
    "boolean": {
        "templateUrl": "../../editor-app/configuration/properties/boolean-property-template.html"
    },
    "text": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/default-value-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/text-property-write-template.html"
    },
    "kisbpm-multiinstance": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/default-value-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/multiinstance-property-write-template.html"
    },
    "oryx-formproperties-complex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/form-properties-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/form-properties-write-template.html"
    },
    "oryx-executionlisteners-multiplecomplex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/execution-listeners-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/execution-listeners-write-template.html"
    },
    "oryx-tasklisteners-multiplecomplex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/task-listeners-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/task-listeners-write-template.html"
    },
    "oryx-eventlisteners-multiplecomplex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/event-listeners-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/event-listeners-write-template.html"
    },
    "oryx-usertaskassignment-complex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/assignment-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/assignment-write-template.html"
    },
    "oryx-servicetaskfields-complex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/fields-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/fields-write-template.html"
    },
    "oryx-callactivityinparameters-complex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/in-parameters-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/in-parameters-write-template.html"
    },
    "oryx-callactivityoutparameters-complex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/out-parameters-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/out-parameters-write-template.html"
    },
    "oryx-subprocessreference-complex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/subprocess-reference-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/subprocess-reference-write-template.html"
    },
    "oryx-sequencefloworder-complex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/sequenceflow-order-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/sequenceflow-order-write-template.html"
    },
    "oryx-conditionsequenceflow-complex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/condition-expression-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/condition-expression-write-template.html"
    },
    "oryx-signaldefinitions-multiplecomplex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/signal-definitions-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/signal-definitions-write-template.html"
    },
    "oryx-signalref-string": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/default-value-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/signal-property-write-template.html"
    },
    "oryx-messagedefinitions-multiplecomplex": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/message-definitions-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/message-definitions-write-template.html"
    },
    "oryx-messageref-string": {
        "readModeTemplateUrl": "../../editor-app/configuration/properties/default-value-display-template.html",
        "writeModeTemplateUrl": "../../editor-app/configuration/properties/message-property-write-template.html"
    }
};
|
#!/usr/bin/env bash
# Build and run the unit test binaries (nvme, ioat, json, jsonrpc, log)
# with compiler warnings treated as errors.
set -xe   # trace each command and abort on the first failure

make config.h CONFIG_WERROR=y

# NVMe driver unit tests.
make -C test/lib/nvme/unit CONFIG_WERROR=y
test/lib/nvme/unit/nvme_c/nvme_ut
test/lib/nvme/unit/nvme_ctrlr_c/nvme_ctrlr_ut
test/lib/nvme/unit/nvme_ctrlr_cmd_c/nvme_ctrlr_cmd_ut
test/lib/nvme/unit/nvme_ns_cmd_c/nvme_ns_cmd_ut
test/lib/nvme/unit/nvme_qpair_c/nvme_qpair_ut

# IOAT driver unit tests.
make -C test/lib/ioat/unit CONFIG_WERROR=y
test/lib/ioat/unit/ioat_ut

# JSON parser/util/writer unit tests.
make -C test/lib/json CONFIG_WERROR=y
test/lib/json/parse/json_parse_ut
test/lib/json/util/json_util_ut
test/lib/json/write/json_write_ut

# JSON-RPC server tests need the log and json libraries built first.
make -C lib/log CONFIG_WERROR=y
make -C lib/json CONFIG_WERROR=y
make -C test/lib/jsonrpc CONFIG_WERROR=y
test/lib/jsonrpc/server/jsonrpc_server_ut

# Logging unit tests.
make -C test/lib/log CONFIG_WERROR=y
test/lib/log/log_ut
|
/*
* Copyright (c) 2015, EURECOM (www.eurecom.fr)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those
* of the authors and should not be interpreted as representing official policies,
* either expressed or implied, of the FreeBSD Project.
*/
#ifndef UI_FILTERS_H_
#define UI_FILTERS_H_

#include <stdint.h>

#include "itti_types.h"

/* NOTE(review): this header uses Gtk types (GtkWidget, gboolean) but does
 * not include <gtk/gtk.h> itself — consumers presumably include Gtk before
 * this header; confirm before changing include order. */

#define SIGNAL_NAME_LENGTH 100
#define COLOR_SIZE 10

/* Kinds of filters the message-viewer UI supports. */
typedef enum
{
  FILTER_UNKNOWN, FILTER_MESSAGES, FILTER_ORIGIN_TASKS, FILTER_DESTINATION_TASKS, FILTER_INSTANCES,
} ui_filter_e;

/* Tri-state enabled flag for a filter entry. */
typedef enum
{
  ENTRY_ENABLED_FALSE, ENTRY_ENABLED_TRUE, ENTRY_ENABLED_UNDEFINED,
} ui_entry_enabled_e;

/* One filterable entry (a message type, a task, or an instance). */
typedef struct
{
  uint32_t id;                    /* numeric identifier of the entry */
  char name[SIGNAL_NAME_LENGTH];  /* display name */
  uint8_t enabled;                /* non-zero when the entry passes the filter */
  char foreground[COLOR_SIZE];    /* foreground color */
  char background[COLOR_SIZE];    /* background color */
  GtkWidget *menu_item;           /* menu widget bound to this entry */
} ui_filter_item_t;

/* Growable array of filter entries. */
typedef struct
{
  char *name;               /* filter display name */
  uint32_t allocated;       /* capacity of items */
  uint32_t used;            /* number of valid entries in items */
  ui_filter_item_t *items;  /* entry storage */
} ui_filter_t;

/* The full set of filters applied to the message view. */
typedef struct
{
  gboolean filters_enabled;       /* master on/off switch */
  ui_filter_t messages;           /* filter by message type */
  ui_filter_t origin_tasks;       /* filter by sending task */
  ui_filter_t destination_tasks;  /* filter by receiving task */
  ui_filter_t instances;          /* filter by instance */
} ui_filters_t;

extern ui_filters_t ui_filters;

int ui_init_filters(int reset, int clear_ids);
gboolean ui_filters_enable(gboolean enabled);
int ui_filters_search_id(ui_filter_t *filter, uint32_t value);
void ui_filters_add(ui_filter_e filter, uint32_t value, const char *name, ui_entry_enabled_e entry_enabled,
                    const char *foreground, const char *background);
gboolean ui_filters_message_enabled(const uint32_t message, const uint32_t origin_task, const uint32_t destination_task,
                                    const uint32_t instance);
int ui_filters_read(const char *file_name);
int ui_filters_file_write(const char *file_name);
void ui_create_filter_menus(void);
void ui_destroy_filter_menus(void);
void ui_destroy_filter_menu(ui_filter_e filter);
void ui_show_filter_menu(GtkWidget **menu, ui_filter_t *filter);

#endif /* UI_FILTERS_H_ */
|
#!/usr/bin/env bash
# Set up every package listed in pip_common.sh for local development:
# upgrade pip, remove previously installed copies, then run
# `setup.py develop` on each package from its sibling checkout.
source ./pip_common.sh   # provides $PACKAGES, $bold, $normal

# -------------------------------------
echo "Ensuring PIP is upgraded"
pip install --upgrade pip

# -------------------------------------
./pip_uninstall_all.sh

# -------------------------------------
echo "Installing packages for development"
# FIX: removed the unused EXIT="" variable and quoted the package path so
# package names containing unusual characters cannot word-split.
for pkg in $PACKAGES; do
    # Run in a subshell so a failing cd/build does not change our cwd.
    if ! (cd "../$pkg" && python setup.py develop ); then
        echo "Development setup of $bold${pkg}$normal failed" >&2
        exit 1
    fi
done
package com.ty.fm.models;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
/**
 * Request wrapper carrying a single relationship line under the top-level
 * {@code "data"} JSON member (JSON:API style). Null fields are omitted from
 * serialization via {@code JsonInclude.NON_NULL}; accessors and constructors
 * are generated by Lombok.
 */
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class RequestRelationshipLine
{
    /** Payload of the request, serialized as the "data" member. */
    @JsonProperty("data")
    private RequestRelationshipLineData requestRelationshipLineData;
}
|
<filename>sputnik/vector_utils.h<gh_stars>100-1000
// Copyright 2020 The Sputnik Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef THIRD_PARTY_SPUTNIK_VECTOR_UTILS_H_
#define THIRD_PARTY_SPUTNIK_VECTOR_UTILS_H_
/**
* @file @brief Utilities for working with CUDA vector data types.
*/
#include "sputnik/cuda_utils.h"
#include "sputnik/type_utils.h"
namespace sputnik {
/**
* @brief Functor for computing FMAs & MULs on mixes of vector and scalar
* data types.
*/
// Generic declaration: each specialization below implements FMA/Mul/Dot for
// one scalar or vector value type.
template <typename Value>
struct VectorCompute {
  typedef typename TypeUtils<Value>::Accumulator Accumulator;

  // out[0] += x1 * x2, applied lane-wise when Value is a vector type.
  static __device__ __forceinline__ void FMA(float x1, Value x2,
                                             Accumulator *out);

  // Complementary index type to our load type.
  typedef typename Value2Index<Value>::Index Index;

  // out[0] = x1 * x2, applied lane-wise to the index vector.
  static __device__ __forceinline__ void Mul(int, Index x2, Index *out);

  // out[0] += dot(x1, x2), accumulating across all lanes.
  static __device__ __forceinline__ void Dot(Value x1, Value x2,
                                             Accumulator *out);
};

// Scalar float specialization: single-lane FMA/Mul/Dot.
template <>
struct VectorCompute<float> {
  static __device__ __forceinline__ void FMA(float x1, float x2, float *out) {
    out[0] += x1 * x2;
  }
  static __device__ __forceinline__ void Mul(int x1, int x2, int *out) {
    out[0] = x1 * x2;
  }
  static __device__ __forceinline__ void Dot(float x1, float x2, float *out) {
    out[0] += x1 * x2;
  }
};

// Two-lane float specialization; Dot reduces both lanes into out[0].
template <>
struct VectorCompute<float2> {
  static __device__ __forceinline__ void FMA(float x1, float2 x2, float2 *out) {
    out[0].x += x1 * x2.x;
    out[0].y += x1 * x2.y;
  }
  static __device__ __forceinline__ void Mul(int x1, int2 x2, int2 *out) {
    out[0].x = x1 * x2.x;
    out[0].y = x1 * x2.y;
  }
  static __device__ __forceinline__ void Dot(float2 x1, float2 x2, float *out) {
    out[0] += x1.x * x2.x;
    out[0] += x1.y * x2.y;
  }
};

// Four-lane float specialization; Dot reduces all four lanes into out[0].
template <>
struct VectorCompute<float4> {
  static __device__ __forceinline__ void FMA(float x1, float4 x2, float4 *out) {
    out[0].x += x1 * x2.x;
    out[0].y += x1 * x2.y;
    out[0].z += x1 * x2.z;
    out[0].w += x1 * x2.w;
  }
  static __device__ __forceinline__ void Mul(int x1, int4 x2, int4 *out) {
    out[0].x = x1 * x2.x;
    out[0].y = x1 * x2.y;
    out[0].z = x1 * x2.z;
    out[0].w = x1 * x2.w;
  }
  static __device__ __forceinline__ void Dot(float4 x1, float4 x2, float *out) {
    out[0] += x1.x * x2.x;
    out[0] += x1.y * x2.y;
    out[0] += x1.z * x2.z;
    out[0] += x1.w * x2.w;
  }
};

// Two-lane half specialization: FMA widens to float2 before accumulating;
// Mul operates on the matching short2 index type.
template <>
struct VectorCompute<half2> {
  static __device__ __forceinline__ void FMA(float x1, half2 x2, float2 *out) {
    float2 x2_f2 = __half22float2(x2);
    VectorCompute<float2>::FMA(x1, x2_f2, out);
  }
  static __device__ __forceinline__ void Mul(int x1, short2 x2, short2 *out) {
    out[0].x = static_cast<short>(x1 * x2.x);
    out[0].y = static_cast<short>(x1 * x2.y);
  }
};

// Four-lane half specialization, built from two half2 halves.
template <>
struct VectorCompute<half4> {
  static __device__ __forceinline__ void FMA(float x1, half4 x2, float4 *out) {
    // Widen both half2 halves to float2, then reuse the float4 FMA.
    float2 x2x_f2 = __half22float2(x2.x);
    float2 x2y_f2 = __half22float2(x2.y);
    float4 x2_f4 = make_float4(x2x_f2.x, x2x_f2.y, x2y_f2.x, x2y_f2.y);
    VectorCompute<float4>::FMA(x1, x2_f4, out);
  }
  static __device__ __forceinline__ void Mul(int x1, short4 x2, short4 *out) {
    VectorCompute<half2>::Mul(x1, x2.x, &out[0].x);
    VectorCompute<half2>::Mul(x1, x2.y, &out[0].y);
  }
};

// Eight-lane half specialization; FMA accumulates into two consecutive
// float4 outputs (out and out + 1).
template <>
struct VectorCompute<half8> {
  static __device__ __forceinline__ void FMA(float x1, half8 x2, float4 *out) {
    half4 x2x_h4;
    x2x_h4.x = x2.x;
    x2x_h4.y = x2.y;
    VectorCompute<half4>::FMA(x1, x2x_h4, out);
    half4 x2y_h4;
    x2y_h4.x = x2.z;
    x2y_h4.y = x2.w;
    VectorCompute<half4>::FMA(x1, x2y_h4, out + 1);
  }
  static __device__ __forceinline__ void Mul(int x1, short8 x2, short8 *out) {
    VectorCompute<half2>::Mul(x1, x2.x, &out[0].x);
    VectorCompute<half2>::Mul(x1, x2.y, &out[0].y);
    VectorCompute<half2>::Mul(x1, x2.z, &out[0].z);
    VectorCompute<half2>::Mul(x1, x2.w, &out[0].w);
  }
};
} // namespace sputnik
#endif // THIRD_PARTY_SPUTNIK_VECTOR_UTILS_H_
|
#!/usr/bin/env bash
# The MIT License (MIT)
#
# Copyright (c) 2021 Alessandro De Blasis <alex@deblasis.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Block until centralcommandsvc is accepting TCP connections on port 9482
# (up to 60 seconds), then hand control to the service binary.
./wait-for-it.sh centralcommandsvc:9482 --timeout=60 -- /exe
|
#!/bin/sh
#
# Copyright (c) 2004-2005 The Trustees of Indiana University and Indiana
# University Research and Technology
# Corporation. All rights reserved.
# Copyright (c) 2004-2005 The University of Tennessee and The University
# of Tennessee Research Foundation. All rights
# reserved.
# Copyright (c) 2004-2005 High Performance Computing Center Stuttgart,
# University of Stuttgart. All rights reserved.
# Copyright (c) 2004-2005 The Regents of the University of California.
# All rights reserved.
# Copyright (c) 2019 Intel, Inc. All rights reserved.
# Copyright (c) 2020 Cisco Systems, Inc. All rights reserved
# $COPYRIGHT$
#
# Additional copyrights may follow
#
# $HEADER$
#
# Regenerate the PowerPC assembly listings from atomic.h and timer.h by
# compiling a small wrapper C file with gcc at -S.
CFILE=/tmp/prte_asm_$$.c

# Remove the scratch C file on normal exit, HUP, INT or TERM.
trap "/bin/rm -f $CFILE; exit 0" 0 1 2 15

echo Updating asm.s from atomic.h and timer.h using gcc
cat > $CFILE<<EOF
#include <stdlib.h>
#include <inttypes.h>
#define static
#define inline
#define PRTE_GCC_INLINE_ASSEMBLY 1
#include "../architecture.h"
#include "atomic.h"
#include "timer.h"
EOF
# FIX: macro names made consistent with the 32-bit line — the arch value
# must carry the PRTE_ prefix (was bare POWERPC32 on the first line) and
# the support macro is PRTE_ASM_SUPPORT_64BIT (the 64-bit line was missing
# the underscore: PRTE_ASM_SUPPORT64BIT).
gcc -O1 -mpowerpc64 -mcpu=970 -DPRTE_ASSEMBLY_ARCH=PRTE_POWERPC32 -DPRTE_ASM_SUPPORT_64BIT=1 -I. -S $CFILE -o asm-32-64.s
gcc -O1 -DPRTE_ASSEMBLY_ARCH=PRTE_POWERPC32 -DPRTE_ASM_SUPPORT_64BIT=0 -I. -S $CFILE -o asm-32.s
gcc -m64 -O1 -finline-functions -DPRTE_ASSEMBLY_ARCH=PRTE_POWERPC64 -DPRTE_ASM_SUPPORT_64BIT=1 -I. -S $CFILE -o asm-64.s
|
def find_duplicates(input_list):
    """Return every element that has already appeared earlier in input_list.

    An element occurring k times contributes k-1 entries to the result,
    in the order the repeats are encountered.
    """
    seen = set()
    repeats = []
    for element in input_list:
        if element in seen:
            repeats.append(element)
        else:
            seen.add(element)
    return repeats


# Demo: 2 and 6 each appear twice in the sample list.
result = find_duplicates([2, 3, 5, 6, 7, 2, 6])
print(result)
import logging
class Logger:
    """Factory for loggers that all share one stream handler."""

    class LogHelper:
        # Single shared handler so every logger writes to the same stream.
        handler = logging.StreamHandler()

    @staticmethod
    def get_logger(name, level=logging.DEBUG):
        """Return the logger registered under ``name``, set to ``level``,
        with the shared handler attached exactly once.

        FIX: ``logging.getLogger`` caches loggers, so the original
        unconditionally re-added the handler on every call, duplicating
        each emitted line once per ``get_logger`` call.
        """
        logger = logging.getLogger(name)
        logger.setLevel(level)
        if Logger.LogHelper.handler not in logger.handlers:
            logger.addHandler(Logger.LogHelper.handler)
        return logger
import React from "react";
import { connect } from "react-redux";
import { fetchNoticias } from "./actionCreator";
const FetchNoticia = ({ fetchNoticias }) => {
return <button onClick={fetchNoticias}>Fetch Noticias</button>;
};
const mapDispatchToProps = (dispatch) => {
return {
fetchNoticias() {
dispatch(fetchNoticias());
}
};
};
export default connect(null, mapDispatchToProps)(FetchNoticia);
|
<filename>src/pages/BeerMap/styles.ts
import { Animated } from 'react-native'
import { RectButton } from 'react-native-gesture-handler'
import styled from 'styled-components/native'
// Full-screen view that centers the map and its overlays.
export const Container = styled.View`
flex: 1;
align-items: center;
justify-content: center;
`
// Rounded translucent-yellow bubble shown inside a map-marker callout.
export const CalloutContainer = styled.View`
width: 170px;
padding: 12px;
background: rgba(255, 192, 0, 0.8);
border-radius: 16px;
flex-direction: row;
justify-content: center;
align-items: center;
`
// Small circular thumbnail rendered at the left of the callout.
export const CalloutImage = styled.Image`
width: 32px;
height: 32px;
border-radius: 16px;
`
// Callout label; flex: 1 lets it fill the space next to the image.
export const CalloutText = styled.Text`
flex: 1;
color: #282828;
font-size: 14px;
font-family: 'Poppins_600SemiBold';
text-align: center;
`
// Floating yellow pill button (RectButton for native ripple) anchored
// near the bottom of the map, used to switch to the list view.
export const ListIconContainer = styled(RectButton)`
position: absolute;
flex-direction: row;
align-items: center;
justify-content: center;
border-radius: 24px;
padding: 14px;
width: 90px;
background: #ffc000;
opacity: 0.8;
bottom: 23%;
`
// Label inside the floating list button.
export const ListIconText = styled.Text`
font-family: 'Poppins_600SemiBold';
color: #282828;
margin-left: 2px;
`
import { Field, ObjectType } from "type-graphql";
/** GraphQL user object exposed by the schema. */
@ObjectType()
export default class User {
  /** Unique identifier. */
  @Field(() => String)
  id: string;

  /** Display name. */
  @Field(() => String)
  name: string;

  /** Login handle, distinct from the display name. */
  @Field(() => String)
  username: string;
}
|
/* AUTO-GENERATED FILE. DO NOT MODIFY.
*
* This class was automatically generated by the
* aapt tool from the resource data it found. It
* should not be modified by hand.
*/
package com.google.android.gms;
public final class R {
public static final class anim {
}
public static final class attr {
public static final int adSize = 0x7f010000;
public static final int adSizes = 0x7f010001;
public static final int adUnitId = 0x7f010002;
public static final int allowShortcuts = 0x7f010012;
public static final int buttonSize = 0x7f010026;
public static final int circleCrop = 0x7f01001d;
public static final int colorScheme = 0x7f010027;
public static final int contentProviderUri = 0x7f010005;
public static final int corpusId = 0x7f010003;
public static final int corpusVersion = 0x7f010004;
public static final int defaultIntentAction = 0x7f01000f;
public static final int defaultIntentActivity = 0x7f010011;
public static final int defaultIntentData = 0x7f010010;
public static final int featureType = 0x7f010025;
public static final int imageAspectRatio = 0x7f01001c;
public static final int imageAspectRatioAdjust = 0x7f01001b;
public static final int indexPrefixes = 0x7f010022;
public static final int inputEnabled = 0x7f010015;
public static final int noIndex = 0x7f010020;
public static final int paramName = 0x7f01000a;
public static final int paramValue = 0x7f01000b;
public static final int perAccountTemplate = 0x7f010009;
public static final int schemaOrgProperty = 0x7f010024;
public static final int schemaOrgType = 0x7f010007;
public static final int scopeUris = 0x7f010028;
public static final int searchEnabled = 0x7f01000c;
public static final int searchLabel = 0x7f01000d;
public static final int sectionContent = 0x7f010014;
public static final int sectionFormat = 0x7f01001f;
public static final int sectionId = 0x7f01001e;
public static final int sectionType = 0x7f010013;
public static final int sectionWeight = 0x7f010021;
public static final int semanticallySearchable = 0x7f010008;
public static final int settingsDescription = 0x7f01000e;
public static final int sourceClass = 0x7f010016;
public static final int subsectionSeparator = 0x7f010023;
public static final int toAddressesSection = 0x7f01001a;
public static final int trimmable = 0x7f010006;
public static final int userInputSection = 0x7f010018;
public static final int userInputTag = 0x7f010017;
public static final int userInputValue = 0x7f010019;
}
public static final class bool {
}
public static final class color {
public static final int common_action_bar_splitter = 0x7f0a0000;
public static final int common_google_signin_btn_text_dark = 0x7f0a0011;
public static final int common_google_signin_btn_text_dark_default = 0x7f0a0001;
public static final int common_google_signin_btn_text_dark_disabled = 0x7f0a0002;
public static final int common_google_signin_btn_text_dark_focused = 0x7f0a0003;
public static final int common_google_signin_btn_text_dark_pressed = 0x7f0a0004;
public static final int common_google_signin_btn_text_light = 0x7f0a0012;
public static final int common_google_signin_btn_text_light_default = 0x7f0a0005;
public static final int common_google_signin_btn_text_light_disabled = 0x7f0a0006;
public static final int common_google_signin_btn_text_light_focused = 0x7f0a0007;
public static final int common_google_signin_btn_text_light_pressed = 0x7f0a0008;
public static final int common_plus_signin_btn_text_dark = 0x7f0a0013;
public static final int common_plus_signin_btn_text_dark_default = 0x7f0a0009;
public static final int common_plus_signin_btn_text_dark_disabled = 0x7f0a000a;
public static final int common_plus_signin_btn_text_dark_focused = 0x7f0a000b;
public static final int common_plus_signin_btn_text_dark_pressed = 0x7f0a000c;
public static final int common_plus_signin_btn_text_light = 0x7f0a0014;
public static final int common_plus_signin_btn_text_light_default = 0x7f0a000d;
public static final int common_plus_signin_btn_text_light_disabled = 0x7f0a000e;
public static final int common_plus_signin_btn_text_light_focused = 0x7f0a000f;
public static final int common_plus_signin_btn_text_light_pressed = 0x7f0a0010;
}
public static final class dimen {
}
public static final class drawable {
public static final int common_full_open_on_phone = 0x7f020047;
public static final int common_google_signin_btn_icon_dark = 0x7f020048;
public static final int common_google_signin_btn_icon_dark_disabled = 0x7f020049;
public static final int common_google_signin_btn_icon_dark_focused = 0x7f02004a;
public static final int common_google_signin_btn_icon_dark_normal = 0x7f02004b;
public static final int common_google_signin_btn_icon_dark_pressed = 0x7f02004c;
public static final int common_google_signin_btn_icon_light = 0x7f02004d;
public static final int common_google_signin_btn_icon_light_disabled = 0x7f02004e;
public static final int common_google_signin_btn_icon_light_focused = 0x7f02004f;
public static final int common_google_signin_btn_icon_light_normal = 0x7f020050;
public static final int common_google_signin_btn_icon_light_pressed = 0x7f020051;
public static final int common_google_signin_btn_text_dark = 0x7f020052;
public static final int common_google_signin_btn_text_dark_disabled = 0x7f020053;
public static final int common_google_signin_btn_text_dark_focused = 0x7f020054;
public static final int common_google_signin_btn_text_dark_normal = 0x7f020055;
public static final int common_google_signin_btn_text_dark_pressed = 0x7f020056;
public static final int common_google_signin_btn_text_light = 0x7f020057;
public static final int common_google_signin_btn_text_light_disabled = 0x7f020058;
public static final int common_google_signin_btn_text_light_focused = 0x7f020059;
public static final int common_google_signin_btn_text_light_normal = 0x7f02005a;
public static final int common_google_signin_btn_text_light_pressed = 0x7f02005b;
public static final int common_ic_googleplayservices = 0x7f02005c;
public static final int common_plus_signin_btn_icon_dark = 0x7f02005d;
public static final int common_plus_signin_btn_icon_dark_disabled = 0x7f02005e;
public static final int common_plus_signin_btn_icon_dark_focused = 0x7f02005f;
public static final int common_plus_signin_btn_icon_dark_normal = 0x7f020060;
public static final int common_plus_signin_btn_icon_dark_pressed = 0x7f020061;
public static final int common_plus_signin_btn_icon_light = 0x7f020062;
public static final int common_plus_signin_btn_icon_light_disabled = 0x7f020063;
public static final int common_plus_signin_btn_icon_light_focused = 0x7f020064;
public static final int common_plus_signin_btn_icon_light_normal = 0x7f020065;
public static final int common_plus_signin_btn_icon_light_pressed = 0x7f020066;
public static final int common_plus_signin_btn_text_dark = 0x7f020067;
public static final int common_plus_signin_btn_text_dark_disabled = 0x7f020068;
public static final int common_plus_signin_btn_text_dark_focused = 0x7f020069;
public static final int common_plus_signin_btn_text_dark_normal = 0x7f02006a;
public static final int common_plus_signin_btn_text_dark_pressed = 0x7f02006b;
public static final int common_plus_signin_btn_text_light = 0x7f02006c;
public static final int common_plus_signin_btn_text_light_disabled = 0x7f02006d;
public static final int common_plus_signin_btn_text_light_focused = 0x7f02006e;
public static final int common_plus_signin_btn_text_light_normal = 0x7f02006f;
public static final int common_plus_signin_btn_text_light_pressed = 0x7f020070;
}
public static final class id {
public static final int adjust_height = 0x7f0d000d;
public static final int adjust_width = 0x7f0d000e;
public static final int auto = 0x7f0d001d;
public static final int contact = 0x7f0d000a;
public static final int dark = 0x7f0d001e;
public static final int demote_common_words = 0x7f0d0014;
public static final int demote_rfc822_hostnames = 0x7f0d0015;
public static final int email = 0x7f0d000b;
public static final int html = 0x7f0d0010;
public static final int icon_only = 0x7f0d001a;
public static final int icon_uri = 0x7f0d0000;
public static final int index_entity_types = 0x7f0d0016;
public static final int instant_message = 0x7f0d000c;
public static final int intent_action = 0x7f0d0001;
public static final int intent_activity = 0x7f0d0002;
public static final int intent_data = 0x7f0d0003;
public static final int intent_data_id = 0x7f0d0004;
public static final int intent_extra_data = 0x7f0d0005;
public static final int large_icon_uri = 0x7f0d0006;
public static final int light = 0x7f0d001f;
public static final int match_global_nicknames = 0x7f0d0017;
public static final int none = 0x7f0d000f;
public static final int omnibox_title_section = 0x7f0d0018;
public static final int omnibox_url_section = 0x7f0d0019;
public static final int plain = 0x7f0d0011;
public static final int rfc822 = 0x7f0d0012;
public static final int standard = 0x7f0d001b;
public static final int text1 = 0x7f0d0007;
public static final int text2 = 0x7f0d0008;
public static final int thing_proto = 0x7f0d0009;
public static final int url = 0x7f0d0013;
public static final int wide = 0x7f0d001c;
}
public static final class integer {
public static final int google_play_services_version = 0x7f0b0000;
}
public static final class interpolator {
}
public static final class layout {
}
public static final class raw {
}
public static final class string {
public static final int accept = 0x7f06001f;
public static final int common_google_play_services_api_unavailable_text = 0x7f060000;
public static final int common_google_play_services_enable_button = 0x7f060001;
public static final int common_google_play_services_enable_text = 0x7f060002;
public static final int common_google_play_services_enable_title = 0x7f060003;
public static final int common_google_play_services_install_button = 0x7f060004;
public static final int common_google_play_services_install_text_phone = 0x7f060005;
public static final int common_google_play_services_install_text_tablet = 0x7f060006;
public static final int common_google_play_services_install_title = 0x7f060007;
public static final int common_google_play_services_invalid_account_text = 0x7f060008;
public static final int common_google_play_services_invalid_account_title = 0x7f060009;
public static final int common_google_play_services_network_error_text = 0x7f06000a;
public static final int common_google_play_services_network_error_title = 0x7f06000b;
public static final int common_google_play_services_notification_ticker = 0x7f06000c;
public static final int common_google_play_services_resolution_required_text = 0x7f06000d;
public static final int common_google_play_services_resolution_required_title = 0x7f06000e;
public static final int common_google_play_services_restricted_profile_text = 0x7f06000f;
public static final int common_google_play_services_restricted_profile_title = 0x7f060010;
public static final int common_google_play_services_sign_in_failed_text = 0x7f060011;
public static final int common_google_play_services_sign_in_failed_title = 0x7f060012;
public static final int common_google_play_services_unknown_issue = 0x7f060013;
public static final int common_google_play_services_unsupported_text = 0x7f060014;
public static final int common_google_play_services_unsupported_title = 0x7f060015;
public static final int common_google_play_services_update_button = 0x7f060016;
public static final int common_google_play_services_update_text = 0x7f060017;
public static final int common_google_play_services_update_title = 0x7f060018;
public static final int common_google_play_services_updating_text = 0x7f060019;
public static final int common_google_play_services_updating_title = 0x7f06001a;
public static final int common_google_play_services_wear_update_text = 0x7f06001b;
public static final int common_open_on_phone = 0x7f06001c;
public static final int common_signin_button_text = 0x7f06001d;
public static final int common_signin_button_text_long = 0x7f06001e;
public static final int create_calendar_message = 0x7f060023;
public static final int create_calendar_title = 0x7f060024;
public static final int decline = 0x7f060025;
public static final int store_picture_message = 0x7f06002f;
public static final int store_picture_title = 0x7f060030;
}
public static final class style {
public static final int Theme_IAPTheme = 0x7f070002;
}
public static final class styleable {
public static final int[] AdsAttrs = { 0x7f010000, 0x7f010001, 0x7f010002 };
public static final int AdsAttrs_adSize = 0;
public static final int AdsAttrs_adSizes = 1;
public static final int AdsAttrs_adUnitId = 2;
public static final int[] AppDataSearch = { };
public static final int[] Corpus = { 0x7f010003, 0x7f010004, 0x7f010005, 0x7f010006, 0x7f010007, 0x7f010008, 0x7f010009 };
public static final int Corpus_contentProviderUri = 2;
public static final int Corpus_corpusId = 0;
public static final int Corpus_corpusVersion = 1;
public static final int Corpus_perAccountTemplate = 6;
public static final int Corpus_schemaOrgType = 4;
public static final int Corpus_semanticallySearchable = 5;
public static final int Corpus_trimmable = 3;
public static final int[] FeatureParam = { 0x7f01000a, 0x7f01000b };
public static final int FeatureParam_paramName = 0;
public static final int FeatureParam_paramValue = 1;
public static final int[] GlobalSearch = { 0x7f01000c, 0x7f01000d, 0x7f01000e, 0x7f01000f, 0x7f010010, 0x7f010011 };
public static final int[] GlobalSearchCorpus = { 0x7f010012 };
public static final int GlobalSearchCorpus_allowShortcuts = 0;
public static final int[] GlobalSearchSection = { 0x7f010013, 0x7f010014 };
public static final int GlobalSearchSection_sectionContent = 1;
public static final int GlobalSearchSection_sectionType = 0;
public static final int GlobalSearch_defaultIntentAction = 3;
public static final int GlobalSearch_defaultIntentActivity = 5;
public static final int GlobalSearch_defaultIntentData = 4;
public static final int GlobalSearch_searchEnabled = 0;
public static final int GlobalSearch_searchLabel = 1;
public static final int GlobalSearch_settingsDescription = 2;
public static final int[] IMECorpus = { 0x7f010015, 0x7f010016, 0x7f010017, 0x7f010018, 0x7f010019, 0x7f01001a };
public static final int IMECorpus_inputEnabled = 0;
public static final int IMECorpus_sourceClass = 1;
public static final int IMECorpus_toAddressesSection = 5;
public static final int IMECorpus_userInputSection = 3;
public static final int IMECorpus_userInputTag = 2;
public static final int IMECorpus_userInputValue = 4;
public static final int[] LoadingImageView = { 0x7f01001b, 0x7f01001c, 0x7f01001d };
public static final int LoadingImageView_circleCrop = 2;
public static final int LoadingImageView_imageAspectRatio = 1;
public static final int LoadingImageView_imageAspectRatioAdjust = 0;
public static final int[] Section = { 0x7f01001e, 0x7f01001f, 0x7f010020, 0x7f010021, 0x7f010022, 0x7f010023, 0x7f010024 };
public static final int[] SectionFeature = { 0x7f010025 };
public static final int SectionFeature_featureType = 0;
public static final int Section_indexPrefixes = 4;
public static final int Section_noIndex = 2;
public static final int Section_schemaOrgProperty = 6;
public static final int Section_sectionFormat = 1;
public static final int Section_sectionId = 0;
public static final int Section_sectionWeight = 3;
public static final int Section_subsectionSeparator = 5;
public static final int[] SignInButton = { 0x7f010026, 0x7f010027, 0x7f010028 };
public static final int SignInButton_buttonSize = 0;
public static final int SignInButton_colorScheme = 1;
public static final int SignInButton_scopeUris = 2;
}
}
|
<reponame>lanpinguo/rootfs_build
/*
******************************************************************************
*
* isp_platform_drv.h
*
* Hawkview ISP - isp_platform_drv.h module
*
* Copyright (c) 2014 by Allwinnertech Co., Ltd. http:
*
* Version Author Date Description
*
* 2.0 <NAME> 2014/06/20 Second Version
*
******************************************************************************
*/
#ifndef _ISP_PLATFORM_DRV_H_
#define _ISP_PLATFORM_DRV_H_
#include <linux/string.h>
#include "bsp_isp_comm.h"
/* Physical (hardware) channel indices, as opposed to the logical
 * enum isp_channel declared in bsp_isp_comm.h. */
enum isp_channel_real {
	SUB_CH_REAL = 0,
	MAIN_CH_REAL = 1,
	ROT_CH_REAL = 2,
	ISP_MAX_CH_NUM_REAL,
};
/* Table of platform-specific BSP entry points implemented by each ISP
 * hardware revision and dispatched through isp_get_driver().
 * NOTE(review): the leading unsigned long argument of most callbacks is
 * presumably the ISP instance id or mapped register base — confirm against
 * the per-platform BSP implementations before documenting it as fact. */
struct isp_bsp_fun_array {
	void (*map_reg_addr) (unsigned long, unsigned long);
	void (*map_load_dram_addr) (unsigned long, unsigned long);
	void (*map_saved_dram_addr) (unsigned long, unsigned long);
	void (*isp_set_interface) (unsigned long, enum isp_src_interface, enum isp_src);
	void (*isp_enable) (unsigned long, int);
	void (*isp_ch_enable) (unsigned long, int, int);
	void (*isp_wdr_ch_seq) (unsigned long, int);
	void (*isp_set_para_ready) (unsigned long, enum ready_flag);
	unsigned int (*isp_get_para_ready) (unsigned long);
	void (*isp_capture_start) (unsigned long, int);
	void (*isp_capture_stop) (unsigned long, int);
	void (*isp_irq_enable) (unsigned long, unsigned int);
	void (*isp_irq_disable) (unsigned long, unsigned int);
	unsigned int (*isp_get_irq_status) (unsigned long, unsigned int);
	void (*isp_clr_irq_status) (unsigned long, unsigned int);
	void (*isp_debug_output_cfg) (unsigned long, int, int);
	int (*isp_int_get_enable) (unsigned long);
	void (*isp_set_line_int_num) (unsigned long, unsigned int);
	void (*isp_set_rot_of_line_num) (unsigned long, unsigned int);
	void (*isp_set_load_addr) (unsigned long, unsigned long);
	void (*isp_set_saved_addr) (unsigned long, unsigned long);
	void (*isp_set_table_addr) (unsigned long, enum isp_input_tables, unsigned long);
	void (*isp_set_statistics_addr) (unsigned long, unsigned long);
	void (*isp_channel_enable) (unsigned long, enum isp_channel);
	void (*isp_channel_disable) (unsigned long, enum isp_channel);
	void (*isp_reg_test) (unsigned long);
	void (*isp_print_reg_saved) (unsigned long);
	void (*isp_update_table) (unsigned long, unsigned short);
	void (*isp_set_output_speed) (unsigned long, enum isp_output_speed);
	unsigned int (*isp_get_isp_ver)(unsigned long, unsigned int *, unsigned int *);
};
/* Binding of a platform id to its callback table. */
struct isp_platform_drv {
	int platform_id;
	struct isp_bsp_fun_array *fun_array;
};
/* Register a platform driver with the dispatcher. */
int isp_platform_register(struct isp_platform_drv *isp_drv);
/* Select the active platform by id; call before isp_get_driver(). */
int isp_platform_init(unsigned int platform_id);
/* Return the currently selected platform driver. */
struct isp_platform_drv *isp_get_driver(void);
#endif /*_ISP_PLATFORM_DRV_H_*/
<filename>code/iaas/model/src/main/java/io/cattle/platform/core/constants/HealthcheckConstants.java
package io.cattle.platform.core.constants;
/**
 * String constants for the health states a healthcheck can report on a
 * monitored resource.
 */
public class HealthcheckConstants {

    public static final String HEALTH_STATE_HEALTHY = "healthy";
    public static final String HEALTH_STATE_UPDATING_HEALTHY = "updating-healthy";
    public static final String HEALTH_STATE_UNHEALTHY = "unhealthy";
    public static final String HEALTH_STATE_UPDATING_UNHEALTHY = "updating-unhealthy";
    public static final String HEALTH_STATE_INITIALIZING = "initializing";

    /** Constants holder; not meant to be instantiated. */
    private HealthcheckConstants() {
    }
}
|
package weixin.business.service;
import weixin.business.entity.WeixinFoodEntity;
import org.jeecgframework.core.common.service.CommonService;
import java.io.Serializable;
import java.util.List;
/**
 * Service interface for WeChat food/goods records.  Extends the generic
 * jeecg {@link CommonService} CRUD operations with SQL-enhancement hooks
 * and menu query helpers.
 */
public interface WeixinFoodServiceI extends CommonService{

    public <T> void delete(T entity);

    public <T> Serializable save(T entity);

    public <T> void saveOrUpdate(T entity);

    /**
     * Default-button SQL enhancement hook, invoked on the add operation.
     * @param t the food entity that was added
     * @return presumably true when the enhancement SQL ran — confirm in the implementation
     */
    public boolean doAddSql(WeixinFoodEntity t);

    /**
     * Default-button SQL enhancement hook, invoked on the update operation.
     * @param t the food entity that was updated
     * @return presumably true when the enhancement SQL ran — confirm in the implementation
     */
    public boolean doUpdateSql(WeixinFoodEntity t);

    /**
     * Default-button SQL enhancement hook, invoked on the delete operation.
     * @param t the food entity that was deleted
     * @return presumably true when the enhancement SQL ran — confirm in the implementation
     */
    public boolean doDelSql(WeixinFoodEntity t);

    /**
     * Promotional (on-sale) goods for a WeChat account.
     * @param accountid the WeChat account id
     * @return matching food records (raw List — element type not declared here)
     */
    public List getHotList(String accountid);

    /**
     * New-product recommendations for a WeChat account.
     * @param accountid the WeChat account id
     * @return matching food records (raw List — element type not declared here)
     */
    public List getNewList(String accountid);

    /**
     * Hot-selling goods for a WeChat account.
     * @param accountid the WeChat account id
     * @return matching food records (raw List — element type not declared here)
     */
    public List getHotSaleList(String accountid);
}
|
package ru.zzz.demo.sber.shs.rest.dto;
import com.fasterxml.jackson.annotation.JsonGetter;
import org.springframework.lang.NonNull;
/**
 * REST response body for a device-unregistration request.
 *
 * <p>NOTE(review): the "Unrigistered" misspelling is retained in the public
 * member names for source compatibility with existing callers; the JSON
 * property name is spelled correctly via {@code @JsonGetter}.
 */
public class DeviceUnregistrationResponseDto {

    /** Whether the device existed and was actually unregistered. */
    private final boolean deviceExistedAndWasUnrigistered;

    /**
     * Creates a response reflecting whether a device was really unregistered.
     *
     * @param wasUnregistered outcome flag; must not be null
     * @throws IllegalArgumentException if {@code wasUnregistered} is null
     */
    @NonNull
    public static DeviceUnregistrationResponseDto ofReallyUnregistered(Boolean wasUnregistered) {
        if (wasUnregistered == null) {
            // FIX: the original threw IllegalArgumentException with no message,
            // leaving nothing actionable in logs/stack traces.
            throw new IllegalArgumentException("wasUnregistered must not be null");
        }
        return new DeviceUnregistrationResponseDto(wasUnregistered);
    }

    private DeviceUnregistrationResponseDto(boolean deviceExistedAndWasUnrigistered) {
        this.deviceExistedAndWasUnrigistered = deviceExistedAndWasUnrigistered;
    }

    /** Serialized as the string "true"/"false" (kept for API compatibility). */
    @JsonGetter("device-existed-and-was-unregistered")
    public String isDeviceExistedAndWasUnrigistered() {
        return deviceExistedAndWasUnrigistered ? "true" : "false";
    }
}
|
package tree.declarations;
import tree.DefaultTreeNode;
import tree.symbols.TSAtomic;
import tree.symbols.TSConst;
import tree.symbols.TSRestrict;
import tree.symbols.TSVolatile;
/**
 * Parse-tree node for a C type qualifier ({@code const}, {@code restrict},
 * {@code volatile}, {@code _Atomic}).  Each constructor stores the
 * corresponding qualifier symbol as this node's child.
 */
public class TTypeQualifier extends DefaultTreeNode {

    /** Copy constructor: clones the given qualifier node. */
    public TTypeQualifier(TTypeQualifier node) {
        super(node);
    }

    /** Wraps a {@code const} qualifier symbol. */
    public TTypeQualifier(TSConst qualifier) {
        addChild(qualifier);
    }

    /** Wraps a {@code restrict} qualifier symbol. */
    public TTypeQualifier(TSRestrict qualifier) {
        addChild(qualifier);
    }

    /** Wraps a {@code volatile} qualifier symbol. */
    public TTypeQualifier(TSVolatile qualifier) {
        addChild(qualifier);
    }

    /** Wraps an {@code _Atomic} qualifier symbol. */
    public TTypeQualifier(TSAtomic qualifier) {
        addChild(qualifier);
    }
}
|
#!/bin/sh
# Test harness for the Yo compiler: runs preprocessor / parser / semantic /
# full-stack suites and diffs each stage's output against reference files.

# Default binaries and helper scripts used by the suites.
YO="./parser_test"
binaryoutput="./a.out"
preproc_path="preprocessor.py"
# Set time limit for all operations
ulimit -t 30
# Verbose diagnostics accumulate in $globallog; error is the per-test flag,
# globalerror becomes the script's exit status, keep is set by -k.
globallog=testall.log
rm -f $globallog
error=0
globalerror=0
keep=0
# Print usage text and exit non-zero.
Usage() {
echo "Usage: test.sh [options] [.yo files]"
echo "-k Keep intermediate files"
echo "-h Print this help"
exit 1
}
# SignalError <message>
# Print a red FAILED marker (only once per test), set the per-test error
# flag, and log the failure detail.
SignalError() {
if [ $error -eq 0 ] ; then
echo "\033[31m FAILED \033[0m"
error=1
fi
echo " $1"
}
# Compare <outfile> <reffile> <difffile>
# Compares the outfile with reffile. Differences, if any, written to difffile
# (which is also registered in $generatedfiles for later cleanup).
Compare() {
generatedfiles="$generatedfiles $3"
echo diff -b $1 $2 ">" $3 1>&2
diff -b "$1" "$2" > "$3" 2>&1 || {
SignalError "$1 differs"
echo "FAILED $1 differs from $2" 1>&2
}
}
# Run <args>
# Echo the command to the log, eval it, and flag an error unless the test
# is an expected-failure case (its 5th argument contains "fail").
Run() {
echo $* 1>&2
eval $* || {
# FIX: the original used the bash-only test "[[ $5 != *fail* ]]" under a
# #!/bin/sh shebang; on a POSIX shell (e.g. dash) "[[" is not found and
# failures were silently dropped.  A case pattern is the portable
# equivalent of the same substring check.
case $5 in
*fail*)
;;
*)
SignalError "$1 failed on $*"
return 1
;;
esac
}
}
# CheckPreprocessor <testfile>
# Diff the preprocessor's intermediate output (produced earlier by the main
# loop running preprocessor.py) against the reference .out file.
CheckPreprocessor() {
error=0
# Strip directory and the .yo suffix (two-line sed script inside backticks).
basename=`echo $1 | sed 's/.*\\///
s/.yo//'`
reffile=`echo $1 | sed 's/.yo$//'`
basedir="`echo $1 | sed 's/\/[^\/]*$//'`/."
echo -n "$basename.................."
echo 1>&2
echo "###### Testing $basename" 1>&2
generatedfiles=""
Compare "../test/preprocessor/intermediate/$basename.yo" ${reffile}.out ${basename}.a.diff
# Report status; remove generated files unless -k was given.
if [ $error -eq 0 ] ; then
if [ $keep -eq 0 ] ; then
rm -f $generatedfiles
fi
echo "\033[32m OK \033[0m"
echo "###### SUCCESS" 1>&2
else
echo "###### FAILED" 1>&2
globalerror=$error
fi
}
# CheckParser <testfile>
# Feed the preprocessed test through ./parser_test (stdout+stderr combined)
# and diff the result against the reference .out file.
CheckParser() {
error=0
# Strip directory and the .yo suffix (two-line sed script inside backticks).
basename=`echo $1 | sed 's/.*\\///
s/.yo//'`
reffile=`echo $1 | sed 's/.yo$//'`
basedir="`echo $1 | sed 's/\/[^\/]*$//'`/."
echo -n "$basename.................."
echo 1>&2
echo "###### Testing $basename" 1>&2
generatedfiles=""
YO="./parser_test"
generatedfiles="$generatedfiles ${basename}.a.out" &&
Run "$YO" "<" "../test/parser/intermediate/$basename.yo" ">" ${basename}.a.out "2>" ${basename}.a.out &&
Compare ${basename}.a.out ${reffile}.out ${basename}.a.diff
# Report status; remove generated files unless -k was given.
if [ $error -eq 0 ] ; then
if [ $keep -eq 0 ] ; then
rm -f $generatedfiles
fi
echo "\033[32m OK \033[0m"
echo "###### SUCCESS" 1>&2
else
echo "###### FAILED" 1>&2
globalerror=$error
fi
}
# Check <testfile>
# Default suite: compile the test with "$YO -c" and diff the output against
# the reference .out file.
Check() {
error=0
# Strip directory and the .yo suffix (two-line sed script inside backticks).
basename=`echo $1 | sed 's/.*\\///
s/.yo//'`
reffile=`echo $1 | sed 's/.yo$//'`
basedir="`echo $1 | sed 's/\/[^\/]*$//'`/."
echo -n "$basename..."
echo 1>&2
echo "###### Testing $basename" 1>&2
generatedfiles=""
# old from microc - interpreter
# generatedfiles="$generatedfiles ${basename}.i.out" &&
# Run "$YO" "-i" "<" $1 ">" ${basename}.i.out &&
# Compare ${basename}.i.out ${reffile}.out ${basename}.i.diff
generatedfiles="$generatedfiles ${basename}.c.out" &&
Run "$YO" "-c" $1 ">" ${basename}.c.out &&
Compare ${basename}.c.out ${reffile}.out ${basename}.c.diff
# Report the status and clean up the generated files
if [ $error -eq 0 ] ; then
if [ $keep -eq 0 ] ; then
rm -f $generatedfiles
fi
echo "OK"
echo "###### SUCCESS" 1>&2
else
echo "###### FAILED" 1>&2
globalerror=$error
fi
}
# CheckFail <testfile>
# Expected-failure suite: compile with "$YO -b" capturing stderr; if the
# compile unexpectedly succeeds, run the produced binary instead.  Either
# way the captured output must match the reference .out file.
CheckFail() {
error=0
# Strip directory and the .yo suffix (two-line sed script inside backticks).
basename=`echo $1 | sed 's/.*\\///
s/.yo//'`
reffile=`echo $1 | sed 's/.yo$//'`
basedir="`echo $1 | sed 's/\/[^\/]*$//'`/."
echo -n "$basename..."
echo 1>&2
echo "###### Testing $basename" 1>&2
generatedfiles=""
# old from microc - interpreter
# generatedfiles="$generatedfiles ${basename}.i.out" &&
# Run "$YO" "-i" "<" $1 ">" ${basename}.i.out &&
# Compare ${basename}.i.out ${reffile}.out ${basename}.i.diff
generatedfiles="$generatedfiles ${basename}.c.out" &&
{
Run "$YO" "-b" $1 "2>" ${basename}.c.out ||
Run "$binaryoutput" ">" ${basename}.b.out
} &&
Compare ${basename}.c.out ${reffile}.out ${basename}.c.diff
# Report the status and clean up the generated files
if [ $error -eq 0 ] ; then
if [ $keep -eq 0 ] ; then
rm -f $generatedfiles
fi
echo "OK"
echo "###### SUCCESS" 1>&2
else
echo "###### FAILED" 1>&2
globalerror=$error
fi
}
# CheckSemanticAnalysis <testfile>
# Feed the preprocessed test through ./semantic_test (stdout+stderr combined)
# and diff the result against the reference .out file.
CheckSemanticAnalysis() {
error=0
# Strip directory and the .yo suffix (two-line sed script inside backticks).
basename=`echo $1 | sed 's/.*\\///
s/.yo//'`
reffile=`echo $1 | sed 's/.yo$//'`
basedir="`echo $1 | sed 's/\/[^\/]*$//'`/."
echo -n "$basename..."
echo 1>&2
echo "###### Testing $basename" 1>&2
generatedfiles=""
YO="./semantic_test"
# FIX: this list previously named ${basename}.f.cpp ${basename}.f.out yo.prog
# (leftovers copied from the full-stack runner), so cleanup removed files this
# suite never creates and left its real .s.out artifact behind.  Track the
# file actually written below; Compare appends the .s.diff itself.
generatedfiles="$generatedfiles ${basename}.s.out"
Run "$YO" "<" "../test/semantic/intermediate/$basename.yo" ">" ${basename}.s.out "2>" ${basename}.s.out &&
Compare ${basename}.s.out ${reffile}.out ${basename}.s.diff
# Report status; remove generated files unless -k was given.
if [ $error -eq 0 ] ; then
if [ $keep -eq 0 ] ; then
rm -f $generatedfiles
fi
echo "OK"
echo "###### SUCCESS" 1>&2
else
echo "###### FAILED" 1>&2
globalerror=$error
fi
}
# TestTypeReader <testfile>
# Feed the preprocessed test through ./typereader_test and diff its combined
# stdout/stderr against the reference .out file.  The trailing "&&" on the
# Run line skips the comment lines and chains directly into Compare.
TestTypeReader() {
error=0
# Strip directory and the .yo suffix (two-line sed script inside backticks).
basename=`echo $1 | sed 's/.*\\///
s/.yo//'`
reffile=`echo $1 | sed 's/.yo$//'`
basedir="`echo $1 | sed 's/\/[^\/]*$//'`/."
echo -n "$basename..."
echo 1>&2
echo "###### Testing $basename" 1>&2
generatedfiles=""
tmpfiles=""
# old from microc - interpreter
# generatedfiles="$generatedfiles ${basename}.i.out" &&
# Run "$YO" "-i" "<" $1 ">" ${basename}.i.out &&
# Compare ${basename}.i.out ${reffile}.out ${basename}.i.diff
YO="./typereader_test"
generatedfiles="$generatedfiles ${basename}.f.cpp ${basename}.f.out yo.prog"
Run "$YO" "<" "../test/typereader/intermediate/$basename.yo" ">" ${basename}.f.out "2>" ${basename}.f.out &&
#g++ ${basename}.f.cpp libclip.cpp yolib.h -lstdc++ -lopenshot-audio -lopenshot -I/usr/local/include/libopenshot -I/usr/local/include/libopenshot-audio -lconfig++ -lavdevice -lavformat -lavcodec -lavutil -lz `pkg-config --cflags --libs libconfig++ Qt5Gui Qt5Widgets Magick++` -fPIC -std=c++11 -o yo.prog
#g++ -o yo.prog ${basename}.f.cpp yolib.h -std=c++11 &&
#Run "./yo.prog" ">" ${basename}.f.out &&
Compare ${basename}.f.out ${reffile}.out ${basename}.f.diff
#generatedfiles="$generatedfiles ${basename}.f.out" &&
#tmpfiles="$tmpfiles tests/${basename}.lrx_lrxtmp.c a.out" &&
#Run "$YO" "-b" $1 &&
#Run "$binaryoutput" ">" ${basename}.f.out &&
#Compare ${basename}.f.out ${reffile}.out ${basename}.f.diff
#rm -f $tmpfiles
# Report the status and clean up the generated files
if [ $error -eq 0 ] ; then
if [ $keep -eq 0 ] ; then
rm -f $generatedfiles
fi
echo "OK"
echo "###### SUCCESS" 1>&2
else
echo "###### FAILED" 1>&2
globalerror=$error
fi
}
# TestRunningProgram <testfile>
# Full-stack suite: generate C++ with ./generate_test, compile it against
# yolib/openshot into yo.prog, run it, and diff the program's output against
# the reference .out file.
TestRunningProgram() {
error=0
# Strip directory and the .yo suffix (two-line sed script inside backticks).
basename=`echo $1 | sed 's/.*\\///
s/.yo//'`
reffile=`echo $1 | sed 's/.yo$//'`
basedir="`echo $1 | sed 's/\/[^\/]*$//'`/."
echo -n "$basename..."
echo 1>&2
echo "###### Testing $basename" 1>&2
generatedfiles=""
tmpfiles=""
# old from microc - interpreter
# generatedfiles="$generatedfiles ${basename}.i.out" &&
# Run "$YO" "-i" "<" $1 ">" ${basename}.i.out &&
# Compare ${basename}.i.out ${reffile}.out ${basename}.i.diff
YO="./generate_test"
generatedfiles="$generatedfiles ${basename}.f.cpp ${basename}.f.out yo.prog"
Run "$YO" "<" "../test/intermediate/$basename.yo" ">" ${basename}.f.cpp &&
g++ ${basename}.f.cpp yolib.h -lstdc++ -lopenshot-audio -lopenshot -I/usr/local/include/libopenshot -I/usr/local/include/libopenshot-audio -lconfig++ -lavdevice -lavformat -lavcodec -lavutil -lz `pkg-config --cflags --libs libconfig++ Qt5Gui Qt5Widgets Magick++` -fPIC -std=c++11 -o yo.prog
#g++ -o yo.prog ${basename}.f.cpp yolib.h -std=c++11 &&
Run "./yo.prog" ">" ${basename}.f.out &&
Compare ${basename}.f.out ${reffile}.out ${basename}.f.diff
#generatedfiles="$generatedfiles ${basename}.f.out" &&
#tmpfiles="$tmpfiles tests/${basename}.lrx_lrxtmp.c a.out" &&
#Run "$YO" "-b" $1 &&
#Run "$binaryoutput" ">" ${basename}.f.out &&
#Compare ${basename}.f.out ${reffile}.out ${basename}.f.diff
#rm -f $tmpfiles
# Report the status and clean up the generated files
if [ $error -eq 0 ] ; then
if [ $keep -eq 0 ] ; then
rm -f $generatedfiles
fi
echo "OK"
echo "###### SUCCESS" 1>&2
else
echo "###### FAILED" 1>&2
globalerror=$error
fi
}
# MunanTest <testfile>
# Ad-hoc debug runner: executes ./generate_test on the preprocessed file and
# reports success or failure (no reference comparison).
MunanTest() {
error=0
# Strip directory and the .yo suffix (two-line sed script inside backticks).
basename=`echo $1 | sed 's/.*\\///
s/.yo//'`
reffile=`echo $1 | sed 's/.yo$//'`
basedir="`echo $1 | sed 's/\/[^\/]*$//'`/."
echo -n "$basename..."
echo 1>&2
echo "###### Testing $basename" 1>&2
generatedfiles=""
tmpfiles=""
YO="./generate_test"
generatedfiles="$generatedfiles ${basename}.f.cpp ${basename}.f.out yo.prog"
echo "\n"
echo "\n"
# FIX: the original Run line ended in "&&", which made the reporting "if"
# below the right-hand side of the conjunction — on failure nothing was
# reported and cleanup was skipped.  It also only assigned globalerror in
# the success branch (a no-op), so failures never reached the exit status.
Run "$YO" "<" "../test/intermediate/$basename.yo"
if [ $error -eq 0 ] ; then
if [ $keep -eq 0 ] ; then
rm -f $generatedfiles
fi
echo "\n"
echo "###### SUCCESS" 1>&2
else
echo "###### FAILED" 1>&2
globalerror=$error
fi
}
# Parse command-line flags.
# NOTE(review): the optstring declares d, p and s but only k and h have
# case handlers below -- the extra flags are silently accepted. Confirm
# whether they were meant to do something.
while getopts kdpsh c; do
case $c in
k) # Keep intermediate files
keep=1
;;
h) # Help
Usage
;;
esac
done
shift `expr $OPTIND - 1`
# Remaining args are explicit test files; default to the tests/ glob.
if [ $# -ge 1 ]
then
files=$@
else
files="tests/test-*.yo"
fi
# Dispatch each test file to the matching stage runner based on its name.
# Every branch first runs the preprocessor over the file, then invokes the
# stage-specific check with stderr appended to the global log.
# NOTE(review): the "\033[32m OK \033[0m" escape sequences are only
# interpreted by echo -e / non-bash echo; under bash builtin echo they
# print literally -- confirm on the target shell.
for file in $files
do
case $file in
*test-preprocess*)
echo "##### Now Testing Preprocessor #####"
echo "preprocessing....."
python $preproc_path $file
echo "\033[32m OK \033[0m"
CheckPreprocessor $file 2>> $globallog
;;
*test-parser*)
echo "##### Now Testing Parser #####"
echo "preprocessing....."
python $preproc_path $file
echo "\033[32m OK \033[0m"
CheckParser $file 2>> $globallog
;;
*test-semantic*)
echo "##### Now Testing Semantic Analysis #####"
echo "preprocessing....."
python $preproc_path $file
echo "\033[32m OK \033[0m"
CheckSemanticAnalysis $file 2>> $globallog
;;
*test-full*)
echo "##### Now Testing FullStack #####"
echo "preprocessing....."
python $preproc_path $file
echo "\033[32m OK \033[0m"
TestRunningProgram $file 2>> $globallog
;;
*test-munan*)
echo "##### Now Testing Munan #####"
echo "preprocessing....."
python $preproc_path $file
echo "\033[32m OK \033[0m"
MunanTest $file
;;
*test-typereader*)
echo "##### Now Testing Single FullStack #####"
echo "preprocessing....."
python $preproc_path $file
echo "\033[32m OK \033[0m"
TestTypeReader $file 2>> $globallog
;;
*test-fail*)
CheckFail $file 2>> $globallog
;;
*test-*)
Check $file 2>> $globallog
;;
#echo "unknown file type $file"
#globalerror=1
#;;
esac
done
# Exit nonzero if any test failed.
exit $globalerror
|
<gh_stars>10-100
import mock
import requests_mock
from integration_mocks import IntegrationMocks
class GetAccessTokenTest(IntegrationMocks):
    # Integration test for scripts/get_access_token: mocks the Pinterest v3
    # OAuth and user endpoints, runs main(), and verifies the printed
    # browser URL and token hashes.
    #
    # NOTE(review): the "<PASSWORD>" / "<NAME>" tokens below are redaction
    # placeholders left in the fixture data; the hash assertions depend on
    # the original unredacted values -- confirm against the unit tests.
    # real_http is required for the redirect in integration_mocks to work
    @requests_mock.Mocker(real_http=True)
    @mock.patch("builtins.print")
    def test_get_access_token(self, rm, mock_print):
        # Mock the token-exchange endpoint.
        rm.put(
            "https://api.pinterest.com/v3/oauth/access_token/",
            json={
                "status": "test-status",
                "scope": "test-scope",
                "access_token": "test-access-token",
                "data": {"refresh_token": "<PASSWORD>-refresh-token"},
            },
        )
        # Mock the "who am I" endpoint hit after auth.
        rm.get(
            "https://api.pinterest.com/v3/users/me/",
            json={
                "data": {
                    "full_name": "<NAME>",
                    "id": "test user id",
                    "about": "test about",
                    "profile_url": "test profile url",
                    "pin_count": "pin count",
                }
            },
        )
        from scripts.get_access_token import main  # import here to see monkeypatches
        with mock.patch("builtins.open") as mock_open:
            with mock.patch.dict("os.environ", self.mock_os_environ, clear=True):
                mock_open.side_effect = FileNotFoundError  # no access_token.json file
                with self.mock_redirect():
                    main()  # run get_access_token
        # verify expected values printed. see unit tests for values
        mock_print.assert_any_call(
            "mock_open_new: "
            + "https://www.pinterest.com/oauth/?consumer_id=test-app-id"
            + "&redirect_uri=http://localhost:8085/&response_type=code"
            + "&refreshable=True&state=test-token-hex"
        )
        mock_print.assert_any_call(
            "hashed access token: "
            + "597480d4b62ca612193f19e73fe4cc3ad17f0bf9cfc16a7cbf4b5064131<PASSWORD>"
        )
        mock_print.assert_any_call(
            "hashed refresh token: "
            + "0a9b110d5e553bd98e9965c70a601c15c36805016ba60d54f20f5830c39edcde"
        )
|
import HSVTypes from "./HSVTypes";
/*eslint no-unused-expressions: "off"*/
/*eslint no-sequences: "off"*/
export default class HSVTools {
static HSVtoRGB (c: HSVTypes.HSVColor): HSVTypes.RGBColor {
const {h, s, v} = c;
let i, f, p, q, t;
i = Math.floor(h * 6);
f = h * 6 - i;
p = v * (1 - s);
q = v * (1 - f * s);
t = v * (1 - (1 - f) * s);
switch (i % 6) {
case 0: return { r: v, g: t, b: p };
case 1: return { r: q, g: v, b: p };
case 2: return { r: p, g: v, b: t };
case 3: return { r: p, g: q, b: v };
case 4: return { r: t, g: p, b: v };
case 5: return { r: v, g: p, b: q };
default: return { r: 0, g: 0, b: 0 };
}
}
static generateColor (): HSVTypes.RGBColor {
let c = HSVTools.HSVtoRGB({ h: Math.random(), s: 1.0, v: 1.0 });
c.r *= 0.15;
c.g *= 0.15;
c.b *= 0.15;
return c;
}
} |
/// <summary>
/// Domain model for a school.
/// NOTE(review): this class body is a placeholder -- SchoolDto.ToModel and
/// SchoolConverter below assign Property1/Property2, which are not declared
/// here. The real properties must exist for this file to compile; confirm.
/// </summary>
public class School
{
    // School class implementation
}
/// <summary>
/// Data-transfer object for <c>School</c>.
/// </summary>
public class SchoolDto
{
    // SchoolDto class implementation

    /// <summary>
    /// Maps this DTO to a new <c>School</c> domain object, copying
    /// properties one-to-one. Placeholder property names (Property1,
    /// Property2) stand in for the real schema -- NOTE(review): these
    /// must be replaced with actual declared properties to compile.
    /// </summary>
    public School ToModel()
    {
        // Implement the conversion logic
        School school = new School();
        // Copy relevant properties from SchoolDto to School
        school.Property1 = this.Property1;
        school.Property2 = this.Property2;
        // Continue for other properties
        return school;
    }
}
/// <summary>
/// Converts collections of <c>School</c> domain objects to DTOs.
/// NOTE(review): requires <c>using System.Collections.Generic;</c> at file
/// scope for IList/List -- confirm it is present above this chunk.
/// </summary>
public static class SchoolConverter
{
    /// <summary>
    /// Maps each <c>School</c> in <paramref name="list"/> to a new
    /// <c>SchoolDto</c>, preserving order. Property names are placeholders
    /// for the real schema.
    /// </summary>
    public static IList<SchoolDto> ToList(IList<School> list)
    {
        IList<SchoolDto> dto_list = new List<SchoolDto>();
        foreach (School item in list)
        {
            SchoolDto dto = new SchoolDto();
            // Copy relevant properties from School to SchoolDto
            dto.Property1 = item.Property1;
            dto.Property2 = item.Property2;
            // Continue for other properties
            dto_list.Add(dto);
        }
        return dto_list;
    }
}
#!/bin/bash
# Benchmark setup for the php-symfony2 test: install deps, patch configs
# for this run's paths, warm the cache, and start PHP-FPM + nginx.
fw_depends mysql php7 nginx composer
# Point the app at the benchmark DB host and fix nginx paths for this tree.
sed -i 's|database_host: .*|database_host: '"${DBHOST}"'|g' app/config/parameters.yml
sed -i 's|root .*/FrameworkBenchmarks/php-symfony2| root '"${TROOT}"'|g' deploy/nginx.conf
sed -i 's|/usr/local/nginx/|'"${IROOT}"'/nginx/|g' deploy/nginx.conf
# Rebuild the Symfony prod cache before serving traffic.
php bin/console cache:clear --env=prod --no-debug --no-warmup
php bin/console cache:warmup --env=prod --no-debug
# Launch the FastCGI pool and the web server with the benchmark configs.
php-fpm --fpm-config $FWROOT/toolset/setup/linux/languages/php/php-fpm.conf -g $TROOT/deploy/php-fpm.pid
nginx -c $TROOT/deploy/nginx.conf
|
/**
* @file LBInstallation.h
*
* @author <NAME>
* @copyright (c) 2013 StrongLoop. All rights reserved.
*/
#import "LBPersistedModel.h"
#import "LBRESTAdapter.h"
@class LBInstallation;
@class LBInstallationRepository;
/**
 * LBInstallation represents the installation of a given app on the device. It
 * connects the device token with application/user/timeZone/subscriptions for
 * the server to find devices of interest for push notifications.
 */
@interface LBInstallation : LBPersistedModel

/**
 * The app id received from LoopBack application signup.
 * It's usually configured in the Settings.plist file.
 */
@property (nonatomic, copy) NSString *appId;

/**
 * The application version; defaults to @"1.0.0".
 */
@property (nonatomic, copy) NSString *appVersion;

/**
 * The id for the signed in user for the installation
 */
@property (nonatomic, copy) NSString *userId;

/**
 * It's always @"ios"
 */
@property (nonatomic, readonly, copy) NSString *deviceType;

/**
 * The device token in hex string format
 */
@property (nonatomic, copy) NSString *deviceToken;

/**
 * The current badge
 */
@property (nonatomic, copy) NSNumber *badge;

/**
 * An array of topic names that the device subscribes to
 */
@property (nonatomic, copy) NSArray *subscriptions;

/**
 * The time zone for the server side to decide a good time for push
 */
@property (nonatomic, readonly, copy) NSString *timeZone;

/**
 * Status of the installation
 */
@property (nonatomic, copy) NSString *status;

/**
 * Convert the device token from NSData to NSString
 *
 * @param token The device token in NSData type
 * @return The device token in NSString type
 */
+ (NSString *)deviceTokenWithData: (NSData *) token;

/**
 * Register the device against LoopBack server
 * @param adapter The REST adapter
 * @param deviceToken The device token
 * @param registrationId The registration id
 * @param appId The application id
 * @param appVersion The application version
 * @param userId The user id
 * @param badge The badge
 * @param subscriptions An array of string values representing subscriptions to push events
 * @param success The success callback block for device registration
 * @param failure The failure callback block for device registration
 */
+ (void)registerDeviceWithAdapter: (LBRESTAdapter *) adapter
                      deviceToken: (NSData *) deviceToken
                   registrationId: (id) registrationId
                            appId: (NSString *) appId
                       appVersion: (NSString *) appVersion
                           userId: (NSString *) userId
                            badge: (NSNumber *) badge
                    subscriptions: (NSArray *) subscriptions
                          success: (SLSuccessBlock) success
                          failure: (SLFailureBlock) failure;

@end

/**
 * Custom ModelRepository subclass for LBInstallation.
 * Use +repository to obtain the shared repository instance.
 */
@interface LBInstallationRepository : LBPersistedModelRepository

+ (instancetype)repository;

@end
|
import numpy as np
# Fix: the model below uses the tf.keras API, but tensorflow was never
# imported -- without this line the script dies with NameError on `tf`.
import tensorflow as tf

# input data: two samples with two features each
X = np.array([[0.1, 0.9], [0.25, 0.12]])
# output data: one binary label per sample
y = np.array([1, 0])

# architecture of the network: 2 -> 4 -> 4 -> 1 MLP, sigmoid output for
# binary classification
model = tf.keras.Sequential([
    tf.keras.layers.Dense(4, input_shape=(2,)),
    tf.keras.layers.Dense(4, activation='sigmoid'),
    tf.keras.layers.Dense(1, activation='sigmoid')
])

# compile and fit the model
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model.fit(X, y, epochs=100)

# evaluate the model -- NOTE: evaluated on the training data itself; there
# is no held-out test set here, so "Test accuracy" is really train accuracy.
test_loss, test_acc = model.evaluate(X, y)
print('Test accuracy:', test_acc)
import { sbClient } from "~/lib/supabase/index";
import {
Tables,
CustomFunction,
Link,
Page,
PageWithMetadata,
Theme,
User,
} from "@linkto/core";
import {
ChangePasswordDto,
CreateLinkDto,
ReorderLinkDto,
SignUpDto,
UpdateLinkDto,
UpdatePageDto,
UpdateUserDto,
} from "~/types";
/****************************************************
* STORAGE OPERATIONS *
****************************************************/
// Upload `file` into the "avatars" storage bucket under `name`.
// Returns the storage key of the uploaded object (supabase-js v1 response
// shape, `data.Key`); throws on any failure.
export const uploadAvatar = async (name: string, file: File) => {
  const { data, error } = await sbClient.storage
    .from("avatars")
    .upload(name, file);
  if (!data || error) {
    throw new Error("Avatar upload failed!");
  }
  return data.Key;
};
// Delete the avatar object `name` from the "avatars" bucket.
// Returns true on success; throws otherwise.
export const removeAvatar = async (name: string) => {
  const { data, error } = await sbClient.storage.from("avatars").remove([name]);
  if (!data || error) {
    throw new Error("Avatar could not be removed!");
  }
  return true;
};
/****************************************************
* USER CRUD OPERATIONS *
****************************************************/
/**
 * Check whether a row with the given username exists in the users table.
 * Any query error (including "no rows") counts as "does not exist".
 */
export const doesUsernameExist = async (username: string) => {
  const { data, error } = await sbClient
    .from<User>(Tables.USERS)
    .select("*")
    .eq("username", username)
    .single();
  // Taken iff a row came back and the query reported no error.
  return Boolean(data) && !error;
};
// Fetch a single user row by username; throws if missing or on error.
export const getUserByUsername = async (username: string) => {
  const { data, error } = await sbClient
    .from<User>(Tables.USERS)
    .select("*")
    .eq("username", username)
    .single();
  if (!data || error) {
    throw new Error("User record not found");
  }
  return data;
};
// Fetch a single user row by primary key; throws if missing or on error.
export const getUserById = async (userId: string) => {
  const { data, error } = await sbClient
    .from<User>(Tables.USERS)
    .select("*")
    .eq("id", userId)
    .single();
  if (!data || error) {
    throw new Error("User record not found");
  }
  return data;
};
// Sign up a new user: pre-check username/email uniqueness, then delegate
// to supabase auth. Returns the created auth user; throws with a
// human-readable message on any conflict or auth failure.
export const createUserWithEmailAndPassword = async ({
  email,
  password,
  username,
}: SignUpDto) => {
  /**
   * We need to make these 2 separate calls as supabase doesn't
   * check before auth if any credentials already exist
   */
  // START CHECKS
  const { data: usernameExists } = await sbClient
    .from<User>(Tables.USERS)
    .select("*")
    .eq("username", username)
    .single();
  if (usernameExists) {
    throw new Error("Username already in use!");
  }
  const { data: emailExists } = await sbClient
    .from<User>(Tables.USERS)
    .select("*")
    .eq("email", email)
    .single();
  if (emailExists) {
    throw new Error("Email address already in use!");
  }
  // END CHECKS
  // NOTE(review): the pre-checks are not atomic with signUp, so a race
  // can still create a duplicate between check and signup -- the DB
  // constraint is the real guarantee.
  const { user, error } = await sbClient.auth.signUp(
    { email, password },
    {
      // additional metadata needed to auto create a new user entry
      // on auth ( new.raw_user_meta_data->>'user_name' )
      data: {
        user_name: username,
        avatar_url:
          "https://qatejhwdylvqgwcegrjn.supabase.in/storage/v1/object/public/avatars/default-avatar.jpg",
      },
    }
  );
  if (error) {
    throw new Error(error.message);
  }
  return user;
};
// Patch the user row matching `userId` with the given fields.
export const updateUser = async (userDto: UpdateUserDto, userId: string) => {
  const { data, error } = await sbClient
    .from<User>(Tables.USERS)
    .update(userDto)
    .match({ id: userId })
    .single();
  if (error) {
    throw new Error(error.message);
  }
  return data;
};
// Update the email on the supabase auth account (not the users table row).
export const updateAuthEmail = async (email: string) => {
  const { user, error } = await sbClient.auth.update({ email });
  if (error) {
    throw new Error(error.message);
  }
  return user;
};
export const changePassword = async ({
old_password,
new_password,
}: ChangePasswordDto) => {
const { data, error } = await sbClient.rpc(CustomFunction.CHANGE_PASSWORD, {
current_plain_password: <PASSWORD>,
new_plain_password: <PASSWORD>,
});
if (error) {
throw new Error(error.message);
}
return data;
};
// Irreversibly delete the signed-in user's account via the DELETE_ACCOUNT
// RPC. Prefixed _UNSAFE_ because there is no confirmation or undo here --
// callers are responsible for confirming intent.
export const _UNSAFE_deleteAccount = async () => {
  const { data, error } = await sbClient.rpc(CustomFunction.DELETE_ACCOUNT);
  if (error) {
    throw new Error(error.message);
  }
  return data;
};
/****************************************************
* LINK CRUD OPERATIONS *
****************************************************/
// Insert (or upsert) a new link row and return it.
export const createNewLink = async (newLink: CreateLinkDto) => {
  const { data, error } = await sbClient
    .from<Link>(Tables.LINKS)
    .upsert(newLink)
    .single();
  if (error) {
    throw new Error(error.message);
  }
  return data;
};
// Fetch all links owned by a user, ordered by display position.
export const getLinksByUserId = async (id: string) => {
  const { data, error } = await sbClient
    .from<Link>(Tables.LINKS)
    .select("*")
    .eq("user_id", id)
    .order("display_order", { ascending: true });
  if (error) {
    throw new Error(error.message);
  }
  if (!data) {
    throw new Error("No link record found");
  }
  return data;
};
// Patch a single link row by id.
export const updateLink = async (linkDto: UpdateLinkDto, linkId: string) => {
  const { data, error } = await sbClient
    .from<Link>(Tables.LINKS)
    .update(linkDto)
    .match({ id: linkId })
    .single();
  if (error) {
    throw new Error(error.message);
  }
  return data;
};
// Apply a batch reordering of links atomically via the REORDER RPC.
export const reorderLinks = async (listDto: ReorderLinkDto[]) => {
  const { data, error } = await sbClient.rpc(CustomFunction.REORDER, {
    payload: listDto,
  });
  if (error) {
    throw new Error(error.message);
  }
  return data;
};
// Delete a link row by id.
export const deleteLink = async (linkId: string) => {
  const { data, error } = await sbClient
    .from<Link>(Tables.LINKS)
    .delete()
    .eq("id", linkId);
  if (error) {
    throw new Error(error.message);
  }
  return data;
};
/****************************************************
* PAGE CRUD OPERATIONS *
****************************************************/
// Fetch a user's page together with its owner (users) and theme (themes)
// rows in a single joined select. Throws on query error.
export const getPageWithMetadata = async (
  userId: string
): Promise<PageWithMetadata> => {
  const { data, error } = await sbClient
    .from(Tables.PAGES)
    .select(
      `
    id,
    title,
    seo_title,
    seo_description,
    nsfw_content,
    show_branding,
    social_link_position,
    integrations,
    user:users(
      id,
      username,
      email,
      avatar_url,
      full_name,
      biography,
      status,
      page_link,
      is_banned,
      onboarding_process
    ),
    theme:themes(
      id,
      style,
      name,
      kind,
      state
    )
  `
    )
    .eq("user_id", userId)
    .single();
  if (error) {
    throw new Error(error.message);
  }
  return data;
};
// Patch the page row belonging to `userId`.
export const updatePage = async (pageDto: UpdatePageDto, userId: string) => {
  const { data, error } = await sbClient
    .from<Page>(Tables.PAGES)
    .update(pageDto)
    .match({ user_id: userId })
    .single();
  if (error) {
    throw new Error(error.message);
  }
  return data;
};
/****************************************************
 *              THEME CRUD OPERATIONS               *
 ****************************************************/
// Fetch every available theme row.
export const getThemes = async () => {
  const { data, error } = await sbClient.from<Theme>(Tables.THEMES).select("*");
  if (error) {
    throw new Error(error.message);
  }
  if (!data) {
    throw new Error("No theme record found");
  }
  return data;
};
|
import React from 'react';
import { radios } from '@storybook/addon-knobs';
import { storiesOf } from '@storybook/react';
import { Row } from 'antd';
import Logo from './logo';
// Storybook stories for the Logo component: a fixed gallery plus a
// knob-driven variant.
storiesOf('Components/Logos', module)
  .add('default', () => {
    // One row per supported brand/size combination.
    return (
      <div>
        <Row>
          <Logo name="bepswap" type="normal" />
        </Row>
        <Row>
          <Logo name="bepswap" type="long" />
        </Row>
        <Row>
          <Logo name="bepswap" type="large" />
        </Row>
        <Row>
          <Logo name="thorchain" type="long" />
        </Row>
        <Row>
          <Logo name="binanceDex" type="long" />
        </Row>
      </div>
    );
  })
  .add('properties', () => {
    // NOTE(review): addon-knobs `radios` expects an options object
    // ({ label: value }); arrays are passed here, which yields numeric
    // index labels in the knobs panel -- confirm whether that is intended.
    const nameOptions = ['bepswap', 'thorchain'];
    const typeOptions = ['normal', 'long', 'large'];
    const name = radios('name', nameOptions, 'bepswap');
    const type = radios('type', typeOptions, 'long');
    return (
      <Row>
        <Logo name={name} type={type} />
      </Row>
    );
  });
|
<reponame>hylophile/frontend<gh_stars>0
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import React, { Component } from 'react';
import { bindActionCreators } from 'redux';
import { Form as FinalForm } from 'react-final-form';
import { Button, Form } from 'react-bootstrap';
import {
DashboardLink as DashLink,
DashboardLayout as Layout,
RoleUsers
} from 'components/Dashboard';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { actions as rolesActions } from 'reducers/roles';
import { actions as dashActions } from 'reducers/dashboard';
// Dashboard page that confirms and performs deletion of a role, then
// navigates back to the Roles dashboard and lists the users still
// attached to the role.
export class RoleDelete extends Component {
  static propTypes = {
    actions: PropTypes.object.isRequired,
    name: PropTypes.string.isRequired,
    layoutOptions: PropTypes.object.isRequired,
    roleId: PropTypes.number.isRequired
  };

  constructor(props) {
    super(props);
    this.handleSubmit = this.handleSubmit.bind(this);
  }

  // Dispatch the delete, then switch the dashboard back to the Roles list.
  handleSubmit() {
    const { actions, roleId, name } = this.props;

    actions.deleteRole({ roleId, name });
    actions.selectDashboard({ name: 'Roles' });
  }

  render() {
    const { name, layoutOptions, roleId } = this.props;

    return (
      <Layout
        pageTitle="Delete Role - Dashboard"
        header={`Roles > Delete Role > ${name}`}
        options={layoutOptions}
      >
        <FontAwesomeIcon icon="chevron-left" />
        <DashLink to="#roles" name="Roles">
          <span>Back</span>
        </DashLink>
        <FinalForm
          onSubmit={this.handleSubmit}
          render={({ handleSubmit, submitting }) => (
            <Form noValidate onSubmit={handleSubmit}>
              <Form.Group>
                <Form.Label>Delete</Form.Label>
                <h3>Delete Role: {name}?</h3>
              </Form.Group>
              <Button
                className="button-animation"
                variant="primary"
                type="submit"
                disabled={submitting}
              >
                <span>Delete</span>
              </Button>
            </Form>
          )}
        />
        <br />
        <RoleUsers
          layoutOptions={{ header: false, title: true, border: 'danger' }}
          roleId={Number(roleId)}
          name={name}
        />
      </Layout>
    );
  }
}

const mapDispatchToProps = dispatch => ({
  actions: bindActionCreators(
    {
      ...rolesActions,
      ...dashActions
    },
    dispatch
  )
});

export default connect(null, mapDispatchToProps)(RoleDelete);
|
import * as utils from "../core/utils";
// Renders an "other tools" cross-link section into `placeHolder`, one
// thumbnail item per tool (hiding the currently-selected one), and keeps
// it in sync with translation and tool-change events on `dispatch`.
// NOTE(review): relies on a global `d3` (there is no import here) --
// confirm it is provided by the host page.
const SeeAlso = function (placeHolder, translator, dispatch, { tools, selectedTool, onClick }) {
    const templateHtml = `
    <div class="see-also-block">
        <h2 class="heading-2 see-also-heading" data-text="other_tools"></h2>
        <div class="other-tools-container">
            <div class="other-tools-item">
                <a rel="noopener">
                    <img class="image"/>
                    <span class="title"></span>
                </a>
            </div>
        </div>
    </div>
    `;
    //require("./see-also.html");
    // Build the fragment off-DOM, clone the item template once per tool,
    // then move the resulting children into the placeholder.
    const template = d3.create("div")
    template.html(templateHtml);
    const itemTemplate = template.select(".other-tools-item");
    for (let tool of tools) {
        itemTemplate.clone(true)
            .datum(tool)
            .attr("hidden", (tool.id === selectedTool || tool.hideThumbnail) ? true : null)
            .raise()
            .call(fillToolItem);
    }
    itemTemplate.remove();
    for (const elem of Array.from(template.node().children)) {
        placeHolder.append(function () { return elem; });
    }
    translate();
    // Re-translate labels when the page language changes.
    dispatch.on("translate.seeAlso", () => {
        translate();
    });
    // Hide the newly-selected tool's own thumbnail when the user switches.
    dispatch.on("toolChanged.seeAlso", d => {
        const tool = tools.filter(({id}) => id === d)[0];
        toolChanged(tool);
    })
    // Apply translated strings to the heading and each item title.
    function translate() {
        placeHolder.select(".see-also-heading").each(utils.translateNode(translator));
        placeHolder.selectAll(".other-tools-item").select(".title")
            .text(d => translator(d.title || d.id));
    }
    function toolChanged(tool) {
        placeHolder.selectAll(".other-tools-item")
            .attr("hidden", _d => (_d.id === tool.id || _d.hideThumbnail) ? true : null)
    }
    // NOTE(review): the "#$chart-type=" fragment looks like a typo for
    // "#chart-type=" -- confirm against the router before changing.
    function getLink(tool) {
        return `${window.location.pathname}#$chart-type=${tool}`;
    }
    // Fill one cloned item: external tools get a plain href; internal
    // tools get an in-app fragment link plus the onClick handler.
    function fillToolItem(item) {
        const tool = item.datum();
        const a = item.select("a");
        if (tool.url) {
            a.attr("href", tool.url)
        } else {
            a.attr("href", getLink(tool.id))
                .on("click", d => {
                    onClick(d);
                });
        }
        a.select(".image").attr("src", "." + tool.image);
    }
}
export default SeeAlso;
class TradingOrder:
    """A trading order with trigger metadata and optional auto-cancellation.

    All constructor arguments are stored verbatim as instance attributes;
    ``triggered`` starts as the empty string and becomes ``'Cancelled'``
    once the cancel window elapses.
    """

    def __init__(self, pegPriceMax, pegPriceDeviation, cancelDuration, timestamp,
                 orderID, stealth, triggerOrder, triggerPrice, triggerOriginalPrice,
                 triggerOrderType, triggerTrailingStopDeviation):
        self.pegPriceMax = pegPriceMax
        self.pegPriceDeviation = pegPriceDeviation
        self.cancelDuration = cancelDuration
        self.timestamp = timestamp
        self.orderID = orderID
        self.stealth = stealth
        self.triggerOrder = triggerOrder
        self.triggered = ''  # becomes 'Cancelled' when the order expires
        self.triggerPrice = triggerPrice
        self.triggerOriginalPrice = triggerOriginalPrice
        self.triggerOrderType = triggerOrderType
        self.triggerTrailingStopDeviation = triggerTrailingStopDeviation

    def cancel_order(self, current_timestamp):
        """Mark the order cancelled once its cancel window has elapsed.

        A non-positive ``cancelDuration`` means the order never auto-cancels.
        """
        elapsed = current_timestamp - self.timestamp
        if self.cancelDuration > 0 and elapsed >= self.cancelDuration:
            self.triggered = 'Cancelled'
<gh_stars>0
package virtual_robot.controller;
import java.util.concurrent.TimeUnit;
import virtual_robot.controller.LinearOpMode;
import virtual_robot.hardware.HardwareMap;
import virtual_robot.hardware.Telemetry;
public class OpMode extends LinearOpMode {

    // internal time tracking (all values in nanoseconds)
    private long _startTime = 0;     // time at which the op mode started running
    private long _suspendedTime = 0; // cumulative time spent suspended
    private long _prevTick = 0;      // timestamp of the previous loop tick

    public OpMode(){}

    // implementations of OpMode override these lifecycle hooks
    public void init() {}
    public void loop() {}
    public void stop() {}

    /**
     * Iterative op-mode driver: init() once, then call loop() until the op
     * mode is no longer active, updating telemetry after each callback.
     */
    public void runOpMode()
    {
        init();
        telemetry.update();
        waitForStart();
        // Fix: reset the clock when the op mode actually starts so that
        // getRuntime() measures elapsed run time. Previously only the
        // suspendable overload reset it, leaving _startTime at 0 here and
        // making getRuntime() return a meaningless value derived from the
        // arbitrary System.nanoTime() origin.
        resetStartTime();
        while (opModeIsActive()) {
            loop();
            telemetry.update();
        }
        stop();
    }

    /**
     * Suspendable driver variant: while bSuspend.value is true, elapsed time
     * is accumulated into _suspendedTime and loop() is skipped, so
     * getRuntime() excludes the suspended period.
     *
     * @param bSuspend shared flag toggled externally to pause the op mode
     */
    public void runOpMode(BooleanObject bSuspend)
    {
        init();
        telemetry.update();
        waitForStart();
        resetStartTime();
        while (opModeIsActive()) {
            long tick = System.nanoTime();
            long deltaT = tick-_prevTick;
            _prevTick = tick;
            if (bSuspend.value) {
                _suspendedTime += deltaT;
            }
            else {
                loop();
                telemetry.update();
            }
        }
        stop();
    }

    /**
     * Get the number of seconds this op mode has been running, excluding any
     * suspended time.
     * <p>
     * This method has sub millisecond accuracy.
     * @return number of seconds this op mode has been running
     */
    public double getRuntime() {
        final double NANOSECONDS_PER_SECOND = TimeUnit.SECONDS.toNanos(1);
        double elapsedTime = (System.nanoTime()-_startTime-_suspendedTime) / NANOSECONDS_PER_SECOND;
        return elapsedTime;
    }

    /**
     * Reset the start time to zero (also resets the previous-tick reference
     * and clears any accumulated suspended time).
     */
    public void resetStartTime() {
        _startTime = _prevTick = System.nanoTime();
        _suspendedTime = 0;
    }
}
|
/**
* @author ooooo
* @date 2020/9/12 10:44
*/
#ifndef CPP_0637__SOLUTION3_H_
#define CPP_0637__SOLUTION3_H_
#include "TreeNode.h"
class Solution3 {
 public:
  // LeetCode 637: breadth-first traversal collecting the mean node value
  // of each tree level, root level first.
  vector<double> averageOfLevels(TreeNode *root) {
    vector<double> averages;
    if (root == nullptr) return averages;
    queue<TreeNode *> frontier;
    frontier.push(root);
    while (!frontier.empty()) {
      // Everything currently queued belongs to one level.
      const size_t levelSize = frontier.size();
      double levelSum = 0;
      for (size_t i = 0; i < levelSize; ++i) {
        TreeNode *current = frontier.front();
        frontier.pop();
        levelSum += current->val;
        if (current->left != nullptr) frontier.push(current->left);
        if (current->right != nullptr) frontier.push(current->right);
      }
      averages.push_back(levelSum / levelSize);
    }
    return averages;
  }
};
#endif //CPP_0637__SOLUTION3_H_
|
<filename>src/main/java/br/com/alura/carteira/factory/ConnectionFactory.java<gh_stars>0
package br.com.alura.carteira.factory;
import java.sql.Connection;
import java.sql.DriverManager;
public class ConnectionFactory {

    /**
     * Opens and returns a new JDBC connection to the local "carteira" MySQL
     * database. Any failure (missing driver, bad credentials, unreachable
     * server) is rethrown as an unchecked RuntimeException with the original
     * exception preserved as the cause.
     *
     * NOTE(review): credentials are hard-coded and the password is a redacted
     * placeholder ("<PASSWORD>") -- these should be supplied via
     * configuration/environment; confirm the real value before use. Callers
     * are responsible for closing the returned connection.
     */
    public Connection getConnection() {
        try {
            String url = "jdbc:mysql://localhost:3306/carteira";
            String usuario = "root";
            String senha = "<PASSWORD>";
            // Explicit driver registration; JDBC 4+ would auto-load the
            // driver, but this keeps behavior identical on older setups.
            Class.forName("com.mysql.cj.jdbc.Driver");
            Connection conexao = DriverManager.getConnection(url, usuario, senha);
            return conexao;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
|
<filename>src/ordt/output/systemverilog/common/SystemVerilogModule.java
/*
* Copyright (c) 2016 Juniper Networks, Inc. All rights reserved.
*/
package ordt.output.systemverilog.common;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import ordt.output.common.MsgUtils;
import ordt.output.common.OutputWriterIntf;
import ordt.output.systemverilog.common.io.SystemVerilogIOElement;
import ordt.output.systemverilog.common.io.SystemVerilogIOSignalList;
import ordt.output.systemverilog.common.io.SystemVerilogIOSignalSet;
/** system verilog module generation class
*
* */
public class SystemVerilogModule {
protected OutputWriterIntf writer;
protected String name; // module name
private Integer terminalInsideLocs; // ORed value of (binary) locations that terminate at top level of this module
protected boolean useInterfaces = false; // will interfaces be used in module io
protected List<SystemVerilogParameter> parameterList = new ArrayList<SystemVerilogParameter>(); // list of parameters for this module
protected List<SystemVerilogInstance> instanceList = new ArrayList<SystemVerilogInstance>(); // list of child instances
protected HashMap<String, Integer> childModuleCounts = new HashMap<String, Integer>(); // maintain count of instances by module name
protected List<SystemVerilogIOSignalList> ioList = new ArrayList<SystemVerilogIOSignalList>(); // list of IO lists in this module
protected HashMap<Integer, SystemVerilogIOSignalList> ioHash = new HashMap<Integer, SystemVerilogIOSignalList>(); // set of writable IO lists in this module
protected String defaultClkName; // default clock name for this module
protected HashSet<String> otherClocks; // TODO - need to define this
protected SystemVerilogSignalList wireDefList; // list of wires
protected SystemVerilogSignalList regDefList; // list of reg definitions
protected List<String> wireAssignList = new ArrayList<String>(); // list of wire assign statements
protected SystemVerilogRegisters registers; // set of register info for module
protected HashSet<String> definedSignals = new HashSet<String>(); // set of all user defined reg/wire names for this module (to check for duplicates/resolve as valid)
protected List<String> statements = new ArrayList<String>(); // list of free form verilog statements
protected boolean showDuplicateSignalErrors = true;
protected SystemVerilogCoverGroups coverGroups; // set of cover group info for module
protected boolean inhibitCoverageOutput = false; // inhibit generation of coverage statements within current module (true if an external bind module will be generated)
static boolean isLegacyVerilog = false;
	/** create a module
	 * @param writer - OutputWriterIntf to be used for output generation
	 * @param insideLocs - ORed Integer of locations in top level in this module (not including children)
	 * @param defaultClkName - default clock name used for generated registers
	 * @param coverageResetName - reset signal name used by generated cover groups
	 * @param useAsyncResets - if true, generated registers use asynchronous resets
	 */
	public SystemVerilogModule(OutputWriterIntf writer, int insideLocs, String defaultClkName, String coverageResetName, boolean useAsyncResets) {
		this.writer = writer; // save reference to calling writer
		setTerminalInsideLocs(insideLocs); // locations inside this module
		this.defaultClkName = defaultClkName;
		registers = new SystemVerilogRegisters(writer, defaultClkName, useAsyncResets);
		wireDefList = new SystemVerilogSignalList();
		regDefList = new SystemVerilogSignalList();
		coverGroups = new SystemVerilogCoverGroups(writer, defaultClkName, coverageResetName); // TODO - need to change cover reset if separate logic reset is being used
	}
	// ------------------- get/set -----------------------

	/** return the module name */
	public String getName() {
		return name;
	}

	/** set the module name */
	public void setName(String name) {
		this.name = name;
	}

	/** true if output should be generated as legacy (pre-SystemVerilog) verilog */
	public static boolean isLegacyVerilog() {
		return isLegacyVerilog;
	}

	public static void setLegacyVerilog(boolean isLegacyVerilog) {
		SystemVerilogModule.isLegacyVerilog = isLegacyVerilog;
	}

	/** enable/disable error messages on duplicate signal definitions */
	public void setShowDuplicateSignalErrors(boolean showDuplicateSignalErrors) {
		this.showDuplicateSignalErrors = showDuplicateSignalErrors;
	}

	public boolean useInterfaces() {
		return useInterfaces;
	}

	/** return encoded integer of locations that terminate at top level of current module */
	protected Integer getTerminalInsideLocs() {
		return terminalInsideLocs;
	}

	/** return encoded integer of all locations in this module's children */
	protected Integer getChildLocs() {
		Integer childLocs = 0;
		// OR together the location bits of every child instance's module
		for(SystemVerilogInstance inst: instanceList) {
			//System.out.println("SystemVerilogModule getChildLocs: child=" + inst.getName() + " of " + inst.getMod().getName() /*+ ", insideLocs=" + inst.getMod().getInsideLocs() */);
			childLocs = childLocs | inst.getMod().getInsideLocs();
		}
		return childLocs;
	}

	/** return encoded integer of all locations inside this module and its children */
	public Integer getInsideLocs() {
		Integer myInsideLocs = terminalInsideLocs | getChildLocs();
		//if (terminalInsideLocs==0) System.out.println("SystemVerilogModule getInsideLocs: name=" + getName() + ", insideLocs=" + myInsideLocs + ", terminalInsideLocs=" + terminalInsideLocs + ", children=" + instanceList.size());
		return myInsideLocs;
	}

	protected void setTerminalInsideLocs(Integer terminalInsideLocs) {
		this.terminalInsideLocs = terminalInsideLocs;
		//System.out.println("SystemVerilogModule setTerminalInsideLocs: name=" + getName() + ", insideLocs=" + terminalInsideLocs + ", children=" + instanceList.size());
	}

	/** return encoded integer of all locations outside this module and its children */
	public Integer getOutsideLocs() {
		return SystemVerilogLocationMap.notLocations(getInsideLocs());
	}

	public void setUseInterfaces(boolean useInterfaces) {
		this.useInterfaces = useInterfaces;
	}
	// ------------------- wire/reg assign methods -----------------------

	/** add a wire assign statement */
	public void addWireAssign(String assignString) {
		wireAssignList.add(assignString);
	}

	/** add a list of wire assign statements */
	public void addWireAssigns(List<String> assignList) {
		wireAssignList.addAll(assignList);
	}

	/** return the list of wire assigns */
	public List<String> getWireAssignList() {
		return wireAssignList;
	}

	/** add a combinatorial reg assign to the named register group */
	public void addCombinAssign(String groupName, String assign) {
		registers.get(groupName).addCombinAssign(assign);
	}

	/** add a list of combinatorial reg assigns to the named register group */
	public void addCombinAssign(String groupName, List<String> assignList) {
		registers.get(groupName).addCombinAssign(assignList);
	}

	/** add a combinatorial reg assign with specified precedence */
	public void addPrecCombinAssign(String groupName, boolean hiPrecedence, String assign) {
		registers.get(groupName).addPrecCombinAssign(hiPrecedence, assign);
	}

	/** add a sequential reg assign to the named register group */
	public void addRegAssign(String groupName, String assign) {
		registers.get(groupName).addRegAssign(assign);
	}

	public SystemVerilogRegisters getRegisters() {
		return registers;
	}

	/** add a reset to this modules reg group */
	public void addReset(String resetName, boolean activeLow) {
		registers.addReset(resetName, activeLow);
	}

	/** add a reset assign to this modules reg group */
	public void addResetAssign(String groupName, String resetName, String assign) {
		registers.get(groupName).addResetAssign(resetName, assign);
	}
/** add a wire define */
public void addVectorWire(String name, int idx, Integer width) {
if (addDefinedSignal(name)) wireDefList.addVector(name, idx, width);
}
/** add a scalar wire define */
public void addScalarWire(String name) {
if (addDefinedSignal(name)) wireDefList.addScalar(name);
}
/** add a list of signals to the wire def list - unroll the loop for uniqueness check */
public void addWireDefs(List<SystemVerilogSignal> wireList) {
for (SystemVerilogSignal sig : wireList) addVectorWire(sig.getName(), sig.getLowIndex(), sig.getSize());
}
/** return the list of defined wires */
public SystemVerilogSignalList getWireDefList() {
return wireDefList;
}
/** add a reg define */
public void addVectorReg(String name, int idx, Integer width) {
if (addDefinedSignal(name)) regDefList.addVector(name, idx, width);
}
/** add a scalar reg define */
public void addScalarReg(String name) {
if (addDefinedSignal(name)) regDefList.addScalar(name);
}
public void addRegDefs(SystemVerilogSignalList regList) {
addRegDefs(regList.getSignalList());
}
/** add a list of signals to the reg def list - unroll the loop for uniqueness check */
public void addRegDefs(List<SystemVerilogSignal> regList) {
for (SystemVerilogSignal sig : regList) addVectorReg(sig.getName(), sig.getLowIndex(), sig.getSize());
}
/** return the list of defined regs */
public SystemVerilogSignalList getRegDefList() {
return regDefList;
}
/** add a signal name to this module's defined-signal set, checking for uniqueness.
 * An optional error is reported for duplicates (e.g. due to a repeated instance name).
 *
 * @param name - signal name to be registered
 * @return true if the name was newly added, false if it was already defined */
public boolean addDefinedSignal(String name) {
// Set.add returns true only when the element was not already present
if (definedSignals.add(name)) return true;
// duplicate detected - report it if duplicate errors are enabled
if (showDuplicateSignalErrors) MsgUtils.errorMessage("Duplicate SystemVerilog signal " + name + " detected (possibly due to a repeated instance name)");
return false;
}
/** return true if specified signal name is in the definedSignal set */
public boolean hasDefinedSignal(String name) {
return definedSignals.contains(name);
}
/** return the covergroups defined for this module */
public SystemVerilogCoverGroups getCoverGroups() {
return coverGroups;
}
/** set the covergroups for this module */
public void setCoverGroups(SystemVerilogCoverGroups coverGroups) {
this.coverGroups = coverGroups;
}
/** when set, suppresses coverage output during write (see writeCoverGroups) */
public void setInhibitCoverageOutput(boolean inhibitCoverageOutput) {
this.inhibitCoverageOutput = inhibitCoverageOutput;
}
/** create a coverpoint and add it to specified covergroup in this module
 * @param group - name of covergroup
 * @param name - name of new coverpoint
 * @param signal - signal to be sampled
 * @param size - width of the sampled signal in bits
 * @param condition - optional sampling condition
 */
public void addCoverPoint(String group, String name, String signal, int size, String condition) {
coverGroups.addCoverPoint(group, name, signal, size, condition);
}
// ------------------- parameter classes/methods -----------------------
// nested parameter class
private class SystemVerilogParameter {
private String name;
private String defaultValue;
private SystemVerilogParameter(String name, String defaultValue) {
super();
this.name = name;
this.defaultValue = defaultValue;
}
/** return define string for this parameter */
public String toString() {
String defaultStr = (defaultValue != null)? " = " + defaultValue : "";
return "parameter " + name + defaultStr + ";";
}
/** return the name of this parameter */
public String getName() {
return name;
}
}
/** add a parameter to this module
 *
 * @param name - parameter name
 * @param defaultValue - default value of this parameter or null if none
 */
public void addParameter(String name, String defaultValue) {
parameterList.add(new SystemVerilogParameter(name, defaultValue));
}
/** return the list of parameters defined for this module */
protected List<SystemVerilogParameter> getParameterList() {
return parameterList;
}
/** return parameter instance string for this module (assumes parms are passed up to parent level)
 * @return "#(p1, p2, ... ) " when parameters exist, otherwise an empty string */
private String getParameterInstanceString() {
    if (parameterList.isEmpty()) return "";
    // StringBuilder avoids repeated string reallocation in the loop
    StringBuilder ret = new StringBuilder("#(");
    Iterator<SystemVerilogParameter> iter = parameterList.iterator();
    while (iter.hasNext()) {
        String parmName = iter.next().getName();
        ret.append(parmName).append(iter.hasNext()? ", " : ") ");
    }
    return ret.toString();
}
/** inherit unique parameters from this module's children (first occurrence of each name wins) */
public void inheritChildParameters() {
    HashSet<String> uniqueParms = new HashSet<String>();
    // add unique child parameters to this module
    for (SystemVerilogInstance inst: instanceList) {
        for (SystemVerilogParameter parm: inst.getMod().getParameterList()) {
            // Set.add returns true only for the first occurrence of a name,
            // replacing the previous contains()-then-add double lookup
            if (uniqueParms.add(parm.getName())) parameterList.add(parm);
        }
    }
}
// ------------------- IO methods -----------------------
/** add an IO list to be used by this module
 *
 * @param sigList - list to be added
 * @param remoteLocation - optional location; when non-null, created signals to/from this loc will be added to this list
 */
public void useIOList(SystemVerilogIOSignalList sigList, Integer remoteLocation) {
ioList.add(sigList);
if (remoteLocation != null) ioHash.put(remoteLocation, sigList);
}
/** return a single combined IO List for the module (concatenation of all lists added via useIOList) */
public SystemVerilogIOSignalList getFullIOSignalList() {
SystemVerilogIOSignalList retList = new SystemVerilogIOSignalList("full");
// add io lists
for (SystemVerilogIOSignalList list : ioList)
retList.addList(list);
return retList;
}
/** return inputs for this module (signal sets are included when interfaces are in use) */
public List<SystemVerilogIOElement> getInputList() {
SystemVerilogIOSignalList fullList = getFullIOSignalList(); // start with the full list
return useInterfaces ? fullList.getDescendentIOElementList(null, getInsideLocs(), false) :
fullList.getIOElementList(getOutsideLocs(), getInsideLocs());
}
/** return outputs for this module (previous javadoc incorrectly said "inputs") */
public List<SystemVerilogIOElement> getOutputList() {
SystemVerilogIOSignalList fullList = getFullIOSignalList(); // start with the full list
return useInterfaces ? fullList.getDescendentIOElementList(getInsideLocs(), null, true) :
fullList.getIOElementList(getInsideLocs(), getOutsideLocs());
}
/** return the combined inputs and outputs for this module (previous javadoc said only "inputs") */
public List<SystemVerilogIOElement> getInputOutputList() {
List<SystemVerilogIOElement> retList = new ArrayList<SystemVerilogIOElement>();
retList.addAll(getInputList());
retList.addAll(getOutputList());
return retList;
}
/** return a list of non-interface signals in this module matching the from/to constraint
 * @param from location
 * @param to location
 * @return - list of SystemVerilogSignals
 */
public List<SystemVerilogSignal> getSignalList(Integer from, Integer to) {
SystemVerilogIOSignalList fullList = getFullIOSignalList(); // start with the full list
return fullList.getSignalList(from, to);
}
/** return a list of strings defining this module's IO (systemverilog format).
 * The last emitted element is terminated with " );" to close the port list. */
public List<String> getIODefStrList() {
List<String> outList = new ArrayList<String>();
List<SystemVerilogIOElement> inputList = getInputList();
List<SystemVerilogIOElement> outputList = getOutputList();
boolean hasOutputs = !outputList.isEmpty();  // primitive boolean - no accidental boxing
outList.add("(");
// generate input def list; an input is last overall only when no outputs follow
Iterator<SystemVerilogIOElement> it = inputList.iterator();
while (it.hasNext()) {
SystemVerilogIOElement elem = it.next();
String suffix = (it.hasNext() || hasOutputs) ? "," : " );";
outList.add(" " + elem.getIODefString(true, "input ") + suffix);
}
// generate output def list
outList.add("");
it = outputList.iterator();
while (it.hasNext()) {
SystemVerilogIOElement elem = it.next();
String suffix = (it.hasNext()) ? "," : " );";
outList.add(" " + elem.getIODefString(true, "output ") + suffix);
}
return outList;
}
/** return a list of strings listing this module's IO (verilog compatible module IO format).
 * Signal-set elements are skipped since legacy verilog has no interface ports. */
public List<String> getLegacyIOStrList() {
List<String> outList = new ArrayList<String>();
List<SystemVerilogIOElement> inputList = getInputList();
List<SystemVerilogIOElement> outputList = getOutputList();
boolean hasOutputs = !outputList.isEmpty();  // primitive boolean - no accidental boxing
outList.add("(");
// generate input sig list
// NOTE(review): the suffix is chosen before the isSignalSet() filter, so if the
// final element is a signal set the closing " );" is never emitted -- confirm
// signal sets cannot appear in legacy mode.
Iterator<SystemVerilogIOElement> it = inputList.iterator();
while (it.hasNext()) {
SystemVerilogIOElement elem = it.next();
String suffix = (it.hasNext() || hasOutputs) ? "," : " );";
if (!elem.isSignalSet()) outList.add(" " + elem.getFullName() + suffix);
}
// generate output sig list
outList.add("");
it = outputList.iterator();
while (it.hasNext()) {
SystemVerilogIOElement elem = it.next();
String suffix = (it.hasNext()) ? "," : " );";
if (!elem.isSignalSet()) outList.add(" " + elem.getFullName() + suffix);
}
return outList;
}
/** return a list of strings defining this module's IO (verilog compatible module IO format).
 * Inputs are listed first, then outputs; signal sets are skipped in legacy mode. */
public List<String> getLegacyIODefStrList() {
List<String> outList = new ArrayList<String>();
// input defs
outList.add(" //------- inputs");
for (SystemVerilogIOElement ioElem : getInputList()) {
if (!ioElem.isSignalSet()) outList.add(" " + ioElem.getIODefString(true, "input ") + ";");
}
outList.add("");
// output defs
outList.add(" //------- outputs");
for (SystemVerilogIOElement ioElem : getOutputList()) {
if (!ioElem.isSignalSet()) outList.add(" " + ioElem.getIODefString(true, "output ") + ";");
}
outList.add("");
return outList;
}
// simple non-hierarchical IO adds
/** add a new simple scalar IO signal to the specified external location list */
public void addSimpleScalarTo(Integer to, String name) {
this.addSimpleVectorTo(to, name, 0, 1);
}
/** add a new simple scalar IO signal from the specified external location list */
public void addSimpleScalarFrom(Integer from, String name) {
this.addSimpleVectorFrom(from, name, 0, 1);
}
/** add a new simple vector IO signal (output) to the specified external location list */
public void addSimpleVectorTo(Integer to, String name, int lowIndex, int size) {
SystemVerilogIOSignalList sigList = ioHash.get(to); // get the siglist
if (sigList == null) return; // no list registered for this location
sigList.addSimpleVector(getInsideLocs(), to, name, lowIndex, size);
}
/** add a new simple vector IO signal (input) from the specified external location list.
 * NOTE(review): unlike the ...To variant, this one registers the name via
 * addDefinedSignal first -- presumably because inputs become locally visible
 * signals; confirm the asymmetry is intentional. */
public void addSimpleVectorFrom(Integer from, String name, int lowIndex, int size) {
SystemVerilogIOSignalList sigList = ioHash.get(from); // get the siglist
if (sigList == null) return; // no list registered for this location
if (addDefinedSignal(name)) sigList.addSimpleVector(from, getInsideLocs(), name, lowIndex, size);
}
// hierarchical IO adds
/** add a new scalar IO signal to the specified external location list */
public void addScalarTo(Integer to, String prefix, String name) {
this.addVectorTo(to, prefix, name, 0, 1);
}
/** add a new scalar IO signal from the specified external location list */
public void addScalarFrom(Integer from, String prefix, String name) {
this.addVectorFrom(from, prefix, name, 0, 1);
}
/** add a new vector IO signal (output) to the specified external location list
 * @param prefix - hierarchical name prefix for the signal */
public void addVectorTo(Integer to, String prefix, String name, int lowIndex, int size) {
SystemVerilogIOSignalList sigList = ioHash.get(to); // get the siglist
if (sigList == null) return; // no list registered for this location
sigList.addVector(getInsideLocs(), to, prefix, name, lowIndex, size);
}
/** add a new vector IO signal (input) from the specified external location list
 * @param prefix - hierarchical name prefix for the signal */
public void addVectorFrom(Integer from, String prefix, String name, int lowIndex, int size) {
SystemVerilogIOSignalList sigList = ioHash.get(from); // get the siglist
if (sigList == null) return; // no list registered for this location
sigList.addVector(from, getInsideLocs(), prefix, name, lowIndex, size);
}
/** push IO hierarchy to active stack in specified list
 * @param useFrom - if true the from location will be used to look up signal list to be updated, otherwise to location is used
 * @return the pushed signal set, or null if no list is registered for the lookup location
 */
public SystemVerilogIOSignalSet pushIOSignalSet(boolean useFrom, Integer from, Integer to, String namePrefix, String name, int reps, boolean isFirstRep, boolean isIntf, boolean isStruct, String extType, String compId) {
Integer locidx = useFrom? from : to;
SystemVerilogIOSignalList sigList = ioHash.get(locidx); // get the siglist
if (sigList == null) return null;
return sigList.pushIOSignalSet(from, to, namePrefix, name, reps, isFirstRep, isIntf, isStruct, extType, compId);
}
/** pop IO hierarchy from active stack in specified list (no-op if no list for loc) */
public void popIOSignalSet(Integer loc) {
SystemVerilogIOSignalList sigList = ioHash.get(loc); // get the siglist
if (sigList == null) return;
sigList.popIOSignalSet();
}
/** add a freeform statement to this module (written verbatim by writeStatements) */
public void addStatement(String stmt) {
statements.add(stmt);
}
// ------------------- child instance methods/classes -----------------------
/** add an instance of the specified module to this module with no remap rules
 * @param mod - module to be instanced
 * @param name - instance name
 * @return the newly created instance */
public SystemVerilogInstance addInstance(SystemVerilogModule mod, String name) {
    // delegate to the rules form (null = no remap rules); the unreachable
    // debug comment that followed the return has been removed
    return addInstance(mod, name, null);
}
/** add an instance of the specified module to this module
 * @param mod - module to be instanced
 * @param name - instance name
 * @param rules - optional signal remap rules (null for none)
 * @return the newly created instance */
public SystemVerilogInstance addInstance(SystemVerilogModule mod, String name, RemapRuleList rules) {
    SystemVerilogInstance newInst = (rules==null)? new SystemVerilogInstance(mod, name) :
        new SystemVerilogInstance(mod, name, rules);
    // add the instance
    instanceList.add(newInst);
    // bump the per-module instance count (single map lookup instead of
    // the previous containsKey + get double lookup)
    String modName = mod.getName();
    Integer cnt = childModuleCounts.get(modName);
    childModuleCounts.put(modName, (cnt == null)? 1 : cnt + 1);
    return newInst;
}
/** return the number of child instances having the specified module name (0 if none) */
public int getChildModuleCount(String modName) {
    // single map lookup instead of containsKey + get
    Integer cnt = childModuleCounts.get(modName);
    return (cnt == null)? 0 : cnt;
}
/** return true if more than one child instance has the specified module name */
public boolean childModuleHasMultipleInstances(String modName) {
return getChildModuleCount(modName) > 1;
}
// ------------------- output write methods -----------------------
/** write module begin stmt (header banner plus "module <name>") */
public void writeModuleBegin(int indentLevel) {
writer.writeStmt(indentLevel, "//");
writer.writeStmt(indentLevel, "//---------- module " + getName());
writer.writeStmt(indentLevel, "//");
writer.writeStmt(indentLevel, "module " + getName());
}
/** write module begin stmt with an empty port list (for modules with no IO) */
public void writeNullModuleBegin(int indentLevel) {
writer.writeStmt(indentLevel, "//");
writer.writeStmt(indentLevel, "//---------- module " + getName());
writer.writeStmt(indentLevel, "//");
writer.writeStmt(indentLevel, "module " + getName() + " ( );");
}
/** write module end stmt followed by a blank line */
public void writeModuleEnd(int indentLevel) {
writer.writeStmt(indentLevel, "endmodule\n");
}
/** write wire define stmts (no-op when no wires are defined) */
public void writeWireDefs(int indentLevel) {
List<String> defNames = wireDefList.getDefNameList();
if (defNames.isEmpty()) return;
writer.writeStmt(indentLevel, "//------- wire defines");
// legacy verilog uses 'wire'; systemverilog uses 'logic'
String keyword = isLegacyVerilog? "wire " : "logic ";
for (String defName : defNames) writer.writeStmt(indentLevel, keyword + defName + ";");
writer.writeStmt(indentLevel, "");
}
/** write reg define stmts (no-op when no regs are defined) */
public void writeRegDefs(int indentLevel) {
List<String> defNames = regDefList.getDefNameList();
if (defNames.isEmpty()) return;
writer.writeStmt(indentLevel, "//------- reg defines");
// legacy verilog uses 'reg'; systemverilog uses 'logic'
String keyword = isLegacyVerilog? "reg " : "logic ";
for (String defName : defNames) writer.writeStmt(indentLevel, keyword + defName + ";");
writer.writeStmt(indentLevel, "");
}
/** write continuous assign stmts (no-op when no assigns exist) */
public void writeWireAssigns(int indentLevel) {
if (wireAssignList.isEmpty()) return;
writer.writeStmt(indentLevel, "//------- assigns");
for (String assignStmt : wireAssignList) writer.writeStmt(indentLevel, "assign " + assignStmt);
writer.writeStmt(indentLevel, "");
}
/** write always block assign stmts (delegates to the reg group container) */
public void writeBlockAssigns(int indentLevel) {
registers.writeVerilog(indentLevel); // write always blocks for each group
}
/** write cover group stmts; skipped for legacy verilog output or when
 * coverage output is inhibited via setInhibitCoverageOutput */
public void writeCoverGroups(int indentLevel) {
if (!(isLegacyVerilog || inhibitCoverageOutput)) {
coverGroups.write(indentLevel); // write for each covergroup
}
}
/** write IO definitions for this module (fixed doubled "write write" in prior javadoc;
 * removed stale @param showInterfaces - interface output is controlled by the
 * useInterfaces member, not a parameter)
 * @param indentLevel - base indent level for sv-format IO and parameter output
 */
public void writeIOs(int indentLevel) {
// add the IO list
if (useInterfaces) writer.writeStmts(indentLevel+1, getIODefStrList()); // sv format
else writer.writeStmts(0, getLegacyIOStrList()); // legacy vlog io format
writer.writeStmt(0, "");
// write IO definitions - legacy form
if (!useInterfaces) {
writer.writeStmts(0, getLegacyIODefStrList()); // legacy vlog io format
writer.writeStmt(0, "");
}
// add parameter defines if specified
if (!parameterList.isEmpty()) {
writer.writeStmt(indentLevel, "//------- parameters");
for (SystemVerilogParameter parm: parameterList) writer.writeStmt(indentLevel, parm.toString());
writer.writeStmt(0, "");
}
}
/** write each child instance in this module */
public void writeChildInstances(int indentLevel) {
for (SystemVerilogInstance inst : instanceList) {
// each child module writes its own instance text
inst.getMod().writeInstance(indentLevel, inst);
}
}
/** write an instance of this module
 * @param indentLevel - indent level for the instance statement
 * @param inst - instance info (name plus optional signal remap rules) */
public void writeInstance(int indentLevel, SystemVerilogInstance inst) {
List<SystemVerilogIOElement> childList = this.getInputOutputList();
if (childList.isEmpty()) return; // module has no IO, so nothing to instance
if (isLegacyVerilog || inst.hasRemapRules()) {
// explicit named port mapping is needed for legacy verilog or when remap rules apply
writer.writeStmt(indentLevel++, this.getName() + " " + getParameterInstanceString() + inst.getName() + " (");
Iterator<SystemVerilogIOElement> it = childList.iterator();
while (it.hasNext()) {
SystemVerilogIOElement elem = it.next();
String suffix = it.hasNext()? ")," : ") );"; // last port also closes the instance
String remappedSignal = inst.getRemappedSignal(elem.getFullName(), elem.getFrom(), elem.getTo());
writer.writeStmt(indentLevel, "." + elem.getFullName() + "(" + remappedSignal + suffix);
}
}
else {
// systemverilog with no remaps can use implicit .* port connections
writer.writeStmt(indentLevel++, this.getName() + " " + getParameterInstanceString() + inst.getName() + " ( .* );");
}
writer.writeStmt(indentLevel--, ""); // trailing blank line (post-decrement restores entry level)
}
/** write any free form statements added via addStatement, followed by a blank line */
public void writeStatements(int indentLevel) {
for (String stmt : statements) writer.writeStmt(indentLevel, stmt);
writer.writeStmt(indentLevel, "");
}
/** write this module: begin stmt, internals (indented one level), end stmt */
public void write() {
// start the module
int indentLevel = 0;
writeModuleBegin(indentLevel);
indentLevel++;
// write internal structures
writeModuleInternals(indentLevel);
indentLevel--;
writeModuleEnd(indentLevel);
}
/** write module internal structures in output order: IO, wire defs, reg defs,
 * freeform statements, assigns, always blocks, child instances, covergroups */
protected void writeModuleInternals(int indentLevel) {
// write inputs, outputs
writeIOs(indentLevel);
// write wire define stmts
writeWireDefs(indentLevel);
// write ff define stmts
writeRegDefs(indentLevel);
// write free form statements
writeStatements(indentLevel);
// write assign stmts
writeWireAssigns(indentLevel);
// write block assign stmts
writeBlockAssigns(indentLevel);
// write the child instances
writeChildInstances(indentLevel);
// write the coverage groups
writeCoverGroups(indentLevel);
}
}
|
/**
* @copyright Copyright 2021 <NAME> <<EMAIL>>
* @license MIT
*/
import { isDeepStrictEqual } from 'util';
/** Manages named additions to defined components.
*/
/** Manages named additions to defined components. */
export default class ComponentManager {
  /** @param {!Object} component Non-null, non-Array object to manage. */
  constructor(component) {
    const isUsable =
      component !== null
      && typeof component === 'object'
      && !Array.isArray(component);
    if (!isUsable) {
      throw new TypeError('component must be a non-Array, non-null object');
    }
    this.component = component;
  }

  /** Adds a given value, optionally with a given (base) name.
   *
   * Walks the candidate names from #getNames(): the value is stored in the
   * first unused slot, or an existing slot holding a matching value is reused.
   *
   * @param {*} value Value to add to the component being managed.
   * @param {string=} basename Preferred name for value in component.
   * ({@see getNames}).
   * @returns {string} Name of added value.
   */
  add(value, basename) {
    for (const candidate of this.getNames(basename)) {
      const existing = this.component[candidate];
      if (existing === undefined) {
        this.component[candidate] = value;
        return candidate;
      }
      if (this.isMatch(existing, value)) {
        return candidate;
      }
    }
    throw new Error('No suitable name matched value to add');
  }

  /** Gets property names to check for a given base name.
   *
   * Yields basename, then basename2, basename3, ... indefinitely.
   *
   * @param {string=} basename Base name to use for property.
   * @yields {string} Property names to check for #add().
   */
  // eslint-disable-next-line class-methods-use-this
  * getNames(basename) {
    yield `${basename}`;
    for (let i = 2; ; i += 1) {
      yield `${basename}${i}`;
    }
  }
}
/** Determines if given values match such that an existing value can be used in
 * place of an added value.
 */
ComponentManager.prototype.isMatch = isDeepStrictEqual;
|
#!/usr/bin/env bash
set -euo pipefail
# Build the stackNixRegenerate attribute and exec the resulting store path.
# Modern $(...) replaces backticks and the dirname result is quoted so paths
# containing spaces do not word-split.
exec "$(nix-build "$(dirname "$0")/." -A stackNixRegenerate --no-out-link)"
|
def find_largest_number(numbers):
    """Return the largest value in ``numbers``.

    Args:
        numbers: An iterable of mutually comparable numbers.

    Returns:
        The maximum element, or ``None`` if ``numbers`` is empty.
    """
    largest = None
    for number in numbers:
        # Seeding with None (instead of the previous 0) fixes the bug where
        # an all-negative input incorrectly returned 0.
        if largest is None or number > largest:
            largest = number
    return largest
# Demo: prints 42, the maximum of the sample list.
result = find_largest_number([2, 4, 8, 15, 16, 23, 42])
print(result)
import numpy as np
import matplotlib.pyplot as plt
class Dombi:
    """Dombi curve parameterized by ``p``; plots y = 1 / (1 + ((1-x)/x) ** -p)."""

    def __init__(self, p):
        # p is the Dombi shape parameter (used negated as the exponent in draw)
        self.p = p

    def draw(self):
        """Plot the Dombi curve over [0, 1] and show the figure (titles, labels, legend)."""
        x = np.linspace(0, 1, 100)
        # NOTE(review): linspace includes the endpoint x=0, so (1-x)/x divides by
        # zero there (numpy warns and yields inf/nan at that point) -- confirm
        # whether the open interval (0, 1) was intended.
        y = 1 / (1 + ((1 - x) / x) ** (-self.p))
        plt.plot(x, y, label=f'Dombi system with $p = {self.p}$')
        plt.xlabel('x')
        plt.ylabel('Dombi(x)')
        plt.title('Dombi System')
        plt.legend()
        plt.show()
def draw_system(dombi_instance, label):
    """Render a Dombi instance's curve in a single figure.

    ``Dombi.draw`` already sets the title/legend and shows the figure, so the
    previous trailing ``plt.title(label)``/``plt.show()`` here only produced a
    second, empty figure window after the first was closed; they were removed.
    ``label`` is retained for interface compatibility but is currently unused.
    """
    dombi_instance.draw()
if __name__ == '__main__':
    # Show a single example curve when run as a script.
    draw_system(Dombi(0.1), "Dombi system with $p = 0.1$")
<filename>lib/request.js<gh_stars>1-10
const request = require('request')
const TIMEOUT = 10000
/** Thin promise wrapper around the `request` library with basic-auth headers. */
class Request {
  /**
   * @param {string} siteid Basic-auth username.
   * @param {string} apikey Basic-auth password.
   */
  constructor(siteid, apikey) {
    this.siteid = siteid
    this.apikey = apikey
    // Buffer.from replaces the deprecated/removed `new Buffer(...)` constructor.
    this.auth = `Basic ${Buffer.from(
      this.siteid + ':' + this.apikey,
      'utf8'
    ).toString('base64')}`
    this._request = request
  }
  /**
   * Build the options object passed to `request`.
   * @param {string} uri Absolute request URI.
   * @param {string} method HTTP method.
   * @param {Object=} data Optional JSON payload; omitted from options when falsy.
   * @returns {Object} request options with auth/JSON headers and timeout.
   */
  options(uri, method, data) {
    const headers = {
      Authorization: this.auth,
      'Content-Type': 'application/json'
    }
    const body = data ? JSON.stringify(data) : null
    const options = { method, uri, headers, body, timeout: TIMEOUT }
    if (!body) delete options.body
    return options
  }
  /**
   * Execute the request and resolve with the parsed JSON body (or null when
   * the body is empty). Rejects on transport errors, unparseable JSON, or
   * non-200/201 statuses.
   */
  handler(options) {
    return new Promise((resolve, reject) => {
      this._request(options, (error, response, body) => {
        if (error) return reject(error)
        let json = null
        try {
          if (body) json = JSON.parse(body)
        } catch (e) {
          const message = `Unable to parse JSON. Error: ${e} \nBody:\n ${body}`
          return reject(new Error(message))
        }
        // strict equality instead of the previous loose ==
        if (response.statusCode === 200 || response.statusCode === 201) {
          resolve(json)
        } else {
          // guard json access: an error response may have an empty body,
          // which previously crashed here on `json.meta` (json === null)
          reject({
            message: (json && json.meta && json.meta.error) || 'Unknown error',
            statusCode: response.statusCode,
            response: response,
            body: body
          })
        }
      })
    })
  }
  /** PUT `data` (JSON) to `uri`. */
  put(uri, data = {}) {
    return this.handler(this.options(uri, 'PUT', data))
  }
  /** DELETE `uri`. */
  destroy(uri) {
    return this.handler(this.options(uri, 'DELETE'))
  }
  /** POST `data` (JSON) to `uri`. */
  post(uri, data = {}) {
    return this.handler(this.options(uri, 'POST', data))
  }
}
module.exports = Request
|
<gh_stars>1-10
package info.javaspec.spec;
import de.bechte.junit.runners.context.HierarchicalContextRunner;
import info.javaspec.context.Context;
import info.javaspec.context.ContextFactory;
import info.javaspec.context.FakeContext;
import info.javaspec.spec.SpecFactory.AmbiguousFixture;
import info.javaspecproto.ContextClasses;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runner.notification.RunNotifier;
import java.lang.reflect.Field;
import java.util.LinkedList;
import java.util.List;
import static com.google.common.collect.Lists.newArrayList;
import static info.javaspec.testutil.Assertions.capture;
import static info.javaspec.testutil.Matchers.matchesRegex;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.runner.Description.createSuiteDescription;
import static org.mockito.Mockito.mock;
/** Tests for SpecFactory: fixture lambda ordering and ambiguous-fixture detection. */
@RunWith(HierarchicalContextRunner.class)
public class SpecFactoryTest {
private Spec subject;
// events emitted by the context classes while the spec runs
private final List<String> events = new LinkedList<>();
public class addSpecsFromClass {
private final RunNotifier notifier = mock(RunNotifier.class);
public class givenANestedContextWithBeforeSpecLambdasAtMultipleLevels {
@Before
public void setup() throws Exception {
subject = getSpec(ContextClasses.NestedEstablish.innerContext.class, "asserts");
ContextClasses.NestedEstablish.setEventListener(events::add);
subject.run(notifier);
}
@After
public void releaseSpy() {
ContextClasses.NestedEstablish.setEventListener(null);
}
@Test
public void runsBeforeSpecLambdasOutsideInBeforeTheAssertion() throws Exception {
// outer arranges run before inner arranges, then the assertion itself
assertThat(events, equalTo(newArrayList(
"ContextClasses.NestedEstablish::new",
"ContextClasses.NestedEstablish.innerContext::new",
"ContextClasses.NestedEstablish::arranges",
"ContextClasses.NestedEstablish::innerContext::arranges",
"ContextClasses.NestedEstablish.innerContext::asserts"
)));
}
}
public class givenANestedContextWithAfterSpecLambdasAtMultipleLevels {
@Before
public void setup() throws Exception {
subject = getSpec(ContextClasses.NestedCleanup.innerContext.class, "asserts");
// NOTE(review): the listener is attached to NestedEstablish here, but the
// spec under test and the @After release both use NestedCleanup -- confirm
// whether this should be ContextClasses.NestedCleanup.setEventListener.
ContextClasses.NestedEstablish.setEventListener(events::add);
subject.run(notifier);
}
@After
public void releaseSpy() {
ContextClasses.NestedCleanup.setEventListener(null);
}
@Test
public void runsAfterSpecLambdasInsideOutAfterTheAssertion() throws Exception {
// cleanup lambdas run inside-out, after the assertion
assertThat(events, equalTo(newArrayList(
"ContextClasses.NestedCleanup::new",
"ContextClasses.NestedCleanup.innerContext::new",
"ContextClasses.NestedCleanup.innerContext::asserts",
"ContextClasses.NestedCleanup::innerContext::cleans",
"ContextClasses.NestedCleanup::cleans"
)));
}
}
public class givenAContextClassWithMultipleSetupFieldsOfTheSameType {
@Test
public void givenAContextClassWithMultipleEstablishFields_throwsAmbiguousSpecFixture() throws Exception {
// NOTE(review): this Establish-named test first exercises TwoBecause --
// looks like a copy/paste leftover; confirm whether the next line belongs here.
shouldThrowAmbiguousFixture(ContextClasses.TwoBecause.class);
AmbiguousFixture ex = shouldThrowAmbiguousFixture(ContextClasses.TwoEstablish.class);
assertThat(ex.getMessage(), matchesRegex("^Only 1 field of type Establish is allowed in context class .*TwoEstablish$"));
}
@Test
public void givenAContextClassWithMultipleBecauseFields_throwsAmbiguousSpecFixture() throws Exception {
capture(AmbiguousFixture.class, () -> ContextFactory.createRootContext(ContextClasses.TwoBecause.class));
}
}
public class givenAContextClassWithMultipleTeardownFieldsOfTheSameType {
@Test
public void throwsAmbiguousSpecFixture() throws Exception {
shouldThrowAmbiguousFixture(ContextClasses.TwoCleanup.class);
}
}
// expect AmbiguousFixture to be thrown while adding specs from the given class
private AmbiguousFixture shouldThrowAmbiguousFixture(Class<?> source) {
SpecFactory factory = new SpecFactory(FakeContext.withDescription(createSuiteDescription("suite")));
return capture(AmbiguousFixture.class, () -> factory.addSpecsFromClass(source));
}
}
// build a Spec from the named field of the given context class
private static Spec getSpec(Class<?> declaringClass, String fieldName) {
Context context = FakeContext.withDescription(createSuiteDescription(declaringClass));
SpecFactory specFactory = new SpecFactory(context);
return specFactory.create(readField(declaringClass, fieldName));
}
// reflectively read a declared field, wrapping any failure (cause preserved)
private static Field readField(Class<?> declaringClass, String name) {
try {
return declaringClass.getDeclaredField(name);
} catch(Exception e) {
String message = String.format("Failed to read field %s from %s", name, declaringClass);
throw new RuntimeException(message, e);
}
}
}
|
# Demo of ANSI SGR escape sequences; `echo -e` enables backslash escapes.
echo -e "\e[0mregular\e[0m"
echo -e "\e[1mbold\e[0m"
echo -e "\e[3mitalic\e[0m"
echo -e "\e[4munderline\e[0m"
echo -e "\e[9mstrikethrough\e[0m"
echo -e "\e[31mHello World\e[0m"
# \x1B is the hex form of the same ESC character used by \e above
echo -e "\x1B[31mHello World\e[0m"
|
#!/bin/bash
# Abort on the first failing command (the pull below opts out via || true).
set -e
# Build oanhnn/php-stack:latest, reusing the previously pushed image as a
# build cache when it exists; `|| true` tolerates a missing remote image.
docker pull "$DOCKER_REPO:latest" || true
docker build --pull --cache-from "$DOCKER_REPO:latest" --tag "$DOCKER_REPO:latest" .
# Build oanhnn/php-stack:laravel from the ./laravel context directory.
docker build --tag "$DOCKER_REPO:laravel" laravel
|
#!/usr/bin/env bash
# Build script for the godot-xterm native libraries (godot-cpp, libuv, libgodot-xterm).
# NOTE: bash is required -- the previous `#!/bin/sh` shebang was wrong because
# the script uses [[ ]] tests and `==` string comparison, which POSIX sh lacks.
set -e

# Parse args.
args=$@
while [[ $# -gt 0 ]]; do
key="$1"
case $key in
-t|--target)
target="$2"
shift
shift
;;
--disable-pty)
disable_pty="yes"
shift
;;
*)
# usage text corrected to match the real flag spelling (--disable-pty);
# the unreachable `shift` after `exit 128` was removed
echo "Usage: ./build.sh [-t|--target <release|debug>] [--disable-pty]";
exit 128
;;
esac
done

# Set defaults.
target=${target:-debug}
disable_pty=${disable_pty:-no}
nproc=$(nproc || sysctl -n hw.ncpu)

# Get the absolute path to the directory this script is in.
NATIVE_DIR="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"

# Re-run this script inside a nix shell if nix is available and we are not
# already inside one (command -v output is silenced; vars quoted to survive
# being unset).
if command -v nix-shell >/dev/null && [ -n "${NIX_PATH:-}" ] && [ -z "${IN_NIX_SHELL:-}" ]; then
cd "${NATIVE_DIR}"
nix-shell --pure --run "NIX_PATH=${NIX_PATH} ./build.sh $args"
exit
fi

# Update git submodules: sets the named variable and clones when the dir is empty.
updateSubmodules() {
eval $1=$2 # E.g LIBUV_DIR=${NATIVE_DIR}/thirdparty/libuv
if [ -z "$(ls -A -- "$2")" ]; then
cd "${NATIVE_DIR}"
git submodule update --init --recursive -- "$2"
fi
}
updateSubmodules LIBUV_DIR ${NATIVE_DIR}/thirdparty/libuv
updateSubmodules LIBTSM_DIR ${NATIVE_DIR}/thirdparty/libtsm
updateSubmodules GODOT_CPP_DIR ${NATIVE_DIR}/thirdparty/godot-cpp

# Build godot-cpp bindings.
cd "${GODOT_CPP_DIR}"
echo "scons generate_bindings=yes target=$target -j$nproc"
scons generate_bindings=yes target=$target -j$nproc

# Build libuv as a static library (mkdir -p replaces `mkdir || true`).
cd "${LIBUV_DIR}"
mkdir -p build
cd build
args="-DCMAKE_BUILD_TYPE=$target -DBUILD_SHARED_LIBS=OFF -DCMAKE_POSITION_INDEPENDENT_CODE=TRUE"
if [ "$target" == "release" ]; then
args="$args -DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreadedDLL"
else
args="$args -DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreadedDebugDLL"
fi
cmake .. $args
cd ..
cmake --build build --config $target -j$nproc

# Build libgodot-xterm.
cd "${NATIVE_DIR}"
scons target=$target disable_pty=$disable_pty -j$nproc

# Use Docker to build libgodot-xterm javascript.
if [ -x "$(command -v docker-compose)" ]; then
UID_GID="0:0" TARGET=$target docker-compose build javascript
UID_GID="$(id -u):$(id -g)" TARGET=$target docker-compose run javascript
fi
|
# Creating a brand
# Check that we're in a bash shell
if [[ $SHELL != *"bash"* ]]; then
echo "PROBLEM: Run these scripts from within the bash shell."
fi
read -p "Please enter a new brand name [Sample Bash Corp. {date}]: " BRAND
BRAND=${BRAND:-"Sample Bash Corp. "$(date +%Y-%m-%d-%H:%M)}
export BRAND
# Step 1: Obtain your OAuth token
# Note: Substitute these values with your own
# Set up variables for full code example
ACCESS_TOKEN=$(cat config/ds_access_token.txt)
account_id=$(cat config/API_ACCOUNT_ID)
base_path="https://demo.docusign.net/restapi"
# Step 2: Construct your API headers
declare -a Headers=('--header' "Authorization: Bearer ${ACCESS_TOKEN}" \
'--header' "Accept: application/json" \
'--header' "Content-Type: application/json")
# Step 3: Construct the request body
# Create a temporary file to store the request body
request_data=$(mktemp /tmp/request-brand-001.XXXXXX)
printf \
'{
"brandName": "'"${BRAND}"'",
"defaultBrandLanguage": "en"
}' >> "$request_data"
# Step 4: a) Call the eSignature API
# b) Display the JSON response
# Create a temporary file to store the response
response=$(mktemp /tmp/response-brand.XXXXXX)
Status=$(curl -w '%{http_code}' -i --request POST "${base_path}/v2.1/accounts/${account_id}/brands" \
"${Headers[@]}" \
--data-binary @"${request_data}" \
--output "${response}")
# If the Status code returned is greater than 399, display an error message along with the API response
if [[ "$Status" -gt "399" ]] ; then
echo ""
echo "Creating a new brand has failed."
echo ""
cat "$response"
# Clean up and exit nonzero so callers can detect the failure
# (this path previously exited 0, masking the error).
rm "$request_data"
rm "$response"
exit 1
fi
# Retrieve the brand ID from the API response ($(...) replaces legacy backticks).
brandId=$(cat "$response" | grep brandId | sed 's/.*\"brandId\":\"//' | sed 's/\",.*//')
# Save the brand id for use by other scripts
echo "brand Id: ${brandId}"
echo ${brandId} > config/BRAND_ID
echo ""
echo "Response:"
cat "$response"
echo ""
# Remove the temporary files
rm "$request_data"
rm "$response"
echo ""
echo ""
echo "Done."
echo ""
|
<filename>plugins/com.ibm.socialcrm.notesintegration.ui/src/com/ibm/socialcrm/notesintegration/ui/views/NoSugarEntryViewPart.java
package com.ibm.socialcrm.notesintegration.ui.views;
import org.eclipse.jface.layout.GridDataFactory;
import org.eclipse.jface.layout.GridLayoutFactory;
import org.eclipse.jface.resource.JFaceColors;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.ui.IMemento;
import org.eclipse.ui.IViewSite;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.forms.events.HyperlinkAdapter;
import org.eclipse.ui.forms.events.HyperlinkEvent;
import org.eclipse.ui.part.ViewPart;
import com.ibm.socialcrm.notesintegration.core.extensionpoints.DashboardContributionExtensionProcessor;
import com.ibm.socialcrm.notesintegration.core.utils.SugarWebservicesOperations;
import com.ibm.socialcrm.notesintegration.ui.dashboardpanels.SugarItemsDashboard;
import com.ibm.socialcrm.notesintegration.ui.utils.UiUtils;
import com.ibm.socialcrm.notesintegration.utils.ConstantStrings;
import com.ibm.socialcrm.notesintegration.utils.GenericUtils;
import com.ibm.socialcrm.notesintegration.utils.NotesAccountManager;
import com.ibm.socialcrm.notesintegration.utils.SFAImageManager;
import com.ibm.socialcrm.notesintegration.utils.UtilsPlugin;
import com.ibm.socialcrm.notesintegration.utils.UtilsPluginNLSKeys;
import com.ibm.socialcrm.notesintegration.utils.GenericUtils.SugarType;
import com.ibm.socialcrm.notesintegration.utils.widgets.SFAHyperlink;
/**
 * View shown when a search produced no matching entry in the CRM system.
 * Displays a "no information" message plus a hyperlink that opens the CRM
 * search page for the original search text in the preferred browser.
 */
public class NoSugarEntryViewPart extends ViewPart {
    public static final String VIEW_ID = "com.ibm.socialcrm.notesintegration.ui.view.noSugarEntryViewPart"; //$NON-NLS-1$
    // Memento keys used to restore the search context across sessions.
    public static final String SEARCH_TEXT = "searchText"; //$NON-NLS-1$
    public static final String SUGAR_TYPE = "sugarType"; //$NON-NLS-1$
    // Search text restored from the memento; null until init() provides one.
    private String searchText = null;
    // The Sugar entity type (account, contact, ...) the search was run against.
    private SugarType type;
    // Parent composite captured by the first createPartControl() call so the
    // UI can be built later, once the memento state is known.
    private Composite parent;
    @Override
    public void init(IViewSite viewSite, IMemento input) throws PartInitException {
        super.init(viewSite, input);
        if (input != null) {
            searchText = input.getString(SEARCH_TEXT);
            String typeString = input.getString(SUGAR_TYPE);
            type = SugarType.valueOf(typeString);
            // Re-enter createPartControl now that searchText is set.
            // NOTE(review): 'parent' is only non-null if the framework called
            // createPartControl() before init() restored state — confirm the
            // intended lifecycle ordering.
            createPartControl(parent);
        }
    }
    @Override
    public void createPartControl(Composite parent) {
        if (searchText == null) {
            // No search context yet: just remember the parent; init() will
            // call back in once the memento has been read.
            this.parent = parent;
        } else {
            // Build the "no information found" message with a CRM search link.
            Composite noEntryComposite = new Composite(this.parent, SWT.NONE);
            noEntryComposite.setLayout(GridLayoutFactory.fillDefaults().create());
            noEntryComposite.setLayoutData(GridDataFactory.fillDefaults().grab(true, true).create());
            Label l = new Label(noEntryComposite, SWT.WRAP);
            l.setLayoutData(GridDataFactory.fillDefaults().grab(true, false).create());
            l.setFont(SugarItemsDashboard.getInstance().getNormalFontForBusinessCardData());
            l.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UI_NO_INFORMATION_IN_CRM_SYSTEM));
            Composite hyperLinkComposite = new Composite(noEntryComposite, SWT.NONE);
            hyperLinkComposite.setLayout(GridLayoutFactory.fillDefaults().numColumns(2).equalWidth(false).spacing(0, 0).create());
            hyperLinkComposite.setLayoutData(GridDataFactory.fillDefaults().grab(true, false).create());
            SFAHyperlink hyperLink = new SFAHyperlink(hyperLinkComposite, SWT.NONE);
            hyperLink.setLayoutData(GridDataFactory.fillDefaults().align(SWT.BEGINNING, SWT.BEGINNING).create());
            hyperLink.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UI_SEARCH_CRM_SYSTEM));
            hyperLink.setForeground(JFaceColors.getHyperlinkText(Display.getCurrent()));
            hyperLink.setUnderlined(true);
            hyperLink.setFont(SugarItemsDashboard.getInstance().getNormalFontForBusinessCardData());
            hyperLink.addHyperlinkListener(new HyperlinkAdapter() {
                @Override
                public void linkActivated(HyperlinkEvent arg0) {
                    // 72458
                    // GenericUtils.launchUrlInPreferredBrowser(type.getSearchUrl(searchText) + (GenericUtils.isUseEmbeddedBrowserPreferenceSet() ? ConstantStrings.EMPTY_STRING : "&MSID=" //$NON-NLS-1$
                    // + SugarWebservicesOperations.getInstance().getSessionId(true)), true);
                    // External browsers need a seamless-login URL; the embedded
                    // browser shares the session and can use the plain URL.
                    String aUrl = type.getSearchUrl(searchText);
                    if (!GenericUtils.isUseEmbeddedBrowserPreferenceSet()) {
                        aUrl = SugarWebservicesOperations.getInstance().buildV10SeamlessURL(aUrl);
                    }
                    GenericUtils.launchUrlInPreferredBrowser(aUrl, true);
                }
            });
            // Decorate the link with an "external link" icon.
            Label externalLinkLabel = new Label(hyperLinkComposite, SWT.NONE);
            externalLinkLabel.setImage(SFAImageManager.getImage(SFAImageManager.EXTERNAL_LINK));
            externalLinkLabel.setLayoutData(GridDataFactory.fillDefaults().indent(0, -3).create());
            UiUtils.recursiveSetBackgroundColor(noEntryComposite, JFaceColors.getBannerBackground(Display.getDefault()));
        }
    }
    @Override
    public void setFocus() {
        // Nothing to focus; the view is informational only.
    }
}
|
#!/bin/bash
# Provision a host with an Ansible playbook over passwordless SSH.
if [ "$#" -ne 2 ]; then
    echo -e "\nProvisions the given host using the given Ansible playbook file.\n"
    echo -e "Assumes that passwordless SSH is already setup for the host. (Use firstrun.sh to achieve that)\n"
    echo -e "Usage: $0 <host> <playbook>\n"
    exit 1
fi
HOST=$1
PLAYBOOK=$2
# Fail early if the playbook file does not exist.
if [ ! -f "$PLAYBOOK" ]; then
    echo "Playbook not found: $PLAYBOOK"
    exit 1
fi
# Drop any stale SSH host key (e.g. after the host was re-imaged) so
# host-key verification does not abort the run.
if grep -q "$HOST" ~/.ssh/known_hosts; then
    echo -e "\nRemoving $HOST from ~/.ssh/known_hosts..\n\n"
    ssh-keygen -R "$HOST"
fi
export ANSIBLE_ROLES_PATH=$(dirname "$0")/roles
# Trailing comma makes ansible treat the argument as an inline inventory list.
ansible-playbook -i "$HOST", "$PLAYBOOK"
|
#
# Copyright (c) 2013-2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# All Rights Reserved.
#
from collections import OrderedDict
import os
from cgtsclient._i18n import _
from cgtsclient.common import constants
from cgtsclient.common import utils
from cgtsclient import exc
from cgtsclient.v1 import ihost as ihost_utils
from cgtsclient.v1 import istor as istor_utils
from six.moves import input
def _print_ihost_show(ihost, columns=None, output_format=None):
    """Print the attributes of a single host.

    :param ihost: host object whose attributes are displayed
    :param columns: optional list of attribute names to display; when not
        provided, a default field set (plus optional fields present on the
        host) is used
    :param output_format: output style passed through to
        utils.print_dict_with_format ('table', 'yaml' or 'value')
    """
    optional_fields = []
    if columns:
        # Copy the list: the conditional appends below previously mutated
        # the caller's list (e.g. args.column) in place.
        fields = list(columns)
    else:
        fields = ['id', 'uuid', 'personality', 'hostname', 'invprovision',
                  'administrative', 'operational', 'availability', 'task',
                  'action', 'mgmt_mac', 'mgmt_ip', 'serialid',
                  'capabilities', 'bm_type', 'bm_username', 'bm_ip',
                  'config_applied', 'config_target', 'config_status',
                  'location', 'uptime', 'reserved', 'created_at', 'updated_at',
                  'boot_device', 'rootfs_device', 'install_output', 'console',
                  'tboot', 'vim_progress_status', 'software_load',
                  'install_state', 'install_state_info', 'inv_state',
                  'clock_synchronization',
                  'device_image_update', 'reboot_needed']
        optional_fields = ['vsc_controllers', 'ttys_dcd']
    # Show subfunction details only when they add information.
    if ihost.subfunctions != ihost.personality:
        fields.append('subfunctions')
        if 'controller' in ihost.subfunctions:
            fields.append('subfunction_oper')
            fields.append('subfunction_avail')
    if ihost.peers:
        fields.append('peers')
    # Do not display the trailing '+' which indicates the audit iterations
    if ihost.install_state_info:
        ihost.install_state_info = ihost.install_state_info.rstrip('+')
    if ihost.install_state:
        ihost.install_state = ihost.install_state.rstrip('+')
    data_list = [(f, getattr(ihost, f, '')) for f in fields]
    # Optional fields are only shown when the host actually has them.
    if optional_fields:
        data_list += [(f, getattr(ihost, f, '')) for f in optional_fields
                      if hasattr(ihost, f)]
    data = dict(data_list)
    utils.print_dict_with_format(data, wrap=72, output_format=output_format)
def _get_kube_host_upgrade_details(cc):
# Get the list of kubernetes host upgrades
kube_host_upgrades = cc.kube_host_upgrade.list()
# Map the host_id to hostname and personality
kube_host_upgrade_details = dict()
for kube_host_upgrade in kube_host_upgrades:
kube_host_upgrade_details[kube_host_upgrade.host_id] = {
'target_version': kube_host_upgrade.target_version,
'control_plane_version': kube_host_upgrade.control_plane_version,
'kubelet_version': kube_host_upgrade.kubelet_version,
'status': kube_host_upgrade.status}
return kube_host_upgrade_details
@utils.arg('hostnameorid', metavar='<hostname or id>',
           help="Name or ID of host")
@utils.arg('--column',
           action='append',
           default=[],
           help="Specify the column(s) to include, can be repeated")
@utils.arg('--format',
           choices=['table', 'yaml', 'value'],
           help="specify the output format, defaults to table")
def do_host_show(cc, args):
    """Show host attributes."""
    # Resolve the host by name or id, then print the requested columns.
    host = ihost_utils._find_ihost(cc, args.hostnameorid)
    _print_ihost_show(host, args.column, args.format)
@utils.arg('--column',
           action='append',
           default=[],
           help="Specify the column(s) to include, can be repeated")
@utils.arg('--format',
           choices=['table', 'yaml', 'value'],
           help="specify the output format, defaults to table")
def do_host_list(cc, args):
    """List hosts."""
    hosts = cc.ihost.list()
    # Default column set unless the user asked for specific ones.
    fields = args.column or ['id', 'hostname', 'personality',
                             'administrative', 'operational', 'availability']
    utils.print_list(hosts, fields, fields, sortby=0,
                     output_format=args.format)
def do_host_upgrade_list(cc, args):
    """List software upgrade info for hosts."""
    hosts = cc.ihost.list()
    # The displayed labels differ from the underlying attribute names for
    # the release columns.
    common = ['id', 'hostname', 'personality']
    field_labels = common + ['running_release', 'target_release']
    fields = common + ['software_load', 'target_load']
    utils.print_list(hosts, fields, field_labels, sortby=0)
def do_kube_host_upgrade_list(cc, args):
    """List kubernetes upgrade info for hosts."""
    upgrade_details = _get_kube_host_upgrade_details(cc)
    # Keep only hosts that are part of the kubernetes cluster; storage
    # nodes, for example, have no upgrade details.
    hosts = [h for h in cc.ihost.list() if h.id in upgrade_details]
    # Copy the per-host upgrade info onto the host objects for display.
    for host in hosts:
        details = upgrade_details[host.id]
        host.target_version = details['target_version']
        host.control_plane_version = details['control_plane_version']
        host.kubelet_version = details['kubelet_version']
        host.status = details['status']
    fields = ['id', 'hostname', 'personality', 'target_version',
              'control_plane_version', 'kubelet_version', 'status']
    utils.print_list(hosts, fields, fields, sortby=0)
@utils.arg('-n', '--hostname',
           metavar='<hostname>',
           help='Hostname of the host')
@utils.arg('-p', '--personality',
           metavar='<personality>',
           choices=['controller', 'worker', 'edgeworker', 'storage', 'network', 'profile'],
           help='Personality or type of host [REQUIRED]')
@utils.arg('-s', '--subfunctions',
           metavar='<subfunctions>',
           choices=['lowlatency'],
           help='Performance profile or subfunctions of host.[Optional]')
@utils.arg('-m', '--mgmt_mac',
           metavar='<mgmt_mac>',
           help='MAC Address of the host mgmt interface [REQUIRED]')
@utils.arg('-i', '--mgmt_ip',
           metavar='<mgmt_ip>',
           help='IP Address of the host mgmt interface (when using static '
                'address allocation)')
@utils.arg('-I', '--bm_ip',
           metavar='<bm_ip>',
           help="IP Address of the host board management interface, "
                "only necessary if this host's board management controller "
                "is not in the primary region")
@utils.arg('-T', '--bm_type',
           metavar='<bm_type>',
           help='Type of the host board management interface')
@utils.arg('-U', '--bm_username',
           metavar='<bm_username>',
           help='Username for the host board management interface')
@utils.arg('-P', '--bm_password',
           metavar='<bm_password>',
           help='Password for the host board management interface')
@utils.arg('-b', '--boot_device',
           metavar='<boot_device>',
           help='Device for boot partition, relative to /dev. Default: sda')
@utils.arg('-r', '--rootfs_device',
           metavar='<rootfs_device>',
           help='Device for rootfs partition, relative to /dev. Default: sda')
@utils.arg('-o', '--install_output',
           metavar='<install_output>',
           choices=['text', 'graphical'],
           help='Installation output format, text or graphical. Default: text')
@utils.arg('-c', '--console',
           metavar='<console>',
           help='Serial console. Default: ttyS0,115200')
@utils.arg('-v', '--vsc_controllers',
           metavar='<vsc_controllers>',
           help='Comma separated active/standby VSC Controller IP addresses')
@utils.arg('-l', '--location',
           metavar='<location>',
           help='Physical location of the host')
@utils.arg('-D', '--ttys_dcd',
           metavar='<true/false>',
           help='Enable/disable serial console data carrier detection')
@utils.arg('-C', '--clock_synchronization',
           metavar='<clock_synchronization>',
           choices=['ntp', 'ptp'],
           help='Clock synchronization, ntp or ptp. Default: ntp')
def do_host_add(cc, args):
    """Add a new host."""
    field_list = ['hostname', 'personality', 'subfunctions',
                  'mgmt_mac', 'mgmt_ip',
                  'bm_ip', 'bm_type', 'bm_username', 'bm_password',
                  'boot_device', 'rootfs_device', 'install_output', 'console',
                  'vsc_controllers', 'location', 'ttys_dcd',
                  'clock_synchronization']
    # Forward only the arguments the user actually supplied.
    fields = {k: v for k, v in vars(args).items()
              if k in field_list and v is not None}
    # This is the expected format of the location field
    if 'location' in fields:
        fields['location'] = {"locn": fields['location']}
    new_host = cc.ihost.create(**fields)
    suuid = getattr(new_host, 'uuid', '')
    # Fetch the freshly created host back so the full attribute set is shown.
    try:
        new_host = cc.ihost.get(suuid)
    except exc.HTTPNotFound:
        raise exc.CommandError('Host not found: %s' % suuid)
    else:
        _print_ihost_show(new_host)
@utils.arg('hostsfile',
           metavar='<hostsfile>',
           help='File containing the XML descriptions of hosts to be '
                'provisioned [REQUIRED]')
def do_host_bulk_add(cc, args):
    """Add multiple new hosts."""
    hostsfile = args.hostsfile
    if os.path.isdir(hostsfile):
        raise exc.CommandError("Error: %s is a directory." % hostsfile)
    try:
        req = open(hostsfile, 'rb')
    except Exception:
        raise exc.CommandError("Error: Could not open file %s." % hostsfile)
    # Close the file handle once the request completes (it was previously
    # leaked).
    try:
        response = cc.ihost.create_many(req)
    finally:
        req.close()
    if not response:
        raise exc.CommandError("The request timed out or there was an "
                               "unknown error")
    # The API reports per-host results as 'success' and 'error' summaries.
    success = response.get('success')
    error = response.get('error')
    if success:
        print("Success: " + success + "\n")
    if error:
        print("Error:\n" + error)
@utils.arg('-m', '--mgmt_mac',
           metavar='<mgmt_mac>',
           help='MAC Address of the host mgmt interface')
@utils.arg('-i', '--mgmt_ip',
           metavar='<mgmt_ip>',
           help='IP Address of the host mgmt interface')
@utils.arg('-s', '--serialid',
           metavar='<serialid>',
           help='SerialId of the host.')
def donot_host_sysaddlab(cc, args):
    """LAB ONLY Add a new host simulating sysinv."""
    field_list = ['mgmt_mac', 'mgmt_ip', 'serialid']
    # Keep only supplied arguments.
    fields = {k: v for k, v in vars(args).items()
              if k in field_list and v is not None}
    fields = utils.args_array_to_dict(fields, 'location')
    host = cc.ihost.create(**fields)
    suuid = getattr(host, 'uuid', '')
    # Read the host back so the displayed attributes reflect server state.
    try:
        host = cc.ihost.get(suuid)
    except exc.HTTPNotFound:
        raise exc.CommandError('host not found: %s' % suuid)
    else:
        _print_ihost_show(host)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           nargs='+',
           help="Name or ID of host")
def do_host_delete(cc, args):
    """Delete a host."""
    # Multiple hosts may be given; deletion stops at the first unknown one.
    for host_ref in args.hostnameorid:
        try:
            cc.ihost.delete(host_ref)
        except exc.HTTPNotFound:
            raise exc.CommandError('host not found: %s' % host_ref)
        else:
            print('Deleted host %s' % host_ref)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           help="Name or ID of host")
@utils.arg('attributes',
           metavar='<path=value>',
           nargs='+',
           action='append',
           default=[],
           help="Attributes to update ")
def do_host_update(cc, args):
    """Update host attributes."""
    # argparse nests the positional attribute list one level deep.
    patch = utils.args_array_to_patch("replace", args.attributes[0])
    host = ihost_utils._find_ihost(cc, args.hostnameorid)
    try:
        host = cc.ihost.update(host.id, patch)
    except exc.HTTPNotFound:
        raise exc.CommandError('host not found: %s' % args.hostnameorid)
    _print_ihost_show(host)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           help="Name or ID of host")
@utils.arg('-f', '--force',
           action='store_true',
           default=False,
           help="Force a lock operation ")
def do_host_lock(cc, args):
    """Lock a host."""
    # A forced lock bypasses the normal pre-lock checks.
    action = 'force-lock' if args.force is True else 'lock'
    patch = utils.args_array_to_patch("replace", ['action=%s' % action])
    host = ihost_utils._find_ihost(cc, args.hostnameorid)
    try:
        host = cc.ihost.update(host.id, patch)
    except exc.HTTPNotFound:
        raise exc.CommandError('host not found: %s' % args.hostnameorid)
    _print_ihost_show(host)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           help="Name or ID of host")
@utils.arg('-f', '--force',
           action='store_true',
           default=False,
           help="Force an unlock operation ")
def do_host_unlock(cc, args):
    """Unlock a host."""
    # A forced unlock bypasses the normal pre-unlock checks.
    action = 'force-unlock' if args.force is True else 'unlock'
    patch = utils.args_array_to_patch("replace", ['action=%s' % action])
    host = ihost_utils._find_ihost(cc, args.hostnameorid)
    try:
        host = cc.ihost.update(host.id, patch)
    except exc.HTTPNotFound:
        raise exc.CommandError('host not found: %s' % args.hostnameorid)
    _print_ihost_show(host)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           help="Name or ID of host")
@utils.arg('-f', '--force',
           action='store_true',
           default=False,
           help="Force a host swact operation ")
def do_host_swact(cc, args):
    """Switch activity away from this active host."""
    # A forced swact skips the readiness checks on the standby host.
    action = 'force-swact' if args.force is True else 'swact'
    patch = utils.args_array_to_patch("replace", ['action=%s' % action])
    host = ihost_utils._find_ihost(cc, args.hostnameorid)
    try:
        host = cc.ihost.update(host.id, patch)
    except exc.HTTPNotFound:
        raise exc.CommandError('host not found: %s' % args.hostnameorid)
    _print_ihost_show(host)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           help="Name or ID of host")
def do_host_reset(cc, args):
    """Reset a host."""
    # Host actions are applied as a patch replacing the 'action' attribute.
    patch = utils.args_array_to_patch("replace", ['action=reset'])
    host = ihost_utils._find_ihost(cc, args.hostnameorid)
    try:
        host = cc.ihost.update(host.id, patch)
    except exc.HTTPNotFound:
        raise exc.CommandError('host not found: %s' % args.hostnameorid)
    _print_ihost_show(host)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           help="Name or ID of host")
def do_host_reboot(cc, args):
    """Reboot a host."""
    patch = utils.args_array_to_patch("replace", ['action=reboot'])
    host = ihost_utils._find_ihost(cc, args.hostnameorid)
    try:
        host = cc.ihost.update(host.id, patch)
    except exc.HTTPNotFound:
        raise exc.CommandError('host not found: %s' % args.hostnameorid)
    _print_ihost_show(host)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           help="Name or ID of host")
def do_host_reinstall(cc, args):
    """Reinstall a host."""
    patch = utils.args_array_to_patch("replace", ['action=reinstall'])
    host = ihost_utils._find_ihost(cc, args.hostnameorid)
    try:
        host = cc.ihost.update(host.id, patch)
    except exc.HTTPNotFound:
        raise exc.CommandError('host not found: %s' % args.hostnameorid)
    _print_ihost_show(host)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           help="Name or ID of host")
def do_host_power_on(cc, args):
    """Power on a host."""
    patch = utils.args_array_to_patch("replace", ['action=power-on'])
    host = ihost_utils._find_ihost(cc, args.hostnameorid)
    try:
        host = cc.ihost.update(host.id, patch)
    except exc.HTTPNotFound:
        raise exc.CommandError('host not found: %s' % args.hostnameorid)
    _print_ihost_show(host)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           help="Name or ID of host")
def do_host_power_off(cc, args):
    """Power off a host."""
    patch = utils.args_array_to_patch("replace", ['action=power-off'])
    host = ihost_utils._find_ihost(cc, args.hostnameorid)
    try:
        host = cc.ihost.update(host.id, patch)
    except exc.HTTPNotFound:
        raise exc.CommandError('host not found: %s' % args.hostnameorid)
    _print_ihost_show(host)
def _list_storage(cc, host):
    """Echo the storage configuration (stors, lvgs, pvs) of a host."""
    # Stors, with their backing disks resolved for display.
    stors = cc.istor.list(host.uuid)
    for stor in stors:
        istor_utils._get_disks(cc, host, stor)
    stor_fields = ['uuid', 'function', 'capabilities', 'disks']
    utils.print_list(stors, stor_fields, stor_fields, sortby=0)
    # Local volume groups.
    lvgs = cc.ilvg.list(host.uuid)
    utils.print_list(lvgs,
                     ['uuid', 'lvm_vg_name', 'lvm_cur_pv'],
                     ['uuid', 'lvm_vg_name', 'Current PVs'],
                     sortby=0)
    # Physical volumes.
    pvs = cc.ipv.list(host.uuid)
    pv_fields = ['uuid', 'lvm_pv_name', 'disk_or_part_device_path',
                 'lvm_vg_name']
    utils.print_list(pvs, pv_fields, pv_fields, sortby=0)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           help="Name or ID of host")
def do_host_patch_reboot(cc, args):
    """Command has been deprecated."""
    try:
        ihost = cc.ihost.get(args.hostnameorid)
    except exc.HTTPNotFound:
        raise exc.CommandError('Host not found: %s' % args.hostnameorid)
    # Point the user at the manual lock / patch / unlock procedure instead.
    for line in (
            "The host-patch-reboot command has been deprecated.",
            "Please use the following procedure:",
            "1. Lock the node:",
            "   system host-lock %s" % ihost.hostname,
            "2. Issue patch install request:",
            "   sudo sw-patch host-install %s" % ihost.hostname,
            "   Or to issue non-blocking requests for parallel install:",
            "   sudo sw-patch host-install-async %s" % ihost.hostname,
            "   sudo sw-patch query-hosts",
            "3. Unlock node once install completes:",
            "   system host-unlock %s" % ihost.hostname):
        print(line)
@utils.arg('--filename',
           help="The full file path to store the host file. Default './hosts.xml'")
def do_host_bulk_export(cc, args):
    """Export host bulk configurations."""
    xml_content = cc.ihost.bulk_export()['content']
    # Fall back to ./hosts.xml unless the user supplied a path.
    config_filename = getattr(args, 'filename', None) or './hosts.xml'
    try:
        with open(config_filename, 'wb') as fw:
            fw.write(xml_content)
        print(_('Export successfully to %s') % config_filename)
    except IOError:
        print(_('Cannot write to file: %s') % config_filename)
@utils.arg('hostid',
           metavar='<hostname or id>',
           help="Name or ID of host")
@utils.arg('-f', '--force',
           action='store_true',
           default=False,
           help="Force the downgrade operation ")
def do_host_downgrade(cc, args):
    """Perform software downgrade for the specified host."""
    ihost_utils._find_ihost(cc, args.hostid)
    _system_type, system_mode = utils._get_system_info(cc)
    # On a simplex system a downgrade erases the node, so require an
    # explicit 'yes' before proceeding.
    if system_mode == constants.SYSTEM_MODE_SIMPLEX:
        warning_message = (
            '\n'
            'WARNING: THIS OPERATION WILL COMPLETELY ERASE ALL DATA FROM THE '
            'SYSTEM.\n'
            'Only proceed once the system data has been copied to another '
            'system.\n'
            'Are you absolutely sure you want to continue? [yes/N]: ')
        if input(warning_message) != 'yes':
            print("Operation cancelled.")
            return
    _print_ihost_show(cc.ihost.downgrade(args.hostid, args.force))
@utils.arg('hostid',
           metavar='<hostname or id>',
           help="Name or ID of host")
@utils.arg('-f', '--force',
           action='store_true',
           default=False,
           help="Force the upgrade operation ")
def do_host_upgrade(cc, args):
    """Perform software upgrade for a host."""
    ihost_utils._find_ihost(cc, args.hostid)
    _system_type, system_mode = utils._get_system_info(cc)
    # On a simplex system an upgrade erases the node, so require an
    # explicit 'yes' before proceeding.
    if system_mode == constants.SYSTEM_MODE_SIMPLEX:
        warning_message = (
            '\n'
            'WARNING: THIS OPERATION WILL COMPLETELY ERASE ALL DATA FROM THE '
            'SYSTEM.\n'
            'Only proceed once the system data has been copied to another '
            'system.\n'
            'Are you absolutely sure you want to continue? [yes/N]: ')
        if input(warning_message) != 'yes':
            print("Operation cancelled.")
            return
    _print_ihost_show(cc.ihost.upgrade(args.hostid, args.force))
@utils.arg('hostid',
           metavar='<hostname or id>',
           help="Name or ID of host")
@utils.arg('component',
           metavar='<component>',
           choices=['control-plane', 'kubelet'],
           help='Kubernetes component to upgrade')
@utils.arg('-f', '--force',
           action='store_true',
           default=False,
           help="Force the kubernetes upgrade operation ")
def do_kube_host_upgrade(cc, args):
    """Perform kubernetes upgrade for a host."""
    ihost_utils._find_ihost(cc, args.hostid)
    # Dispatch on the component being upgraded.
    if args.component == 'control-plane':
        host = cc.ihost.kube_upgrade_control_plane(args.hostid, args.force)
    elif args.component == 'kubelet':
        host = cc.ihost.kube_upgrade_kubelet(args.hostid, args.force)
    else:
        raise exc.CommandError('Invalid component value: %s' % args.component)
    # Copy this host's kubernetes upgrade details onto the host object so
    # they appear in the output.
    details = _get_kube_host_upgrade_details(cc)[host.id]
    host.target_version = details['target_version']
    host.control_plane_version = details['control_plane_version']
    host.kubelet_version = details['kubelet_version']
    host.status = details['status']
    fields = ['id', 'hostname', 'personality', 'target_version',
              'control_plane_version', 'kubelet_version', 'status']
    data = {f: getattr(host, f, '') for f in fields}
    ordereddata = OrderedDict(sorted(data.items()))
    utils.print_dict(ordereddata, wrap=72)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           help="Name or ID of host")
def do_host_device_image_update(cc, args):
    """Update device image on a host."""
    target = ihost_utils._find_ihost(cc, args.hostnameorid)
    try:
        updated = cc.ihost.device_image_update(target.uuid)
    except exc.HTTPNotFound:
        raise exc.CommandError(
            'Device image update failed: host %s' % args.hostnameorid)
    _print_ihost_show(updated)
@utils.arg('hostnameorid',
           metavar='<hostname or id>',
           help="Name or ID of host")
def do_host_device_image_update_abort(cc, args):
    """Abort device image update on a host."""
    target = ihost_utils._find_ihost(cc, args.hostnameorid)
    try:
        updated = cc.ihost.device_image_update_abort(target.uuid)
    except exc.HTTPNotFound:
        raise exc.CommandError(
            'Device image update-abort failed: host %s' % args.hostnameorid)
    _print_ihost_show(updated)
|
# Prepend Homebrew's curl to zsh's 'path' array (which mirrors $PATH) so it
# takes precedence over the system curl — only when it is installed.
if [ -d "/usr/local/opt/curl/bin" ] ; then
    path=("/usr/local/opt/curl/bin" $path)
fi
|
<reponame>ThallesTorres/Linguagem_C
// Exercício 07 - Calcular o fatorial de um número fornecido pelo usuário. A função fatorial de
// um número natural n é o produto de todos os n primeiros números naturais.
#include <stdio.h>
/*
 * Exercise 07 - Compute the factorial of a user-supplied number, printing
 * each factor followed by the result.
 */
int main(void)
{
    int num, count;
    /* long long: a 32-bit int overflows for factorials beyond 12!. */
    long long resp = 1;

    printf("Digite um número: ");
    /* Reject non-numeric input instead of computing on garbage. */
    if (scanf("%d", &num) != 1)
    {
        return 1;
    }
    printf("Fatorial de %d é: \n", num);
    for (count = 1; count <= num; count++)
    {
        resp *= count;
        printf("%d ", count);
    }
    /* For num <= 0 the loop does not run and the factorial is 1. */
    printf("= %lld \n", resp);
    return 0;
}
// Copyright 2008 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.services;
import org.apache.tapestry5.dom.Document;
/**
* Interface used to programatically render a page, forming a {@link org.apache.tapestry5.dom.Document} which can then
* be manipulated or {@linkplain org.apache.tapestry5.dom.Document#toMarkup(java.io.PrintWriter) streamed to a
* PrintWriter}.
*/
public interface PageDocumentGenerator
{
    /**
     * Renders the named page as a DOM {@link Document}.
     *
     * @param logicalPageName the logical name of the page to render
     * @return the rendered page as a document that can be further manipulated or streamed
     */
    Document render(String logicalPageName);
}
|
import json
import time
import pytest
from netdumplings import Dumpling, DumplingChef, DumplingDriver
from netdumplings.exceptions import InvalidDumpling, InvalidDumplingPayload
@pytest.fixture
def mock_kitchen(mocker):
    """A mocked kitchen with representative attribute values."""
    fake_kitchen = mocker.Mock()
    # 'name' must be assigned after creation; passing name= to Mock()
    # would configure the mock's own name rather than the attribute.
    fake_kitchen.name = 'TestKitchen'
    fake_kitchen.interface = 'en0'
    fake_kitchen.filter = 'tcp'
    fake_kitchen.chef_poke_interval = 5
    return fake_kitchen
@pytest.fixture
def mock_chef(mocker, mock_kitchen):
    """A mocked chef attached to the mocked kitchen."""
    fake_chef = mocker.Mock()
    # Assigned post-creation; Mock(name=...) would not set the attribute.
    fake_chef.name = 'TestChef'
    fake_chef.kitchen = mock_kitchen
    return fake_chef
@pytest.fixture
def mock_time(mocker):
    """Patch time.time() so every call returns one frozen timestamp."""
    frozen_now = time.time()
    return mocker.patch.object(time, 'time', return_value=frozen_now)
@pytest.fixture(scope='function')
def dumpling_dict():
    """A fresh dict per test, mirroring a serialized dumpling's wire format:
    'metadata' describing its origin plus the chef-provided 'payload'.
    """
    return {
        'metadata': {
            'chef': 'PacketCountChef',
            'creation_time': 1515990765.925951,
            'driver': 'interval',
            'kitchen': 'default_kitchen',
        },
        'payload': {
            'packet_counts': {
                'Ethernet': 1426745,
                'IP': 1423352,
                'TCP': 12382,
                'UDP': 1413268,
            },
        },
    }
class TestDumpling:
    """
    Test the Dumpling class.
    """
    def test_init_with_chef_instance(self, mock_chef, mock_kitchen, mock_time):
        """
        Test initialization of a Dumpling.
        """
        # A chef object brings its kitchen along; driver defaults to packet.
        dumpling = Dumpling(chef=mock_chef, payload=None)
        assert dumpling.chef is mock_chef
        assert dumpling.chef_name == 'TestChef'
        assert dumpling.kitchen is mock_kitchen
        assert dumpling.driver is DumplingDriver.packet
        assert dumpling.creation_time == mock_time.return_value
        assert dumpling.payload is None
    def test_init_with_chef_string(self, mock_time):
        """
        Test initialization of a Dumpling with a chef string.
        """
        # A plain string chef has no kitchen to attach.
        dumpling = Dumpling(chef='test_chef', payload=None)
        assert dumpling.chef == 'test_chef'
        assert dumpling.chef_name == 'test_chef'
        assert dumpling.kitchen is None
        assert dumpling.driver is DumplingDriver.packet
        assert dumpling.creation_time == mock_time.return_value
        assert dumpling.payload is None
    def test_metadata(self, mocker, mock_chef, mock_time):
        """
        Test metadata keys in the to_json() result.
        """
        dumpling = Dumpling(chef=mock_chef, payload=None)
        data = json.loads(dumpling.to_json())
        metadata = data['metadata']
        assert metadata['chef'] == mock_chef.name
        assert metadata['kitchen'] == mock_chef.kitchen.name
        assert metadata['driver'] == 'packet'
        assert metadata['creation_time'] == mock_time.return_value
    def test_to_json_payload_string(self, mock_chef):
        """
        Test string payload in the to_json() result.
        """
        dumpling = Dumpling(chef=mock_chef, payload='test payload')
        data = json.loads(dumpling.to_json())
        assert data['payload'] == 'test payload'
    def test_to_json_payload_list(self, mock_chef):
        """
        Test list payload in the to_json() result.
        """
        test_payload = list(range(10))
        dumpling = Dumpling(chef=mock_chef, payload=test_payload)
        data = json.loads(dumpling.to_json())
        assert data['payload'] == test_payload
    def test_to_json_payload_dict(self, mock_chef):
        """
        Test dict payload in the to_json() result.
        """
        test_payload = {
            'one': 1,
            'two': [1, 2, 3],
            'three': {
                'four': 5,
            }
        }
        dumpling = Dumpling(chef=mock_chef, payload=test_payload)
        data = json.loads(dumpling.to_json())
        assert data['payload'] == test_payload
    def test_unserializable_payload(self, mock_chef):
        """
        Test an unserializable Dumpling payload.
        """
        # A lambda cannot be JSON-serialized, so to_json() must raise.
        dumpling = Dumpling(chef=mock_chef, payload=lambda x: x)
        with pytest.raises(InvalidDumplingPayload):
            dumpling.to_json()
    def test_from_json_valid(self, dumpling_dict):
        """
        Test creating a dumpling from valid input JSON.
        """
        dumpling = Dumpling.from_json(json.dumps(dumpling_dict))
        assert dumpling.chef == 'PacketCountChef'
        assert dumpling.creation_time == 1515990765.925951
        assert dumpling.driver is DumplingDriver.interval
        assert dumpling.kitchen == 'default_kitchen'
        assert dumpling.payload == dumpling_dict['payload']
    def test_from_json_invalid(self, dumpling_dict):
        """
        Test creating a dumpling from invalid input JSON. Should raise
        InvalidDumpling.
        """
        # NOTE(review): a raw dict (not a JSON string) is passed to
        # from_json, which appears to be the deliberate invalid input.
        dumpling_dict['payload']['invalid'] = self
        with pytest.raises(InvalidDumpling):
            Dumpling.from_json(dumpling_dict)
    def test_repr(self, mock_time, dumpling_dict):
        """
        Test the string representation.
        """
        # Payloads are summarized by type (<str>, <list>, <dict>) in repr.
        dumpling = Dumpling(chef='test_chef', payload=None)
        assert repr(dumpling) == (
            "Dumpling(chef='test_chef', "
            'driver=DumplingDriver.packet, '
            'creation_time={}, '
            'payload=None)'.format(mock_time.return_value)
        )
        chef = DumplingChef()
        chef_repr = repr(chef)
        dumpling = Dumpling(
            chef=chef,
            driver=DumplingDriver.interval,
            payload='test_payload'
        )
        assert repr(dumpling) == (
            'Dumpling(chef={}, '
            'driver=DumplingDriver.interval, '
            'creation_time={}, '
            'payload=<str>)'.format(chef_repr, mock_time.return_value)
        )
        dumpling = Dumpling(
            chef=chef,
            driver=DumplingDriver.interval,
            payload=[1, 2, 3]
        )
        assert repr(dumpling) == (
            'Dumpling(chef={}, '
            'driver=DumplingDriver.interval, '
            'creation_time={}, '
            'payload=<list>)'.format(chef_repr, mock_time.return_value)
        )
        dumpling = Dumpling(
            chef=chef,
            driver=DumplingDriver.interval,
            payload={'test': 10}
        )
        assert repr(dumpling) == (
            'Dumpling(chef={}, '
            'driver=DumplingDriver.interval, '
            'creation_time={}, '
            'payload=<dict>)'.format(chef_repr, mock_time.return_value)
        )
        dumpling = Dumpling.from_json(json.dumps(dumpling_dict))
        assert repr(dumpling) == (
            "Dumpling(chef='PacketCountChef', "
            'driver=DumplingDriver.interval, '
            'creation_time=1515990765.925951, '
            'payload=<dict>)'
        )
|
#!/bin/sh
# Render detail views for evaluation patches 0-4 of the 2018-08-27 run.
# $1 is the patch index forwarded to show_detail_csv's 'idx' argument.
for i in 0 1 2 3 4
do
    python -m neuralparticles.tools.show_detail_csv src 2D_data/tmp/2018-08-27_14-13-57/eval/eval_patch_src_e000_d00${i}_t000.csv res 2D_data/tmp/2018-08-27_14-13-57/eval/eval_patch_res_e002_d00${i}_t000.csv idx "$1" out details_d/detail_i%04d_%s_${i}
done
import { Component } from '@angular/core';
import { NbDialogService } from '@nebular/theme';
import { PriceBookService } from '../../../@core/data/pricebook.service';
import { CreateLineItemComponent } from '../create-pricebook/line-item/line-item.component';
import { CreateLineItemGroupComponent } from '../create-pricebook/group/group.component';
@Component({
  selector: 'ngx-header-pricebook',
  styleUrls: ['./header.component.scss'],
  templateUrl: './header.component.html',
})
export class HeaderPriceBookComponent {
  constructor(
    private dialogService: NbDialogService,
    public pricebookService: PriceBookService,
  ) { }

  /** Open the "Create Line Item" dialog. */
  createLineItem() {
    this.openCreateDialog(CreateLineItemComponent, 'Create Line Item');
  }

  /** Open the "Create Line Item Group" dialog. */
  createGroup() {
    this.openCreateDialog(CreateLineItemGroupComponent, 'Create Line Item Group');
  }

  /**
   * Shared helper: open a creation dialog with the given title.
   * Backdrop clicks are ignored so the user must explicitly close it.
   */
  private openCreateDialog(component: any, title: string) {
    this.dialogService.open(component, {
      context: {
        title: title,
      },
      closeOnBackdropClick: false,
    });
  }
}
|
// Repo: Mdamman/APP_MakeTheChange
import { Component } from "@angular/core";
import { SplashScreen } from "@capacitor/splash-screen";
import { SeoService } from "./utils/seo/seo.service";
import { TranslateService, LangChangeEvent } from "@ngx-translate/core";
import { HistoryHelperService } from "./utils/history-helper.service";
import { DonateService } from "./donate/donate.service";
import { AngularFireAuth } from "@angular/fire/auth";
import firebase from "firebase/app";
import { first } from "rxjs/operators";
import { MenuController } from "@ionic/angular";
import { Router } from "@angular/router";
@Component({
  selector: "app-root",
  templateUrl: "app.component.html",
  styleUrls: [
    "./side-menu/styles/side-menu.scss",
    "./side-menu/styles/side-menu.shell.scss",
    "./side-menu/styles/side-menu.responsive.scss",
  ],
})
export class AppComponent {
  /** Main side-menu navigation entries. */
  appPages = [
    { title: "Accueil", url: "/app/user/profil", ionicIcon: "home-outline" },
    {
      title: "Tableau de bord",
      url: "/app/user/friends",
      customIcon: "./assets/custom-icons/side-menu/contact-card.svg",
    },
    {
      title: "Contreparties",
      url: "/app/categories",
      ionicIcon: "bag-handle-outline",
    },
    {
      title: "Blog",
      url: "/posts",
      ionicIcon: "reader-outline",
    },
    {
      title: "Notifications",
      url: "/app/notifications",
      ionicIcon: "notifications-outline",
    },
    {
      title: "Dons",
      url: "/donation-list",
      ionicIcon: "people-outline",
    },
    {
      title: "Contactez-nous",
      url: "/contact-us",
      ionicIcon: "mail-open-outline",
    },
  ];
  /** Account-related side-menu entries. */
  accountPages = [
    {
      title: "Log In",
      url: "/auth/login",
      ionicIcon: "log-in-outline",
    },
    {
      title: "Sign Up",
      url: "/auth/signup",
      ionicIcon: "person-add-outline",
    },
    {
      title: "Tutorial",
      url: "/walkthrough",
      ionicIcon: "school-outline",
    },
    {
      title: "Getting Started",
      url: "/getting-started",
      ionicIcon: "rocket-outline",
    },
    {
      title: "404 page",
      url: "/page-not-found",
      ionicIcon: "alert-circle-outline",
    },
  ];
  /** Layout text direction ("ltr" or "rtl"). */
  textDir = "ltr";
  /** Firestore profile document of the signed-in user. */
  profile: any;
  /** Currently authenticated Firebase user (null when signed out). */
  currentUser: firebase.User;
  // Donation stats accumulated for the signed-in user
  totalAmount: number = 0;
  totalContributors: number = 0;
  projects: any[] = [];

  // Inject HistoryHelperService in the app.components.ts so its available app-wide
  constructor(
    public translate: TranslateService,
    public historyHelper: HistoryHelperService,
    private seoService: SeoService,
    private firestore: DonateService,
    private angularFire: AngularFireAuth,
    private menu: MenuController,
    private route: Router
  ) {
    this.initializeApp();
    this.setLanguage();
    // Track the auth state; load per-user data on sign-in.
    this.angularFire.onAuthStateChanged((user) => {
      if (user) {
        this.currentUser = user;
        this.fetchProfile();
        this.loadDonations();
      } else {
        this.currentUser = null;
      }
    });
  }

  /** Fetch the signed-in user's profile document (one-shot read). */
  async fetchProfile() {
    this.profile = await this.firestore
      .getDocument("users", this.currentUser.uid)
      .pipe(first())
      .toPromise();
  }

  /**
   * Subscribe to the donations collection and accumulate statistics
   * for the signed-in user. Returns the subscription so callers can
   * unsubscribe.
   */
  loadDonations() {
    return this.firestore.getCollection("donations").subscribe((data: any) => {
      // Reset ALL accumulated stats on every emission so re-emissions
      // do not carry stale entries (fix: `projects` was never reset,
      // unlike the two totals).
      this.totalAmount = 0;
      this.totalContributors = 0;
      this.projects = [];
      data
        .filter((ele) => {
          // NOTE(review): the field is named userId but is compared
          // against the user's email -- confirm the stored value.
          return ele.userId === this.currentUser.email;
        })
        .forEach((element) => {
          this.totalAmount += element.amount;
          // this.totalContributors += 1;
          if (!this.projects.includes(element.projectId))
            this.projects.push(element.projectId);
        });
    });
  }

  /** Hide the splash screen; harmless no-op outside Capacitor. */
  async initializeApp() {
    try {
      await SplashScreen.hide();
    } catch (err) {
      console.log("This is normal in a browser", err);
    }
  }

  /** Configure i18n: English as both fallback and active language. */
  setLanguage() {
    // this language will be used as a fallback when a translation isn't found in the current language
    this.translate.setDefaultLang("en");
    // the lang to use, if the lang isn't available, it will use the current loader to get them
    this.translate.use("en");
    // this is to determine the text direction depending on the selected language
    // for the purpose of this example we determine that only arabic and hebrew are RTL.
    // this.translate.onLangChange.subscribe((event: LangChangeEvent) => {
    //   this.textDir = (event.lang === 'ar' || event.lang === 'iw') ? 'rtl' : 'ltr';
    // });
  }

  /** Navigate to profile editing and collapse the side menu. */
  openProfileEdit() {
    this.route.navigate(["firebase/auth/profile"]);
    this.menu.toggle();
  }
}
|
# tests/lsshipper/test_connection.py
import pytest
import asyncio
from lsshipper.common.state import State
from lsshipper.connection import logstash_connection
import logging
logging.basicConfig(level=logging.DEBUG)
@pytest.mark.asyncio
async def test_read_common_file(event_loop, unused_tcp_port):
    """transfer without disconnect

    End-to-end: 100 newline-terminated messages are queued, shipped by
    logstash_connection over plain TCP to a local fake server, and the
    server must receive them all, in order.
    """
    test = list()  # lines received by the fake server
    state = State(event_loop)
    done = asyncio.Event()

    async def handler(reader, writer):
        # Fake logstash: read newline-delimited messages until the
        # client disconnects or the stream ends, then signal completion.
        while True:
            try:
                line = await reader.readline()
            except ConnectionError:
                break
            if not line or line == b'':
                break
            test.append(line.decode())
        done.set()
        writer.close()
    port = unused_tcp_port
    # Minimal shipper config: plain TCP to localhost, SSL disabled
    config = {}
    config['connection'] = {}
    config['ssl'] = {}
    config['connection']['host'] = '127.0.0.1'
    config['connection']['port'] = port
    config['ssl']['enable'] = False
    queue = asyncio.Queue(loop=event_loop)
    # Preload the queue with the messages to be shipped
    test_messages = list()
    for i in range(100):
        m = "Message {}\n".format(i)
        test_messages.append(m)
        await queue.put(m)
    server = await asyncio.start_server(
        handler, host='127.0.0.1', port=port, loop=event_loop)
    client = asyncio.ensure_future(logstash_connection(
        queue, state, event_loop, config), )
    await queue.join()   # wait for every message to be consumed
    state.shutdown()     # tell the connection coroutine to stop
    await done.wait()    # wait for the server to observe the close
    await client
    assert test == test_messages
    server.close()
    await server.wait_closed()
|
########################################################################
# Copyright 2021, UChicago Argonne, LLC
#
# Licensed under the BSD-3 License (the "License"); you may not use
# this file except in compliance with the License. You may obtain a
# copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
########################################################################
"""
date: 2021-11-02
author: matz
Test the correlations
"""
########################################################################
import os
import pandas as pd
import numpy as np
import pytest
from tests.conftest import activate_rodded_region
import dassh
from dassh.correlations import (friction_ctd,
friction_cts,
friction_nov,
# friction_eng,
friction_reh,
friction_uctd)
from dassh.correlations import (flowsplit_mit,
flowsplit_nov,
flowsplit_ctd,
flowsplit_uctd)
from dassh.correlations import (mixing_ctd,
mixing_mit)
def make_assembly(n_ring, pin_pitch, pin_diameter, clad_thickness,
                  wire_pitch, wire_diameter, duct_ftf, coolant_obj,
                  duct_obj, inlet_temp, inlet_flow_rate,
                  corr_friction='CTD', corr_flowsplit='CTD',
                  corr_mixing='CTD', se2geo=False):
    """Build a DASSH RoddedRegion with the given geometry, materials,
    and correlation choices, then activate it at the inlet temperature."""
    rodded_region = dassh.RoddedRegion(
        'fuel', n_ring, pin_pitch, pin_diameter,
        wire_pitch, wire_diameter, clad_thickness,
        duct_ftf, inlet_flow_rate, coolant_obj, duct_obj,
        htc_params_duct=None,
        corr_friction=corr_friction,
        corr_flowsplit=corr_flowsplit,
        corr_mixing=corr_mixing,
        corr_nusselt='DB',
        se2=se2geo)
    return activate_rodded_region(rodded_region, inlet_temp)
########################################################################
# TEST WARNINGS
########################################################################
def test_correlation_warnings(caplog):
    """Test that attempting to use correlations for assemblies that
    are outside the acceptable ranges raises warnings"""
    # Some standard parameters for assembly instantiation (some will
    # be changed to assess that the proper warnings are raised)
    n_ring = 2
    clad_thickness = 0.5 / 1e3
    wire_diameter = 1.094 / 1e3  # mm -> m
    duct_ftf = [0.11154, 0.11757]  # m
    p2d = 1.6  # pin pitch-to-diameter ratio
    h2d = 3.0  # wire pitch-to-pin-diameter ratio
    # Back out the pin diameter from the duct inner flat-to-flat and
    # P/D so the bundle exactly fits inside the duct
    pin_diameter = ((duct_ftf[0] - 2 * wire_diameter)
                    / (np.sqrt(3) * (n_ring - 1) * p2d + 1))
    pin_pitch = pin_diameter * p2d
    wire_pitch = pin_diameter * h2d
    inlet_flow_rate = 30.0  # kg /s
    inlet_temp = 273.15 + 350.0  # K
    coolant_obj = dassh.Material('sodium')
    duct_obj = dassh.Material('ss316')
    a = make_assembly(n_ring, pin_pitch, pin_diameter, clad_thickness,
                      wire_pitch, wire_diameter, duct_ftf, coolant_obj,
                      duct_obj, inlet_temp, inlet_flow_rate,
                      corr_friction='CTD')
    # Instantiation should have logged a warning for each out-of-range
    # parameter; check the captured log text
    assert all([param in caplog.text for param in
                ['pin pitch to diameter ratio',
                 'wire-pitch to pin-diameter ratio',
                 'number of rods in bundle']])
########################################################################
# FRICTION FACTOR TESTS
########################################################################
def test_nov_sample_problem():
    """Test the sample problem given in the Novendstern correlation paper;
    parameters are all calculated and shown in the paper, just using them
    to demonstrate that I get the same result with the implemented corr."""
    # Dummy class to mock DASSH Coolant, Subchannel, RegionRodded objects
    class Dummy(object):
        def __init__(self, **kwargs):
            for k in kwargs.keys():
                setattr(self, k, kwargs[k])
    # Dummy Coolant object (imperial units from the paper -> SI)
    coolant_properties = {
        'viscosity': 0.677 * 0.00041337887,  # lb/hrft --> kg/m-s
        'density': 53.5 * 16.0185}  # lb/ft3 --> kg/m3
    coolant = Dummy(**coolant_properties)
    # Dummy Subchannel object
    subchannel = Dummy(**{'n_sc': {'coolant': {'interior': 384,
                                               'edge': 48,
                                               'corner': 6,
                                               'total': 438}}})
    # Dummy Region object: FFTF assembly geometry (inches --> meters)
    fftf = {
        'n_ring': 9,
        'n_pin': 217,
        'duct_ftf': [[4.335 * 2.54 / 100, 4.835 * 2.54 / 100]],
        'pin_diameter': 0.23 * 2.54 / 100,
        'pin_pitch': 0.2879 * 2.54 / 100,
        'wire_diameter': 0.056 * 2.54 / 100,
        'wire_pitch': 12 * 2.54 / 100,
        'coolant': coolant,
        'subchannel': subchannel,
        'params': {'area': np.array([0.0139 * 2.54 * 2.54 / 100 / 100,
                                     0.0278 * 2.54 * 2.54 / 100 / 100,
                                     0.0099 * 2.54 * 2.54 / 100 / 100]),
                   'de': np.array([0.124 * 2.54 / 100,
                                   0.151 * 2.54 / 100,
                                   0.114 * 2.54 / 100])},
        'bundle_params': {'area': 6.724 * 2.54 * 2.54 / 100 / 100,
                          'de': 0.128 * 2.54 / 100},
        'int_flow_rate': 183000 * 0.000125998  # lb/hr --> kg/s
    }
    asm = Dummy(**fftf)
    # Calculate the necessary coolant flow parameters: velocity, Re; then
    # assign to the dummy assembly
    v_tot = asm.int_flow_rate / asm.coolant.density / asm.bundle_params['area']
    Re = (asm.coolant.density
          * v_tot
          * asm.bundle_params['de']
          / asm.coolant.viscosity)
    asm.coolant_int_params = {'Re': Re, 'vel': v_tot}
    # Calculate friction factor, use to determine pressure drop / L
    ff = dassh.correlations.friction_nov.calculate_bundle_friction_factor(asm)
    dp = ff * asm.coolant.density * v_tot**2 / 2 / asm.bundle_params['de']
    # Reference answer from the paper: 4.64 psi/ft --> Pa/m
    ans = 4.64 * 6894.76 / 0.3048
    diff = ans - dp
    rel_diff = diff / ans
    assert rel_diff < 0.002
def test_ctd_intermittency_factor(thesis_asm_rr):
    """Test the calculation of intermittency factor used in the
    Cheng-Todreas friction and flow split correlations in the
    transition regime

    The factor should exceed 1 for a Reynolds number above the
    turbulent bound and fall in (0, 1) inside the transition band.
    """
    # Clone so the shared fixture is not mutated by this test
    test_asm = thesis_asm_rr.clone()
    test_asm._update_coolant_int_params(300.15)
    # Intermittency factor should be greater than 1 if turbulent
    # (removed leftover debug `print(dir(...))` from the original)
    Re_bnds = friction_ctd.calculate_Re_bounds(test_asm)
    assert test_asm.coolant_int_params['Re'] > Re_bnds[1]  # chk trblnce
    x = friction_ctd.calc_intermittency_factor(test_asm, Re_bnds[0],
                                               Re_bnds[1])
    assert x > 1
    # Intermittency factor should be less than 1 if transition
    test_asm.coolant_int_params['Re'] = Re_bnds[1] - 2000.0
    assert test_asm.coolant_int_params['Re'] > Re_bnds[0]  # chk laminar
    x = friction_ctd.calc_intermittency_factor(test_asm, Re_bnds[0],
                                               Re_bnds[1])
    assert 0 < x < 1
def test_ctd_laminar_cfb(testdir):
    """Test the Cheng-Todreas Detailed (1986) correlations against
    data published in the 1986 paper for calculation of the bundle
    average friction factor constant for laminar flow.

    Notes
    -----
    The tolerance for this test is set rather high (6%) because the
    paper does not provide the exact set of parameters required to
    calculated the friction factor constant. The constant is very
    sensitive to the inner duct flat-to-flat distance, but this value
    is not given in the paper. Without knowing the exact value Cheng-
    Todreas used to calculate the results tabulated in their paper,
    this is the closest we can get.

    """
    df = pd.read_csv(os.path.join(testdir, 'test_data',
                                  'cfb_laminar.csv'), header=0)
    # Will only print if the test fails
    print('{:15s} {:>5s} {:>8s} {:>8s} {:>8s}'
          .format('Experiment', 'Year', 'Result', 'CTD', 'Rel err.'))
    abs_err = np.zeros(len(df))
    rel_err = np.zeros(len(df))
    for exp in df.index:
        # Pull assembly geometry information from table
        n_ring = df.loc[exp]['Rings']
        wire_pitch = df.loc[exp]['H']
        pin_diameter = df.loc[exp]['D']
        pin_pitch = pin_diameter * df.loc[exp]['P/D']  # m
        clad_thickness = 0.5 / 1e3  # m
        wire_diameter = df.loc[exp]['D_wire']
        duct_inner_ftf = df.loc[exp]['Duct FTF']
        duct_ftf = [duct_inner_ftf, duct_inner_ftf + 0.001]  # m
        inlet_flow_rate = 0.5  # kg /s
        inlet_temp = 273.15
        coolant_obj = dassh.Material('water')
        duct_obj = dassh.Material('ss316')
        try:
            a = make_assembly(n_ring, pin_pitch, pin_diameter,
                              clad_thickness, wire_pitch,
                              wire_diameter, duct_ftf, coolant_obj,
                              duct_obj, inlet_temp, inlet_flow_rate)
        except:
            # Fix: the original had a bare `raise` here that made the
            # xfail below unreachable; every sibling test in this file
            # xfails on instantiation failure, so match that behavior.
            pytest.xfail("Failure in DASSH Assembly instantiation "
                         "(should raise another error elsewhere in "
                         "the tests)")
        res = (friction_ctd
               .calculate_bundle_friction_factor_const(a)['laminar'])
        abs_err[exp] = res - df.loc[exp]['CTD']
        rel_err[exp] = abs_err[exp] / df.loc[exp]['CTD']
        # Only print if assertion fails
        print('{:<2d} {:12s} {:5d} {:8.2f} {:8.2f} {:8.2f}'
              .format(exp, df.loc[exp]['Investigators'],
                      df.loc[exp]['Year'], res, df.loc[exp]['CTD'],
                      100 * rel_err[exp]))
    print(max(np.abs(abs_err)))
    print(max(np.abs(rel_err)))
    assert max(np.abs(abs_err)) < 0.35  # I made up this tolerance
    assert max(np.abs(rel_err)) < 0.005
def test_cts_laminar_cfb(testdir):
    """Test the Cheng-Todreas Simple (1986) correlations against
    data published in the 1986 paper for calculation of the bundle
    average friction factor constant for laminar flow.

    Notes
    -----
    Because the simple correlation only requires a few parameters,
    the tolerance for this test (1%) is much lower than that for
    the analogous Cheng-Todreas Detailed test.

    """
    df = pd.read_csv(os.path.join(testdir, 'test_data',
                                  'cfb_laminar.csv'), header=0)
    # Will only print if the test fails
    print('{:15s} {:>5s} {:>8s} {:>8s} {:>8s}'
          .format('Experiment', 'Year', 'Result', 'CTS', 'Rel err.'))
    abs_err = np.zeros(len(df))
    rel_err = np.zeros(len(df))
    for exp in df.index:
        # Pull assembly geometry information from table
        n_ring = df.loc[exp]['Rings']
        wire_pitch = df.loc[exp]['H']
        pin_diameter = df.loc[exp]['D']
        pin_pitch = pin_diameter * df.loc[exp]['P/D']  # m
        clad_thickness = 0.5 / 1e3  # m
        wire_diameter = df.loc[exp]['D_wire']
        duct_inner_ftf = df.loc[exp]['Duct FTF']  # + 0.0001
        duct_ftf = [duct_inner_ftf, duct_inner_ftf + 0.001]  # m
        inlet_flow_rate = 0.5  # kg /s
        inlet_temp = 273.15
        coolant_obj = dassh.Material('water')
        duct_obj = dassh.Material('ss316')
        try:
            a = make_assembly(n_ring, pin_pitch, pin_diameter,
                              clad_thickness, wire_pitch,
                              wire_diameter, duct_ftf, coolant_obj,
                              duct_obj, inlet_temp, inlet_flow_rate)
        except:
            pytest.xfail("Failure in DASSH Assembly instantiation "
                         "(should raise another error elsewhere in "
                         "the tests)")
        res = (friction_cts
               .calculate_bundle_friction_factor_const(a)['laminar'])
        abs_err[exp] = res - df.loc[exp]['CTS']
        rel_err[exp] = abs_err[exp] / df.loc[exp]['CTS']
        # Only print if assertion fails
        print('{:<2d} {:12s} {:5d} {:8.2f} {:8.2f} {:8.2f}'
              .format(exp, df.loc[exp]['Investigators'],
                      df.loc[exp]['Year'], res, df.loc[exp]['CTS'],
                      100 * rel_err[exp]))
    print(max(np.abs(abs_err)))
    print(max(np.abs(rel_err)))
    assert max(np.abs(abs_err)) < 0.75
    assert max(np.abs(rel_err)) < 0.01
def test_ctd_turbulent_cfb(testdir):
    """Test the Cheng-Todreas Detailed (1986) correlations against
    data published in the 1986 paper for calculation of the bundle
    average friction factor constant for laminar flow.

    Notes
    -----
    I (<NAME>) could not find any tabulated data, so I visually
    assessed the data by hand from Figure 13 of the Cheng-Todreas
    paper (1986). The pin bundle characteristics for the Marten
    data came from the description in Table 6 of the same paper.
    The characteristics for the Rehme data come from Table 1 of
    Chen et al (2014).

    """
    df = pd.read_csv(os.path.join(testdir, 'test_data',
                                  'cfb_turbulent.csv'), header=0)
    # Will only print if the test fails
    print('{:15s} {:>5s} {:>8s} {:>8s} {:>8s}'
          .format('Experiment', 'Year', 'Result', 'CTD', 'Rel. err'))
    abs_err = np.zeros(len(df))
    rel_err = np.zeros(len(df))
    for exp in df.index:
        # Pull assembly geometry information from table
        n_ring = df.loc[exp]['Rings']
        wire_pitch = df.loc[exp]['H']
        pin_diameter = df.loc[exp]['D']
        pin_pitch = pin_diameter * df.loc[exp]['P/D']  # m
        clad_thickness = 0.5 / 1e3  # m
        wire_diameter = df.loc[exp]['D_wire']
        duct_inner_ftf = df.loc[exp]['Duct FTF'] + 1e-4
        # None of this stuff matters - it's not used in the calculation
        duct_ftf = [duct_inner_ftf, duct_inner_ftf + 0.001]  # m
        inlet_flow_rate = 30.0  # kg /s; enough to be turbulent
        # inlet_temp = 298.15
        inlet_temp = 273.15
        coolant_obj = dassh.Material('water')
        duct_obj = dassh.Material('ss316')
        try:
            a = make_assembly(n_ring, pin_pitch, pin_diameter,
                              clad_thickness, wire_pitch,
                              wire_diameter, duct_ftf, coolant_obj,
                              duct_obj, inlet_temp, inlet_flow_rate)
        except:
            pytest.xfail("Failure in DASSH Assembly instantiation "
                         "(should raise another error elsewhere in "
                         "the tests)")
        res = (friction_ctd.calculate_bundle_friction_factor_const(a)
               ['turbulent'])
        abs_err[exp] = res - df.loc[exp]['CTD']
        rel_err[exp] = abs_err[exp] / df.loc[exp]['CTD']
        # Only print if assertion fails
        print('{:<2d} {:12s} {:5d} {:8.5f} {:8.3f} {:8.2f}'
              .format(exp, df.loc[exp]['Investigators'],
                      df.loc[exp]['Year'], res, df.loc[exp]['CTD'],
                      100 * rel_err[exp]))
    print(max(np.abs(abs_err)))
    print(max(np.abs(rel_err)))
    assert max(np.abs(abs_err)) < 0.025
    assert max(np.abs(rel_err)) < 0.10
def test_thesis_asm_sc_friction_constants(thesis_asm_rr):
    """Individual subchannel friction factor constants from the CTD
    correlation match the reference values to within 1 percent."""
    result = (friction_ctd
              .calculate_subchannel_friction_factor_const(thesis_asm_rr))
    expected = {'laminar': [79.78, 89.07, 114.97],      # 103.54
                'turbulent': [0.2273, 0.2416, 0.3526]}  # 0.2456
    for regime in result.keys():
        # Printed output only appears when the assertion fails
        print('ans', regime, expected[regime])
        print('result', regime, result[regime])
        for got, ref in zip(result[regime], expected[regime]):
            rel_err = (got - ref) / ref
            assert abs(100 * rel_err) < 1.0
def test_thesis_asm_friction_constants(thesis_asm_rr):
    """Bundle-average friction factor constants from the CTD
    correlation match the thesis reference values (laminar within
    1%, turbulent within 2%)."""
    # Compute once and index both regimes; the original invoked the
    # correlation twice on the same assembly for identical results.
    cfb = friction_ctd.calculate_bundle_friction_factor_const(thesis_asm_rr)
    cfb_L = cfb['laminar']
    error = (cfb_L - 81.11) / 81.11
    print(cfb_L)
    print(error)
    assert abs(100 * error) < 1.0
    cfb_T = cfb['turbulent']
    error = (cfb_T - 0.2306) / 0.2306
    print(cfb_T)
    print(error)
    assert abs(100 * error) < 2.0
def test_thesis_asm_friction_constants_exact(thesis_asm_rr):
    """Bundle constants computed from exact subchannel constants agree
    with the thesis reference values to within 0.01 percent."""
    subchannel_cf = {'laminar': [79.78, 89.07, 103.54],
                     'turbulent': [0.2273, 0.2416, 0.2456]}
    cfb = friction_ctd._calc_cfb(thesis_asm_rr, subchannel_cf)
    laminar_err = (81.11 - cfb['laminar']) / 81.11
    assert abs(100 * laminar_err) < 0.01
    turbulent_err = (0.2306 - cfb['turbulent']) / 0.2306
    assert abs(100 * turbulent_err) < 0.01
def test_compare_ff_correlations_turbulent(textbook_active_rr):
    """Compare the friction factors obtained by different
    correlations for turbulent flow

    Notes
    -----
    This test compares the calculated friction factors to each
    other, not to experimental data. In the future, another
    test may utilize a dataset of experimental data to confirm
    that the correlations achieve their advertised uncertainties.

    """
    # tol = 10.0  # percent
    # Will only print if the test fails
    # print(textbook_asm.corr)
    # print(textbook_asm.corr_names)
    # print(textbook_asm.correlation_constants['fs'])
    print('P/D ', (textbook_active_rr.pin_pitch
                   / textbook_active_rr.pin_diameter))
    print('H/D ', (textbook_active_rr.wire_pitch
                   / textbook_active_rr.pin_diameter))
    print('Re ', textbook_active_rr.coolant_int_params['Re'])
    print('{:<6s} {:>6s} {:>6s}'.format('Corr.', 'ff', '% Diff'))
    # Five friction-factor correlations to compare; index 0 (CTD) is
    # the baseline the others are measured against
    corr = [friction_ctd, friction_cts, friction_uctd,
            friction_nov, friction_reh]
    name = ['CTD', 'CTS', 'UCTD', 'NOV', 'REH']
    res = np.zeros(len(corr))
    abs_err = np.zeros(len(corr))
    rel_err = np.zeros(len(corr))
    for i in range(len(corr)):
        # Re-setup the region with each friction correlation in turn
        textbook_active_rr._setup_correlations(name[i], 'CTD', 'CTD', 'DB')
        textbook_active_rr._update_coolant_int_params(300.15)
        res[i] = corr[i].calculate_bundle_friction_factor(textbook_active_rr)
        abs_err[i] = (res[i] - res[0])
        rel_err[i] = abs_err[i] / res[0]
        print('{:<6s} {:6.5f} {:6.5f}'
              .format(name[i], res[i], 100 * rel_err[i]))
    print('Expect similar results from all correlations')
    print('Max abs. err relative to CTD: ', max(abs(abs_err)))
    print('Max rel. err relative to CTD: ', max(abs(rel_err)))
    assert max(np.abs(abs_err)) < 0.001
    assert max(np.abs(rel_err)) < 0.05
########################################################################
# FLOW SPLIT
########################################################################
def test_flow_split(textbook_active_rr):
    """Check the relative magnitudes of the flow split values given
    by the four correlations (MIT, NOV, CTD, UCTD)."""
    correlations = [flowsplit_mit, flowsplit_nov, flowsplit_ctd,
                    flowsplit_uctd]
    res = np.zeros((len(correlations), 3))
    for i, fs_corr in enumerate(correlations):
        # Swap in the flow-split constants for this correlation
        del textbook_active_rr.corr_constants['fs']
        textbook_active_rr.corr_constants['fs'] = \
            fs_corr.calc_constants(textbook_active_rr)
        res[i, :] = fs_corr.calculate_flow_split(textbook_active_rr)
    # In all cases, x1 < x2 and x2 >= x3
    assert all([row[0] < row[1] for row in res])
    assert all([row[1] >= row[2] for row in res])
    # Make sure all have similar total velocity magnitude
    vel = np.array([sum(res[i, :]) for i in range(len(res))])
    assert np.std(vel) < 0.10  # arbitrary but seems relatively small
def test_flowsplit_x2_ctd(testdir):
    """Test the Cheng-Todreas Detailed (1986) flow split
    correlations against data published in the 1986 paper

    Notes
    -----
    I (<NAME>) could not find any tabulated data, so I visually
    assessed the data by hand from Figure 14 of the Cheng-Todreas
    paper (1986). Some of the pin bundle characteristics were known
    from the friction factor tests.

    """
    df = pd.read_csv(os.path.join(testdir, 'test_data',
                                  'ctd_x2.csv'), header=0)
    df = df.dropna()
    # Flow rates chosen to land in each regime (values are arbitrary)
    flowrate = {'laminar': 0.5, 'turbulent': 30.0}  # arbitary
    abs_err = np.zeros(len(df))
    rel_err = np.zeros(len(df))
    # Will only print if the test fails
    print('{:15s} {:>5s} {:>8s} {:>8s} {:>8s}'
          .format('Experiment', 'Year', 'X2', 'CTD', 'Rel. err'))
    idx = 0
    for exp in df.index:
        # Pull assembly geometry information from table
        n_ring = int(df.loc[exp]['Rings'])
        wire_pitch = df.loc[exp]['H']
        pin_diameter = df.loc[exp]['D']
        pin_pitch = pin_diameter * df.loc[exp]['P/D']  # m
        clad_thickness = 0.5 / 1e3  # m
        wire_diameter = df.loc[exp]['D_wire']
        duct_inner_ftf = df.loc[exp]['Duct FTF'] + 1e-4
        # None of this stuff matters - it's not used in the calculation
        duct_ftf = [duct_inner_ftf, duct_inner_ftf + 0.001]  # m
        inlet_flow_rate = flowrate[df.loc[exp]['Regime']]
        inlet_temp = 273.15
        coolant_obj = dassh.Material('water')
        duct_obj = dassh.Material('ss316')
        try:
            a = make_assembly(n_ring, pin_pitch, pin_diameter,
                              clad_thickness, wire_pitch,
                              wire_diameter, duct_ftf, coolant_obj,
                              duct_obj, inlet_temp, inlet_flow_rate)
        except:
            pytest.xfail("Failure in DASSH Assembly instantiation "
                         "(should raise another error elsewhere in "
                         "the tests)")
        a._update_coolant_int_params(inlet_temp)
        print(a.coolant_int_params['Re'], df.loc[exp]['Regime'])
        # res[1] is X2, the edge-subchannel flow split
        res = flowsplit_ctd.calculate_flow_split(a)
        abs_err[idx] = res[1] - df.loc[exp]['CTD']
        rel_err[idx] = abs_err[idx] / df.loc[exp]['CTD']
        # Only print if assertion fails
        print('{:<2d} {:12s} {:5d} {:8.5f} {:8.3f} {:8.2f}'
              .format(exp, df.loc[exp]['Investigators'],
                      df.loc[exp]['Year'], res[1], df.loc[exp]['CTD'],
                      100 * rel_err[idx]))
        idx += 1
    print('Max abs err: ', round(max(np.abs(abs_err)), 2))
    print('Max rel err: ', round(max(np.abs(rel_err)), 2))
    assert max(np.abs(abs_err)) < 0.02
    assert max(np.abs(rel_err)) < 0.02
def test_ctd_transition_flowsplit(thesis_asm_rr):
    """Test the Cheng-Todreas Detailed (1986) flow split
    correlations in the transition regime

    Notes
    -----
    The transition flowsplit values should fall within the range
    set by the laminar and turbulent flow split values. It should
    approach the laminar/turbulent flow split values as the Reynolds
    number approaches the laminar/turbulent regime.

    """
    # Clone so the shared fixture is not mutated by this test
    test_asm = thesis_asm_rr.clone()
    flowsplit = {}
    # Re bounds depend only on geometry
    Re_bnds = friction_ctd.calculate_Re_bounds(test_asm)
    print(Re_bnds)
    # Turbulent - thesis_asm fixture comes in turbulent regime as-is
    test_asm._update_coolant_int_params(300.15)  # arbitrary water temp
    print('turbulent ', test_asm.coolant_int_params['Re'])
    assert test_asm.coolant_int_params['Re'] > Re_bnds[1]  # turbulent
    flowsplit['turbulent'] = test_asm.coolant_int_params['fs']
    # Laminar - need to adjust flow rate and update params
    test_asm.int_flow_rate = 0.1
    test_asm._update_coolant_int_params(300.15)
    print('laminar ', test_asm.coolant_int_params['Re'])
    assert test_asm.coolant_int_params['Re'] < Re_bnds[0]  # laminar
    flowsplit['laminar'] = test_asm.coolant_int_params['fs']
    # Transition - flow rate between the two regimes
    test_asm.int_flow_rate = 1.0
    test_asm._update_coolant_int_params(300.15)
    print('transition ', test_asm.coolant_int_params['Re'])
    assert test_asm.coolant_int_params['Re'] > Re_bnds[0]
    assert test_asm.coolant_int_params['Re'] < Re_bnds[1]
    # flowsplit['transition'] = thesis_asm.coolant_int_params['fs']
    # Compare transition with turbulent, laminar: each transition flow
    # split must be bracketed by the laminar and turbulent values
    for i in range(0, len(test_asm.coolant_int_params['fs'])):
        bnds = [flowsplit['laminar'][i], flowsplit['turbulent'][i]]
        assert min(bnds) < test_asm.coolant_int_params['fs'][i] < max(bnds)
    # Make flow more turbulent, flow split should approach turbulent
    fs1 = test_asm.coolant_int_params['fs']
    test_asm.int_flow_rate = 6.0
    test_asm._update_coolant_int_params(300.15)
    print('transition 2', test_asm.coolant_int_params['Re'])
    assert test_asm.coolant_int_params['Re'] > Re_bnds[0]
    assert test_asm.coolant_int_params['Re'] < Re_bnds[1]
    for i in range(0, len(test_asm.coolant_int_params['fs'])):
        assert (abs(flowsplit['turbulent'][i] - fs1[i])
                > abs(flowsplit['turbulent'][i]
                      - test_asm.coolant_int_params['fs'][i]))
########################################################################
# MIXING
########################################################################
def test_eddy_diffusivity_constants(testdir):
    """Test the Cheng-Todreas Detailed (1986) eddy diffusivity
    constant (CmT) correlations against data published in the 1986
    paper

    Notes
    -----
    From Table 1 of the 1986 paper

    """
    df = pd.read_csv(os.path.join(testdir, 'test_data',
                                  'mp_turbulent.csv'), header=0)
    df = df.dropna(subset=['CmT'])
    abs_err = np.zeros(len(df))
    rel_err = np.zeros(len(df))
    # Will only print if the test fails
    print('{:15s} {:>5s} {:>8s} {:>8s} {:>8s}'
          .format('Experiment', 'Year', 'CmT', 'Data', 'Rel. err'))
    idx = 0
    for exp in df.index:
        # Pull assembly geometry information from table
        n_ring = int(df.loc[exp]['Rings'])
        wire_pitch = df.loc[exp]['H']
        pin_diameter = df.loc[exp]['D']
        pin_pitch = pin_diameter * df.loc[exp]['P/D']  # m
        clad_thickness = 0.5 / 1e3  # m
        wire_diameter = df.loc[exp]['D_wire']
        duct_inner_ftf = df.loc[exp]['Duct FTF']
        # None of this stuff matters - it's not used in the calculation
        duct_ftf = [duct_inner_ftf, duct_inner_ftf + 0.001]  # m
        inlet_flow_rate = 30.0  # enough to be turbulent
        inlet_temp = 273.15
        coolant_obj = dassh.Material('water')
        duct_obj = dassh.Material('ss316')
        try:
            a = make_assembly(n_ring, pin_pitch, pin_diameter,
                              clad_thickness, wire_pitch,
                              wire_diameter, duct_ftf, coolant_obj,
                              duct_obj, inlet_temp, inlet_flow_rate)
        except:
            pytest.xfail("Failure in DASSH Assembly instantiation "
                         "(should raise another error elsewhere in "
                         "the tests)")
        res = (mixing_ctd
               .calculate_mixing_param_constants(a)[0]['turbulent'])
        abs_err[idx] = res - df.loc[exp]['CmT']
        rel_err[idx] = abs_err[idx] / df.loc[exp]['CmT']
        # Only print if assertion fails
        print('{:<2d} {:12s} {:5d} {:8.5f} {:8.3f} {:8.2f}'
              .format(exp, df.loc[exp]['Investigators'],
                      df.loc[exp]['Year'], res, df.loc[exp]['CmT'],
                      100 * rel_err[idx]))
        idx += 1
    # print(round(max(np.abs(abs_err)), 2))
    # print(round(max(np.abs(rel_err)), 2))
    # Check the RMS error over all experiments rather than the max
    var = sum(np.array([x**2 for x in abs_err])) / len(abs_err)
    assert np.sqrt(var) <= 0.20
def test_eddy_diffusivity(testdir):
    """Test the eddy diffusivity (epsilon) from the Cheng-Todreas
    (1986) correlations against data published in the 1986 paper

    Notes
    -----
    From Table 1 of the 1986 paper

    """
    df = pd.read_csv(os.path.join(testdir, 'test_data',
                                  'mp_turbulent.csv'), header=0)
    df = df.dropna(subset=['eps'])  # , 'Duct FTF'])
    # Drop the second Chiu experiment - the value for eddy diffusivity
    # must be wrong - see Figure 5.17 (page 241) of Cheng's MIT thesis
    df = df[(df['Investigators'] != 'Chiu') & (df['H/D'] != 8)]
    for mp in [mixing_ctd]:
        abs_err = np.zeros(len(df))
        rel_err = np.zeros(len(df))
        # Will only print if the test fails
        print('{:15s} {:>5s} {:11s} {:>8s} {:>8s} {:>8s}'
              .format('Experiment', 'Year', 'Model',
                      'Eps', 'Data', 'Rel. err'))
        idx = 0
        for exp in df.index:
            # Pull assembly geometry information from table
            n_ring = int(df.loc[exp]['Rings'])
            wire_pitch = df.loc[exp]['H']
            pin_diameter = df.loc[exp]['D']
            pin_pitch = pin_diameter * df.loc[exp]['P/D']  # m
            clad_thickness = 0.5 / 1e3  # m
            wire_diameter = df.loc[exp]['D_wire']
            duct_inner_ftf = df.loc[exp]['Duct FTF']
            # None of this stuff matters - it's not used in the calculation
            duct_ftf = [duct_inner_ftf, duct_inner_ftf + 0.001]  # m
            inlet_flow_rate = 100.0  # enough to be turbulent
            inlet_temp = 273.15
            coolant_obj = dassh.Material('water')
            duct_obj = dassh.Material('ss316')
            try:
                a = make_assembly(n_ring, pin_pitch, pin_diameter,
                                  clad_thickness, wire_pitch,
                                  wire_diameter, duct_ftf, coolant_obj,
                                  duct_obj, inlet_temp, inlet_flow_rate)
            except:
                pytest.xfail("Failure in DASSH Assembly instantiation "
                             "(should raise another error elsewhere in "
                             "the tests)")
            # Force turbulence
            a.coolant_int_params['Re'] = 2e4
            # Calculate eddy diffusivity
            res = mp.calculate_mixing_params(a)[0]
            # Eddy diffusivity is scaled in module by L[0][0]; need to
            # UNDO because here we're comparing the dimensionless value
            res /= a.L[0][0]
            abs_err[idx] = res - df.loc[exp]['eps']
            rel_err[idx] = abs_err[idx] / df.loc[exp]['eps']
            # Only print if assertion fails
            print('{:<2d} {:12s} {:5d} {:12s} {:8.5f} {:8.3f} {:8.2f}'
                  .format(exp, df.loc[exp]['Investigators'],
                          df.loc[exp]['Year'],
                          mp.__name__.split('.')[-1], res,
                          df.loc[exp]['eps'], 100 * rel_err[idx]))
            idx += 1
        # print(round(max(np.abs(abs_err)), 2))
        # print(round(max(np.abs(rel_err)), 2))
        # RMS error over all experiments
        var = np.array([x**2 for x in abs_err])
        var = sum(var) / len(var)
        print(np.sqrt(var))
        assert np.sqrt(var) <= 0.01
        # demonstrated fit (Cheng 1986 fig 17) is +/- 25%, but some
        # exceed that or are not shown, so 35% is used here (yikes!)
        assert max(rel_err) < 0.35
def test_mit_eddy_diffusivity(testdir):
    """Test the eddy diffusivity (epsilon) from the MIT (1978)
    correlations against data published in the 1980 report
    comparing the ENERGY and SIMPLE correlations.

    Notes
    -----
    From Tables 1, 2 of the 1980 report.
    Three assemblies: fuel, blanket, intermediate.
    """
    # Assembly geometry from the report (inches converted to meters)
    # n_pin = np.array([217, 61, 37])
    n_ring = np.array([9, 5, 4])
    d_pin = np.array([0.230, 0.506, 0.501]) * 0.0254   # in -> m
    d_wire = np.array([0.056, 0.033, 0.075]) * 0.0254  # in -> m
    pitch = np.array([0.288, 0.542, 0.578]) * 0.0254   # in -> m
    clad_thickness = 0.5 / 1e3  # m
    lead = np.array([11.9, 4.00, 10.5]) * 0.0254  # in -> m
    # Published dimensionless eddy diffusivity values to match
    ans = np.array([0.0288, 0.448, 0.100])
    abs_err = np.zeros(len(ans))
    rel_err = np.zeros(len(ans))
    for i in range(3):
        # ftf distance doesn't matter, just need one that's big enough
        duct_inner_ftf = (np.sqrt(3) * (n_ring[i] - 1) * pitch[i]
                          + d_pin[i] + 2 * d_wire[i] + 0.002)
        duct_ftf = [duct_inner_ftf, duct_inner_ftf + 0.001]  # m
        inlet_flow_rate = 100.0  # enough to be turbulent
        inlet_temp = 273.15
        coolant_obj = dassh.Material('water')
        duct_obj = dassh.Material('ss316')
        try:
            a = make_assembly(n_ring[i], pitch[i], d_pin[i],
                              clad_thickness, lead[i], d_wire[i],
                              duct_ftf, coolant_obj, duct_obj,
                              inlet_temp, inlet_flow_rate,
                              corr_mixing='MIT', se2geo=True)
        # Catch Exception (not a bare except) so KeyboardInterrupt and
        # SystemExit still propagate out of the test.
        except Exception:
            pytest.xfail("Failure in DASSH Assembly instantiation "
                         "(should raise another error elsewhere in "
                         "the tests)")
        # Force turbulence
        a.coolant_int_params['Re'] = 2e4
        # Calculate eddy diffusivity
        res = mixing_mit.calculate_mixing_params(a)[0]
        # Eddy diffusivity is scaled in module by hydraulic diameter
        # of interior subchannel; need to UNDO because here we're
        # comparing the dimensionless value
        res /= a.params['de'][0]
        abs_err[i] = res - ans[i]
        rel_err[i] = abs_err[i] / ans[i]
        print(ans[i], res, abs_err[i], rel_err[i])
    # Root-mean-square of the absolute error across the three assemblies
    var = np.array([x**2 for x in abs_err])
    var = sum(var) / len(var)
    print('std err: ', np.sqrt(var))
    assert np.sqrt(var) <= 0.02
    print('max rel err: ', max(abs(rel_err)))
    assert max(abs(rel_err)) < 0.065
def test_swirl_velocity_constants(testdir):
    """Test the swirl velocity constant (CsT) from the
    Cheng-Todreas (1986) correlations against data published
    in the 1986 paper.

    Notes
    -----
    From Table 2 of the 1986 paper.
    """
    df = pd.read_csv(os.path.join(testdir, 'test_data',
                                  'mp_turbulent.csv'), header=0)
    # Only experiments with a published CsT value are usable
    df = df.dropna(subset=['CsT'])
    abs_err = np.zeros(len(df))
    rel_err = np.zeros(len(df))
    # Will only print if the test fails
    print('{:15s} {:>5s} {:>8s} {:>8s} {:>8s}'
          .format('Experiment', 'Year', 'CsT', 'Data', 'Rel. err'))
    idx = 0
    for exp in df.index:
        # Pull assembly geometry information from table
        n_ring = int(df.loc[exp]['Rings'])
        wire_pitch = df.loc[exp]['H']
        pin_diameter = df.loc[exp]['D']
        pin_pitch = pin_diameter * df.loc[exp]['P/D']  # m
        clad_thickness = 0.5 / 1e3  # m
        wire_diameter = df.loc[exp]['D_wire']
        duct_inner_ftf = df.loc[exp]['Duct FTF']
        # None of this stuff matters - it's not used in the calculation
        duct_ftf = [duct_inner_ftf, duct_inner_ftf + 0.001]  # m
        inlet_flow_rate = 30.0  # enough to be turbulent
        inlet_temp = 273.15
        coolant_obj = dassh.Material('water')
        duct_obj = dassh.Material('ss316')
        try:
            a = make_assembly(n_ring, pin_pitch, pin_diameter,
                              clad_thickness, wire_pitch,
                              wire_diameter, duct_ftf, coolant_obj,
                              duct_obj, inlet_temp, inlet_flow_rate)
        # Catch Exception (not a bare except) so KeyboardInterrupt and
        # SystemExit still propagate out of the test.
        except Exception:
            pytest.xfail("Failure in DASSH Assembly instantiation "
                         "(should raise another error elsewhere in "
                         "the tests)")
        # Force turbulence
        a.coolant_int_params['Re'] = 2e4
        print()
        print(a.edge_pitch)
        res = mixing_ctd.calculate_mixing_param_constants(a)[1]
        res = res['turbulent']
        abs_err[idx] = res - df.loc[exp]['CsT']
        rel_err[idx] = abs_err[idx] / df.loc[exp]['CsT']
        # Only print if assertion fails
        print('{:<2d} {:12s} {:5d} {:8.5f} {:8.3f} {:8.2f}'
              .format(exp, df.loc[exp]['Investigators'],
                      df.loc[exp]['Year'], res,
                      df.loc[exp]['CsT'], 100 * rel_err[idx]))
        idx += 1
    # Root-mean-square (standard) error across all experiments
    var = np.array([x**2 for x in abs_err])
    var = sum(var) / len(abs_err)
    print('Standard error: ', np.sqrt(var))
    assert np.sqrt(var) <= 0.30
    assert all(x < 0.30 for x in rel_err)
def test_swirl_velocity(testdir):
    """Test the swirl velocity (C1L) from the MIT (1978) and
    Cheng-Todreas (1986) correlations against data published
    in the 1986 paper.

    Notes
    -----
    From Table 2 of the 1986 paper.
    """
    df = pd.read_csv(os.path.join(testdir, 'test_data',
                                  'mp_turbulent.csv'), header=0)
    # Only experiments with a published C1L value are usable
    df = df.dropna(subset=['C1L'])
    corr_name = ['CTD', 'MIT']
    corr = [mixing_ctd, mixing_mit]
    for mpi in range(2):
        mp = corr[mpi]
        abs_err = np.zeros(len(df))
        rel_err = np.zeros(len(df))
        # Will only print if the test fails
        print('{:15s} {:>5s} {:11s} {:>8s} {:>8s} {:>8s}'
              .format('Experiment', 'Year', 'Model',
                      'CsT', 'Data', 'Rel. err'))
        idx = 0
        for exp in df.index:
            # Pull assembly geometry information from table
            n_ring = int(df.loc[exp]['Rings'])
            wire_pitch = df.loc[exp]['H']
            pin_diameter = df.loc[exp]['D']
            pin_pitch = pin_diameter * df.loc[exp]['P/D']  # m
            clad_thickness = 0.5 / 1e3  # m
            wire_diameter = df.loc[exp]['D_wire']
            duct_inner_ftf = df.loc[exp]['Duct FTF']
            # None of this matters - it's not used in the calculation
            duct_ftf = [duct_inner_ftf, duct_inner_ftf + 0.001]  # m
            inlet_flow_rate = 100.0  # enough to be turbulent
            inlet_temp = 273.15
            coolant_obj = dassh.Material('water')
            duct_obj = dassh.Material('ss316')
            # Instantiation errors propagate deliberately here (unlike
            # the other correlation tests, which xfail on them).
            a = make_assembly(n_ring, pin_pitch, pin_diameter,
                              clad_thickness, wire_pitch,
                              wire_diameter, duct_ftf, coolant_obj,
                              duct_obj, inlet_temp, inlet_flow_rate,
                              corr_mixing=corr_name[mpi])
            # Force turbulence
            a.coolant_int_params['Re'] = 2e4
            res = mp.calculate_mixing_params(a)[1]
            abs_err[idx] = round(res, 2) - df.loc[exp]['C1L']
            rel_err[idx] = abs_err[idx] / df.loc[exp]['C1L']
            # Only print if assertion fails
            print('{:<2d} {:12s} {:5d} {:12s} {:8.5f} {:8.3f} {:8.2f}'
                  .format(exp, df.loc[exp]['Investigators'],
                          df.loc[exp]['Year'],
                          mp.__name__.split('.')[-1], res,
                          df.loc[exp]['C1L'], 100 * rel_err[idx]))
            idx += 1
        # Root-mean-square (standard) error across all experiments
        var = np.array([x**2 for x in abs_err])
        var = sum(var) / len(var)
        print('std err: ', np.sqrt(var))
        assert np.sqrt(var) <= 0.05
def test_ctd_sc_intermittency_factor(thesis_asm_rr):
    """Check the per-subchannel intermittency factors used by the
    Cheng-Todreas friction and flow split correlations.

    Turbulent conditions should yield factors above 1.0; transition
    conditions should yield factors strictly between 0.0 and 1.0.
    """
    asm = thesis_asm_rr.clone()
    # Regime boundaries for this assembly geometry
    re_lo, re_hi = friction_ctd.calculate_Re_bounds(asm)
    # Turbulent case: every subchannel factor exceeds unity
    factors = mixing_ctd.calc_sc_intermittency_factors(asm, re_lo, re_hi)
    assert all(f > 1 for f in factors)
    # Drop the flow rate so the bundle falls into the transition regime
    asm.int_flow_rate = 5.0
    asm._update_coolant_int_params(300.15)  # arbitrary water temp
    # (check that transition flow is achieved)
    assert re_lo < asm.coolant_int_params['Re'] < re_hi
    factors = mixing_ctd.calc_sc_intermittency_factors(asm, re_lo, re_hi)
    assert all(0 < f < 1 for f in factors)
########################################################################
# CORRELATION APPLICABILITY
########################################################################
# if name[i] == 'ENG': # outside the Engel range
# with pytest.warns(UserWarning):
# corr[i].calculate_bundle_friction_factor(textbook_asm)
|
#!/usr/bin/env bash
-- BETWEEN is inclusive on both bounds
SELECT 15 BETWEEN 1 AND 20;
SELECT 150 BETWEEN 1 AND 20;
SELECT 150 NOT BETWEEN 1 AND 20;
-- IN tests membership in a literal list
SELECT 10 IN(0,10,20,30);
SELECT 11 IN(0,10,20,30);
-- IS NULL matches only a true NULL, not '' or the string 'NULL' or 0
SELECT NULL IS NULL;
SELECT '' IS NULL;
SELECT 'NULL' IS NULL;
SELECT 0 IS NULL;
-- Filtering rows on NULL requires IS [NOT] NULL, never `= NULL`
SELECT * FROM `test`;
SELECT * FROM `test` WHERE `first_name` IS NULL;
SELECT * FROM `test` WHERE `first_name` IS NOT NULL;
|
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import styled from 'styled-components';
import { space, alignSelf, width } from 'styled-system';
import OutsideClickHandler from 'react-outside-click-handler';
import { preset } from 'react/styles/functions';
import PulldownValue from 'react/components/UI/Pulldown/components/PulldownValue';
import PulldownOption from 'react/components/UI/Pulldown/components/PulldownOption';
// Pulldown root: bordered rounded box. Border color and the bottom
// corner radii depend on the `mode` prop ('resting' vs 'expanded') so
// the expanded option list visually joins the control.
const Container = styled.div`
  position: relative;
  background-color: white;
  border-radius: 0.25em;
  border: 1px solid ${x => ({
    resting: x.theme.colors.gray.regular,
    expanded: x.theme.colors.gray.medium,
  }[x.mode])};
  ${space}
  ${alignSelf}
  ${preset(width, { width: '88%' })}
  z-index: 1;
  ${x => x.mode === 'expanded' && `
    border-bottom-left-radius: 0;
    border-bottom-right-radius: 0;
  `}
`;
// Dropdown panel, absolutely positioned directly under the control.
// -1px left/right offsets align its border with the Container's.
const PulldownOptions = styled.div`
  position: absolute;
  top: 100%;
  left: -1px;
  right: -1px;
  border-radius: 0.25em;
  background-color: white;
  border: 1px solid ${x => x.theme.colors.gray.medium};
  border-top-color: ${x => x.theme.colors.gray.regular};
  ${x => x.mode === 'expanded' && `
    border-top-left-radius: 0;
    border-top-right-radius: 0;
  `}
`;
export default class Pulldown extends Component {
static propTypes = {
value: PropTypes.oneOfType([
PropTypes.string, PropTypes.number, PropTypes.bool,
]).isRequired,
onChange: PropTypes.func,
options: PropTypes.objectOf(PropTypes.node).isRequired,
}
static defaultProps = {
onChange: () => {},
}
constructor(props) {
super(props);
const { value } = this.props;
this.state = {
mode: 'resting',
value,
};
}
expand = () =>
this.setState({ mode: 'expanded' });
rest = () =>
this.setState({ mode: 'resting' });
toggle = () => {
this.setState(prevState => ({
mode: prevState.mode === 'resting' ? 'expanded' : 'resting',
}));
}
selectValue = value => () => {
this.setState({ mode: 'resting', value });
return this.props.onChange(value);
}
render() {
const { value: selected, mode } = this.state;
const { options } = this.props;
return (
<Container mode={mode}>
<OutsideClickHandler onOutsideClick={this.rest}>
<PulldownValue mode={mode} onMouseDown={this.toggle} selected>
{options[selected]}
</PulldownValue>
{mode === 'expanded' &&
<PulldownOptions mode={mode}>
{Object.keys(options).map(key => (
<PulldownOption
key={key}
mode={mode}
selected={key === selected}
onClick={this.selectValue(key)}
>
{options[key]}
</PulldownOption>
))}
</PulldownOptions>
}
</OutsideClickHandler>
</Container>
);
}
}
|
<gh_stars>0
import * as dotenv from "dotenv";
// Load environment variables from the project-root .env file before
// reading process.env below.
dotenv.config({ path: `${__dirname}/../.env` });
// Central runtime configuration with local-development fallbacks.
export const config = {
  port: process.env.PORT || 5000,
  dbUrl: process.env.DB_URL || "bolt://neo4j:7687",
};
|
# platform = Red Hat Enterprise Linux 6
#
# Remediation: ensure the DHCP server (dhcpd) is neither enabled at
# boot nor currently running.
#
# Disable dhcpd for all run levels
#
/sbin/chkconfig --level 0123456 dhcpd off
#
# Stop dhcpd if currently running
#
/sbin/service dhcpd stop
|
<reponame>maufonseca/haste
package com.maufonseca.haste.infrastructure;
import android.support.annotation.NonNull;
import android.util.Log;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.Task;
import com.google.firebase.firestore.CollectionReference;
import com.google.firebase.firestore.DocumentSnapshot;
import com.google.firebase.firestore.QuerySnapshot;
import com.maufonseca.haste.model.Rush;
import com.maufonseca.haste.model.RushList;
import com.maufonseca.haste.presentation.home.HomePresenter;
/**
* Created by mauricio on 05/04/18.
*/
/**
 * Singleton gateway for reading {@link Rush} documents from a Firestore
 * collection and delivering the resulting {@link RushList} to the home
 * screen presenter.
 */
public class StorageWorker {
    private static StorageWorker singleton;

    private StorageWorker() {
        // Instances are obtained only through getInstance().
    }

    /** Lazily creates and returns the shared instance. */
    public static synchronized StorageWorker getInstance() {
        if (singleton == null) {
            singleton = new StorageWorker();
        }
        return singleton;
    }

    /**
     * Fetches all rushes ordered by their "position" field and reports the
     * result (or the failure) back to {@code presenter}.
     */
    public void getRushes(final HomePresenter presenter, CollectionReference rushesRef) {
        rushesRef
            .orderBy("position")
            .get()
            .addOnCompleteListener(new OnCompleteListener<QuerySnapshot>() {
                @Override
                public void onComplete(@NonNull Task<QuerySnapshot> task) {
                    if (!task.isSuccessful()) {
                        presenter.onError("Error getting documents.", task.getException());
                        return;
                    }
                    RushList fetched = new RushList();
                    for (DocumentSnapshot snapshot : task.getResult()) {
                        Log.d("FB", snapshot.getId() + " => " + snapshot.getData());
                        Rush rush = new Rush();
                        rush.setDescription((String) snapshot.getData().get("description"));
                        rush.setDone((boolean) snapshot.getData().get("done"));
                        rush.setPosition((long) snapshot.getData().get("position"));
                        rush.setId(snapshot.getId());
                        fetched.add(rush.getPosition(), rush);
                    }
                    presenter.onRushesArrived(fetched);
                }
            });
    }
}
|
from typing import Type


class DefaultFeed:
    """Fallback feed used when no specific feed matches a URL pattern."""
    pass


class LatestCommentFeed:
    """Feed of the most recent comments."""
    pass


# Registry mapping URL pattern names to feed classes.
feeds = {
    'comments': LatestCommentFeed,
}


def get_feed_class(url_pattern: str) -> type:
    """Return the feed class registered for ``url_pattern``.

    Falls back to ``DefaultFeed`` for unknown patterns. (The previous
    annotation referenced an undefined name ``FeedClass``, which raised
    NameError at import time.)
    """
    return feeds.get(url_pattern, DefaultFeed)
// Stores a value into a per-user variable in the bot's database.
// Args (from data.inside.splits): varname, value, then optional
// userId (defaults to the message author), guild Id (defaults to the
// current guild or "dm"), and table (defaults to the first table).
module.exports = async (d) => {
    const data = d.util.aoiFunc(d);
    if (data.err) return d.error(data.err);
    let [
        varname,
        value,
        userId = d.author?.id,
        Id = d.guild?.id || "dm",
        table = d.client.db.tables[0],
    ] = data.inside.splits;
    value = value.addBrackets();
    // The variable must be declared in this table before it can be set.
    if (!d.client.variableManager.has(varname.addBrackets(), table))
        return d.aoiError.fnError(
            d,
            "custom",
            {},
            `Variable ${varname.addBrackets()} Not Found!`,
        );
    const variable = d.client.variableManager.get(varname, table);
    // Reject values whose type doesn't match the declared variable type.
    if (!variable.checkType(value))
        return d.aoiError.fnError(
            d,
            "custom",
            { inside: data.inside },
            `Variable "${varname.addBrackets()}" Needs Value Of Type "${
                variable.type
            }". Provided Wrong Type In`,
        );
    value = d.client.variableManager.parseData(value, variable.type);
    // Key is scoped per user and per guild: "<userId>_<guildId>".
    try {
        await d.client.db.set(
            table,
            varname.addBrackets(),
            `${userId}_${Id}`,
            value,
        );
    } catch (e) {
        d.aoiError.fnError(
            d,
            "custom",
            {},
            `Failed To Set Value To The Variable: "${varname.addBrackets()}" With Reason: ${e}`,
        );
    }
    return {
        code: d.util.setCode(data),
    };
};
<reponame>ES-UFABC/UFABCplanner
import { useCallback, useEffect, useState } from 'react';
import { ICredentials } from '../../interfaces/credentials';
import api from '../../services/api';
import { AuthContext } from './context';
interface Props {
children: React.ReactNode;
}
const AuthProvider = ({ children }: Props) => {
  const [loading, setLoading] = useState(false);
  const [authenticated, setAuthenticated] = useState(false);

  // Authenticates against the API, persists the returned token and
  // optionally reloads the page so the app picks up the new session.
  const login = useCallback(async (data: ICredentials, reloadOnAuth?: boolean) => {
    setLoading(true);
    try {
      const response = await api.post('/users/login', data);
      if (response.data.token) {
        localStorage.setItem('auth_token', response.data.token)
        if (reloadOnAuth) window.location.reload();
      } else {
        alert('A API foi incapaz de gerar seu token de autenticação')
      }
    } catch (error: any) {
      error?.response?.data?.message
        ? alert(error?.response?.data?.message)
        : alert('Houve um erro ao tentar fazer o login...');
    } finally {
      setLoading(false);
    }
  }, []);

  // Clears the stored token and forces a full reload to reset app state.
  const logout = () => {
    localStorage.removeItem('auth_token');
    window.location.reload();
  };

  // On mount, derive the authenticated flag from any persisted token.
  useEffect(() => {
    setAuthenticated(!!localStorage.getItem('auth_token'));
  }, []);

  return (
    <AuthContext.Provider value={{ authenticated, loading, login, logout }}>
      {children}
    </AuthContext.Provider>
  );
};

export { AuthProvider };
|
package org.librealsense;
/**
 * Thin wrapper over a native librealsense (rs2) stream-profile list.
 * The caller owns the native resource and must call {@link #destroy()}.
 */
public class StreamProfileList {
    // Native handle to the underlying rs2 stream-profile list.
    long streamProfileList;
    protected StreamProfileList(long streamProfileList) {
        this.streamProfileList = streamProfileList;
    }
    /** Returns the number of stream profiles in this list. */
    public int getSize() {
        return Native.rs2GetStreamProfileCount(streamProfileList);
    }
    /** Returns the profile at {@code index}; bounds are not checked here. */
    public StreamProfile get(int index) {
        return new StreamProfile(Native.rs2GetStreamProfile(streamProfileList, index));
    }
    /** Releases the native list; this object must not be used afterwards. */
    public void destroy() {
        Native.rs2DeleteStreamProfilesList(streamProfileList);
    }
}
# Installomator label: Danish-localized Firefox, distributed as a DMG.
firefox_da)
    name="Firefox"
    type="dmg"
    downloadURL="https://download.mozilla.org/?product=firefox-latest&os=osx&lang=da"
    # Scrape the latest version number from Mozilla's releases page.
    appNewVersion=$(curl -fs https://www.mozilla.org/en-US/firefox/releases/ | grep '<html' | grep -o -i -e "data-latest-firefox=\"[0-9.]*\"" | cut -d '"' -f2)
    expectedTeamID="43AQ936H96"
    blockingProcesses=( firefox )
    printlog "WARNING for ERROR: Label firefox, firefox_da and firefox_intl should not be used. Instead use firefoxpkg and firefoxpkg_intl as per recommendations from Firefox. It's not fully certain that the app actually gets updated here. firefoxpkg and firefoxpkg_intl will have built in updates and make sure the client is updated in the future." REQ
    ;;
|
import random
import secrets


def generate_password(length=8):
    """Return a random password of ``length`` characters (default 8).

    Uses the ``secrets`` module (a CSPRNG) instead of ``random``, whose
    Mersenne Twister output is predictable and unsuitable for
    security-sensitive values such as passwords.
    """
    characters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%^&*()_+"
    return "".join(secrets.choice(characters) for _ in range(length))
# Board subscription ("abbo") plugin: registers the add-abbo trigger
# for the board model so users can subscribe to boards.
module Ricer4::Plugins::Board
  class Abbo < Ricer4::Plugin
    is_add_abbo_trigger :for => Ricer4::Plugins::Board::Model::Board
  end
end
import React, {Component} from 'react';
import {Form, Icon, Message, Button} from 'semantic-ui-react';
import StatusMessage from '../../components/statusmessage';
import './styles.css';
export default class Register extends Component {
constructor(props) {
super(props);
this.state = {
username: '',
name: '',
email: '',
password: '',
checked: true,
};
}
handleChange = (e, {name, value}) => {
this.setState({[name]: value});
};
handleCheckbox = () => {
this.setState({checked: !this.state.checked});
};
isFormValid = () => {
const {username, name, email, password, checked} = this.state;
let isFormValid = true;
if (!username || !name || !email || !password || !checked) {
isFormValid = false;
}
return isFormValid;
};
handleSubmit = e => {
if (this.isFormValid()) {
let data = {
username: this.state.username,
name: this.state.name,
email: this.state.email,
password: <PASSWORD>,
};
this.props.handleRegister(data);
}
};
render() {
let {isLoading, error, showLogin} = this.props;
const statusMessage = (
<StatusMessage
error={error}
errorMessage={error || 'Login Error'}
loading={isLoading}
loadingMessage={'Registering your account'}
type="modal"
/>
);
return (
<div>
<Message
attached
header="Welcome to our site!"
content="Fill out the form below to sign-up for a new account"
/>
{statusMessage}
<Form className="attached fluid segment">
<Form.Input
required
label="Username"
placeholder="Username"
type="text"
name="username"
value={this.state.username}
onChange={this.handleChange}
/>
<Form.Input
required
label="Name"
placeholder="Name"
type="text"
name="name"
value={this.state.name}
onChange={this.handleChange}
/>
<Form.Input
required
label="Email"
placeholder="Email"
type="email"
name="email"
value={this.state.email}
onChange={this.handleChange}
/>
<Form.Input
required
label="Password"
type="password"
name="password"
value={this.state.password}
onChange={this.handleChange}
/>
<Form.Checkbox
inline
required
label="I agree to the terms and conditions"
name="agreement"
checked={this.state.checked}
onChange={this.handleCheckbox}
/>
<Button
color="blue"
loading={isLoading}
disabled={isLoading}
onClick={this.handleSubmit}>
Submit
</Button>
</Form>
<Message attached="bottom" warning>
<Icon name="help" />
Already signed up?
{/* eslint-disable-next-line */}
<a className="register-login" onClick={showLogin}>
Login here
</a>
instead.
</Message>
</div>
);
}
}
|
from datetime import datetime, timedelta
from django.conf import settings
from django.conf.urls import url, patterns, include
from django.contrib.auth.forms import PasswordChangeForm
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext, ugettext_lazy as _, pgettext, pgettext_lazy
from canvas.exceptions import ServiceError, ValidationError
from canvas.models import Content, UserInfo
from canvas.redis_models import redis
from canvas.upload import api_upload, chunk_uploads
from canvas.view_guards import require_staff, require_POST, require_user
from drawquest import signals, models, api_forms, economy
from drawquest.api_decorators import api_decorator
from drawquest.apps.brushes.models import Brush
from drawquest.apps.drawquest_auth.details_models import PrivateUserDetails
from drawquest.apps.drawquest_auth.models import User
from drawquest.apps.iap.models import COIN_PRODUCTS, brush_products
from drawquest.apps.palettes.models import Color, ColorPack
from drawquest.apps.twitter.models import TwitterUser
from drawquest.apps.quests.models import Quest
from drawquest.apps.quest_comments.models import QuestComment
from website.apps.share_tracking.models import ShareTrackingUrl, get_share_page_url_with_tracking
# URL routing for the public API. Flagging endpoints are shared between
# quests and quest comments (both map to the same view).
urls = patterns('',
    url(r'^quest_comments/flag', 'apps.comments.api.flag_comment'),
    url(r'^quests/flag', 'apps.comments.api.flag_comment'),
)
# App-specific API subtrees; note quest_comments/ and quests/ each mount
# two apps under the same prefix.
urls += patterns('drawquest.api',
    url(r'^activity/', include('apps.activity.api')),
    url(r'^auth/', include('drawquest.apps.drawquest_auth.api')),
    url(r'^brushes/', include('drawquest.apps.brushes.api')),
    url(r'^chunk/', include(chunk_uploads)),
    url(r'^explore/', include('drawquest.apps.explore.api')),
    url(r'^feed/', include('drawquest.apps.feed.api')),
    url(r'^following/', include('drawquest.apps.following.api')),
    url(r'^iap/', include('drawquest.apps.iap.api')),
    url(r'^ios_logging/', include('drawquest.apps.ios_logging.api')),
    url(r'^invites/', include('drawquest.apps.invites.api')),
    url(r'^palettes/', include('drawquest.apps.palettes.api')),
    url(r'^playback/', include('drawquest.apps.playback.api')),
    url(r'^push_notifications/', include('drawquest.apps.push_notifications.api')),
    url(r'^quest_comments/', include('drawquest.apps.quest_comments.api')),
    url(r'^quest_comments/', include('drawquest.apps.star_gallery.api')),
    url(r'^quest_invites/', include('drawquest.apps.quest_invites.api')),
    url(r'^quests/', include('drawquest.apps.gallery.api')),
    url(r'^quests/', include('drawquest.apps.quests.api')),
    url(r'^profiles/', include('drawquest.apps.profiles.api')),
    url(r'^stars/', include('drawquest.apps.stars.api')),
    url(r'^submit_quest/', include('drawquest.apps.submit_quest.api')),
    url(r'^staff/', include('drawquest.apps.staff.api')),
    url(r'^timeline/', include('drawquest.apps.timeline.api')),
    url(r'^tumblr/', include('drawquest.apps.tumblr.api')),
    url(r'^ugq/', include('drawquest.apps.ugq.api')),
    url(r'^upload$', api_upload),
    url(r'^whitelisting/', include('drawquest.apps.whitelisting.api')),
    # Only used for the admin.
    url(r'^comment/', include('apps.comments.api')),
    # Disabled for now for perf.
    #url(r'^', include('apps.analytics.api')),
)
if settings.DRAWQUEST_SEARCH:
    urls += patterns('drawquest.api',
        url(r'^search/', include('drawquest.apps.search.api')),
    )
# Decorator used below to register view functions against these urls.
api = api_decorator(urls)
@api('metric/record')
def metric_record(request, name, info={}):
    """ Currently a no-op. """
    # NOTE(review): `info={}` is a shared mutable default; harmless while
    # this endpoint is a no-op, but switch to `info=None` if it gains a body.
@api('user/profile')
def user_profile(request, username):
    """Return the profile for `username` as seen by the requesting user."""
    return models.user_profile_for_viewer(username, viewer=request.user)
@api('user/change_profile')
@require_user
def change_profile(request, old_password=None, new_password=None, new_email=None, bio=None):
    """Update the authenticated user's bio, email and/or password.

    All arguments are optional; only the provided fields are changed.
    Changing the password requires both `old_password` and `new_password`.
    (The parameter defaults and the form payload previously contained
    broken `<PASSWORD>` redaction placeholders, a syntax error.)

    Raises:
        ValidationError: invalid or already-used email, or a new password
            that is too short / fails Django's PasswordChangeForm checks.
    """
    if bio is not None:
        request.user.userinfo.bio_text = bio
        request.user.userinfo.save()
        request.user.details.force()
    if new_email is not None:
        if not User.validate_email(new_email):
            raise ValidationError({'new_email': "Please enter a valid email address."})
        if request.user.email != new_email:
            if not User.email_is_unused(new_email):
                raise ValidationError({
                    'new_email': "Sorry! That email address is already being used for an account.",
                })
            request.user.email = new_email
            request.user.save()
            request.user.userinfo.update_hashes()
            request.user.details.force()
    if old_password is not None and new_password is not None:
        if not User.validate_password(new_password):
            raise ValidationError({
                'new_password': "Sorry, your new password is too short. "
                                "Please use {} or more characters.".format(User.MINIMUM_PASSWORD_LENGTH),
            })
        # PasswordChangeForm verifies old_password and that the two
        # new-password fields match.
        form = PasswordChangeForm(user=request.user, data={
            'old_password': old_password,
            'new_password1': new_password,
            'new_password2': new_password,
        })
        api_forms.validate(form)
        form.save()
        request.user.details.force()
@api('user/change_avatar')
def change_avatar(request, content_id):
    """Set the authenticated user's avatar to an uploaded Content item.

    Responds 404 if `content_id` does not exist.
    """
    user_info = request.user.userinfo
    user_info.avatar = get_object_or_404(Content, id=content_id)
    user_info.save()
    # Re-fetch the user and bust the cached user details.
    user = User.objects.get(id=request.user.id)
    user.invalidate_details()
# DEPRECATED.
@api('create_email_invite_url')
def create_email_invite_url(request):
    """DEPRECATED: build a download URL for an email invite.

    Anonymous requesters get the bare download URL; authenticated
    requesters get a per-sharer tracking URL instead.
    """
    url = 'http://example.com/download'
    if request.user.is_authenticated():
        sharer = request.user
        share = ShareTrackingUrl.create(sharer, url, 'email')
        url = share.url_for_channel()
    return {'invite_url': url}
@api('existing_users_by_email')
def existing_users_by_email(request, email_hashes):
    """Look up registered users by hashed email address.

    Excludes the requester from the results. Each entry contains
    'email' and 'username'; authenticated requesters also get a
    'viewer_is_following' flag per user.
    """
    uis = UserInfo.objects.filter(email_hash__in=email_hashes)
    if request.user.is_authenticated():
        uis = uis.exclude(user=request.user)
    uis = uis.select_related('user')
    uis = uis.values_list('user__email', 'user__username', 'user__id')
    following = None
    if request.user.is_authenticated():
        # IDs the requester follows, for the viewer_is_following flag.
        following = [int(id_) for id_ in request.user.redis.new_following.zrange(0, -1)]
    users = []
    for ui in uis:
        user = {'email': ui[0], 'username': ui[1]}
        if following is not None:
            user['viewer_is_following'] = ui[2] in following
        users.append(user)
    return {'users': users}
@api('realtime/sync')
def realtime_sync(request):
    """Return the realtime channels the requesting user should listen on."""
    return {'channels': models.realtime_sync(request.user)}
@api('share/create_for_channel')
def share_create_for_channel(request, channel, comment_id=None, quest_id=None, download_link=False, is_invite=False):
    """Build a tracked share URL plus channel-appropriate message text.

    Handles four cases: a plain app-download link, an invite (with or
    without a specific quest), sharing a quest, and sharing a drawing
    (quest comment). `channel` selects both the URL tracking and the
    message wording (twitter / facebook / email / text_message / other).
    """
    ret = {}
    # Helper: tracked link to the app download page for this channel.
    def download_share():
        return ShareTrackingUrl.create(request.user, 'http://example.com/download', channel)
    if not download_link and comment_id is not None and quest_id is not None:
        raise ValueError("Can't specify both a comment and quest to share.")
    if download_link:
        return {'share_url': download_share().url_for_channel()}
    if is_invite:
        if quest_id is None:
            share_url = download_share().url_for_channel()
            if channel == 'twitter':
                message = _(u"Come draw with me on @DrawQuest! %(url)s" % {'url': share_url})
            elif channel == 'email':
                follow_me_message = _("You can follow me in the app as \"%(username)s\"" % {'username': getattr(request.user, 'username')}) if request.user.is_authenticated() else ''
                message = _(u"""I'm using DrawQuest, a free creative drawing app for iPhone, iPod touch, and iPad. DrawQuest sends you daily drawing challenges and allows you to create your own to share with friends. %(follow_me_message)s
Download DrawQuest for free here: %(url)s""" % {'follow_me_message': follow_me_message, 'url': share_url})
            else:
                message = _(u"Come draw with me on DrawQuest! %(url)s" % {'url': share_url})
        else:
            quest = get_object_or_404(Quest, id=quest_id)
            # Social channels get a tracked share-page URL; everything
            # else falls back to a tracked download link.
            if channel in ['twitter', 'facebook']:
                share_url = get_share_page_url_with_tracking(quest, request.user, channel, request=request)
            else:
                share_url = download_share().url_for_channel()
            if request.user.is_authenticated() and quest.author_id == request.user.id:
                if channel == 'twitter':
                    message = _(u"I just created a Quest on @DrawQuest! \"%(quest_title)s\" Come draw it with me: %(url)s" % {'quest_title': quest.title, 'url': share_url})
                else:
                    message = _(u"I just created a Quest on DrawQuest! \"%(quest_title)s\" Come draw it with me: %(url)s" % {'quest_title': quest.title, 'url': share_url})
            else:
                if channel == 'twitter':
                    message = _(u"Come draw \"%(quest_title)s\" with me on @DrawQuest! %(url)s" % {'quest_title': quest.title, 'url': share_url})
                else:
                    message = _(u"Come draw \"%(quest_title)s\" with me on DrawQuest! %(url)s" % {'quest_title': quest.title, 'url': share_url})
        return {
            'share_url': share_url,
            'message': message,
        }
    if comment_id is None and quest_id is None:
        # Generic share with no target object.
        share = ShareTrackingUrl.create(request.user, None, channel)
        ret['share_id'] = share.id
        url = share.url_for_channel()
    else:
        if quest_id is not None:
            shared_obj = get_object_or_404(Quest, id=quest_id)
            quest_title = shared_obj.title
        else:
            shared_obj = get_object_or_404(QuestComment, id=comment_id)
            quest_title = shared_obj.parent_comment.title
        author = User.objects.get(id=shared_obj.author_id)
        if channel in ['twitter', 'facebook']:
            url = get_share_page_url_with_tracking(shared_obj, request.user, channel, request=request)
        else:
            url = download_share().url_for_channel()
        if channel == 'twitter':
            if quest_id is not None:
                ret['tweet'] = _(u'Come draw "%(quest_title)s" with me on @DrawQuest! %(url)s' % {'quest_title': quest_title, 'url': url})
            else:
                ret['tweet'] = _(u'"%(quest_title)s" %(url)s via @DrawQuest' % {'quest_title': quest_title, 'url': url})
            # `== False` (not falsy check) looks deliberate: the privacy
            # flag is presumably tri-state (None = unset) — TODO confirm.
            if author.kv.twitter_privacy.get() == False:
                try:
                    author_screen_name = ret['twitter_screen_name'] = author.twitteruser.screen_name
                    if quest_id is not None:
                        ret['tweet'] = _(u'Come draw "%(quest_title)s" by @%(screen_name)s with me on @DrawQuest! %(url)s' % {'quest_title': quest_title, 'url': url, 'screen_name': author_screen_name})
                    else:
                        ret['tweet'] = _(u'"%(quest_title)s" %(url)s by @%(screen_name)s via @DrawQuest' % {'quest_title': quest_title, 'url': url, 'screen_name': author_screen_name})
                except (AttributeError, TwitterUser.DoesNotExist):
                    pass
            ret['message'] = ret['tweet']
        elif channel == 'email':
            if quest_id is not None:
                ret['message'] = _(u"""I'm using DrawQuest, a free creative drawing app for iPhone, iPod touch, and iPad. DrawQuest sends you daily drawing challenges and allows you to create your own to share with friends. I thought you might enjoy this Quest: \"%(quest_title)s\"
Download DrawQuest for free here: %(url)s""" % {'quest_title': quest_title, 'url': url})
            else:
                ret['message'] = _(u"""I thought you'd like this drawing made with DrawQuest, a free creative drawing app for iPhone, iPod touch, and iPad.
\"%(quest_title)s\" %(url)s
Download DrawQuest for free here: %(download_url)s""" % {'quest_title': quest_title, 'url': get_share_page_url_with_tracking(shared_obj, request.user, channel, request=request), 'download_url': url})
        else:
            if quest_id is not None:
                ret['message'] = _(u"Come draw \"%(quest_title)s\" with me on DrawQuest! %(url)s" % {'quest_title': quest_title, 'url': url})
            else:
                if channel == 'text_message':
                    ret['message'] = _(u"""Check out this drawing on DrawQuest: \"%(quest_title)s\" %(url)s
Download DrawQuest for free here: %(download_url)s""" % {'quest_title': quest_title, 'url': get_share_page_url_with_tracking(shared_obj, request.user, channel, request=request), 'download_url': url})
                else:
                    ret['message'] = _(u"Check out this drawing on DrawQuest: \"%(quest_title)s\" %(url)s" % {'quest_title': quest_title, 'url': url})
    if channel == 'facebook':
        # Facebook requires an absolute URL.
        url = 'http://example.com' + url
    if not ret.get('message'):
        if channel == 'twitter':
            ret['message'] = _(u"Come draw with me on @DrawQuest! %(url)s" % {'url': url})
        else:
            ret['message'] = _(u"Come draw with me on DrawQuest! %(url)s" % {'url': url})
    ret['share_url'] = url
    return ret
@api('economy/balance')
@require_user
def coin_balance(request):
    """Return the authenticated user's current coin balance."""
    return {'balance': economy.balance(request.user)}
@api('user/set_web_profile_privacy')
@require_user
def set_web_profile_privacy(request, privacy):
    """Set the user's web profile privacy flag and bust the profile cache."""
    request.user.kv.web_profile_privacy.set(privacy)
    models.user_profile.delete_cache(request.user.username)
@api('user/set_twitter_privacy')
@require_user
def set_twitter_privacy(request, privacy):
    """Set the user's Twitter privacy flag and bust the profile cache."""
    request.user.kv.twitter_privacy.set(privacy)
    models.user_profile.delete_cache(request.user.username)
@api('user/set_facebook_privacy')
@require_user
def set_facebook_privacy(request, privacy):
    """Set the user's Facebook privacy flag and bust the profile cache."""
    request.user.kv.facebook_privacy.set(privacy)
    models.user_profile.delete_cache(request.user.username)
@api('kv/set')
@require_user
def kv_set(request, items):
    """Set several of the user's key-value flags at once.

    Validates every key before writing anything, so an unknown key
    leaves the store untouched.

    Raises:
        AttributeError: if any key is not declared in kv.DEFINITION.
    """
    for key in items.keys():
        if key not in request.user.kv.DEFINITION.keys():
            raise AttributeError(key)
    for key, val in items.items():
        request.user.kv.set(key, val)
@api('heavy_state_sync')
def heavy_state_sync(request, tab_last_seen_timestamps=None):
    """Return the full client state snapshot for the requesting user.

    `tab_last_seen_timestamps` maps tab name -> unix timestamp of when
    the tab was last viewed; omitted tabs have never been seen.
    Default is None (normalized to a fresh dict) instead of a shared
    mutable `{}` default.
    """
    if tab_last_seen_timestamps is None:
        tab_last_seen_timestamps = {}
    return models.heavy_state_sync(request.user, app_version=request.app_version, app_version_tuple=request.app_version_tuple, tab_last_seen_timestamps=tab_last_seen_timestamps)
@api('shop/all_items')
@require_user
def shop_all_items(request):
    """Return every purchasable item, the user's balance, shop headers,
    and the tab layout for rendering the in-app shop."""
    ret = {
        'shop_brushes': Brush.for_shop(viewer=request.user, request=request),
        'shop_colors': Color.for_shop(viewer=request.user),
        'color_packs': ColorPack.for_shop(viewer=request.user),
        'coin_products': COIN_PRODUCTS,
        'brush_products': brush_products(request=request),
        'balance': economy.balance(request.user),
        # Section header copy is editable at runtime via redis.
        'color_packs_header': redis.get('color_packs_header'),
        'colors_header': redis.get('colors_header'),
        'tabs': [
            {'name': 'colors', 'default': True},
            {'name': 'coins'},
            {'name': 'brushes'},
        ],
    }
    return ret
@api('tab_badges')
def tab_badges(request, last_seen_timestamps=None):
    '''
    `last_seen_timestamps` looks like this:
    {'home': 1234567890, 'draw': 1234567890, 'activity': 1234567890}
    If a tab has never been seen yet, just omit it from the dict.
    Returns a dict like:
    {'home': True, 'draw': False, 'activity': True}
    '''
    # None default instead of a mutable {} (shared across calls otherwise).
    if last_seen_timestamps is None:
        last_seen_timestamps = {}
    return models.tab_badges(request.user, last_seen_timestamps=last_seen_timestamps)
@api('base_url')
def china_base_url(request):
    """Return the set of service base URLs for this client.

    Staging and mainland-China clients (GeoIP country CN) are routed to the
    China-hosted endpoints; everyone else gets the default endpoints.
    """
    china_urls = {
        'api_url': 'https://drawquestapi1.com/api/',
        'search_url': 'https://searchapi.example.com/',
        'web_url': 'https://drawquestapi1.com/',
        'rt_url': 'https://rt.example.com/rt',
    }
    default_urls = {
        'api_url': 'https://api.example.com/',
        'search_url': 'https://searchapi.example.com/',
        'web_url': 'https://example.com/',
        'rt_url': 'https://rt.example.com/rt',
    }
    if settings.STAGING:
        return china_urls
    from django.contrib.gis.geoip import GeoIP
    g = GeoIP()
    ip = request.META['REMOTE_ADDR']
    # country_code() may return None for unknown/private IPs; the previous
    # unconditional .upper() raised AttributeError in that case.
    country = g.country_code(ip) if ip else None
    if country and country.upper() == 'CN':
        return china_urls
    return default_urls
|
def generate_migration(app_name, dependencies):
    """Render the skeleton of a Django migration class whose dependency list
    references (`app_name`, `dependencies`)."""
    return (
        f"class Migration(migrations.Migration):\n\n"
        f"    dependencies = [\n"
        f"        ('{app_name}', '{dependencies}'),\n"
        f"        # Additional dependencies go here\n"
        f"    ]\n\n"
        f"    # Other migration content goes here"
    )
#! /bin/bash
# Fetch all news items (category 3, id 12) — no session cookie is needed for
# this endpoint, so sId is deliberately sent empty.
curl -b 'sId=' -X GET http://localhost:3000/api/getOne/category3/12
<reponame>msnraju/al-productivity-tools<filename>src/commands/al-file-commands.ts
import * as vscode from "vscode";
import * as fs from "fs";
import * as path from "path";
import ALFileHelper from "./al-file-helper";
import simpleGit from "simple-git";
import { v4 as uuidv4 } from "uuid";
/**
 * Commands for working with AL source files: inserting GUIDs and renaming
 * files to follow the AL file-naming best practices.
 */
export default class ALFileCommands {
  /** Replaces the current selection (if any) with a newly generated GUID. */
  static insertGuid() {
    const editor = vscode.window.activeTextEditor;
    if (!editor) {
      return;
    }
    const pos = editor.selection.active;
    editor.edit((editBuilder) => {
      editBuilder.delete(editor.selection);
      editBuilder.insert(pos, uuidv4());
    });
  }

  /**
   * True when `folderPath` contains a `.git` directory.
   * Existence is checked first: `fs.lstatSync` throws on a missing path, and
   * a missing `.git` simply means "not a git repo", not an error (the old
   * code aborted the whole rename for non-git workspaces because of this).
   */
  private static isGitRepository(folderPath: string): boolean {
    const gitDir = path.join(folderPath, ".git");
    return fs.existsSync(gitDir) && fs.lstatSync(gitDir).isDirectory();
  }

  /**
   * Renames every AL file in all workspace folders to the standard naming
   * notation, going through `git mv` when the folder is a git repository so
   * file history is preserved.
   */
  static fixALFileNamingNotation() {
    if (!vscode.workspace.workspaceFolders) {
      return;
    }
    try {
      vscode.workspace.workspaceFolders.forEach((folder) => {
        const git = ALFileCommands.isGitRepository(folder.uri.fsPath);
        ALFileHelper.renameALFilesSync(
          folder.uri.fsPath,
          (oldFile, newFile) => {
            ALFileCommands.renameALFileInternal(
              oldFile,
              newFile,
              git,
              folder.uri.fsPath
            );
          }
        );
      });
      vscode.window.showInformationMessage(
        "AL file names corrected as per the best practices"
      );
    } catch (err) {
      vscode.window.showErrorMessage(
        "An error occurred while standardizing AL files in this workspace."
      );
    }
  }

  /** Renames only the AL file currently open in the active editor. */
  static async fixALCurrentFileNamingNotation() {
    const editor = vscode.window.activeTextEditor;
    if (!editor) return;
    if (path.extname(editor.document.fileName).toLowerCase() != ".al") return;
    try {
      // Save first so the rename operates on the on-disk contents.
      if (editor.document.isDirty) {
        await editor.document.save();
      }
      await ALFileCommands.renameALFile();
    } catch (err) {
      vscode.window.showErrorMessage(
        "An error occurred while standardizing AL file name."
      );
    }
  }

  /** Computes the standard name for the active AL file and renames it. */
  static async renameALFile() {
    const editor = vscode.window.activeTextEditor;
    if (!editor) {
      return;
    }
    if (path.extname(editor.document.fileName).toLowerCase() != ".al") {
      return;
    }
    const oldFile = editor.document.fileName;
    const newFile = await ALFileHelper.getALFileName(editor.document.fileName);
    if (oldFile.toLowerCase() === newFile.toLowerCase()) {
      return;
    }
    const folder = vscode.workspace.getWorkspaceFolder(editor.document.uri);
    if (folder && ALFileCommands.isGitRepository(folder.uri.fsPath)) {
      this.renameALFileInternal(
        oldFile,
        newFile,
        true,
        folder.uri.fsPath,
        this.openNewALFile
      );
    } else {
      this.renameALFileInternal(
        oldFile,
        newFile,
        false,
        "",
        this.openNewALFile
      );
    }
  }

  /**
   * Performs the actual rename — via `git mv` when `git` is true, otherwise
   * through the filesystem — then optionally reopens the renamed file.
   */
  private static renameALFileInternal(
    oldFile: string,
    newFile: string,
    git: boolean,
    gitPath: string,
    openNewFile: ((newFile: string) => void) | undefined = undefined
  ) {
    if (oldFile === newFile) {
      return;
    }
    if (git) {
      simpleGit(gitPath).mv(oldFile, newFile, () => {
        if (openNewFile) {
          openNewFile(newFile);
        }
      });
    } else {
      fs.renameSync(oldFile, newFile);
      if (openNewFile) {
        openNewFile(newFile);
      }
    }
  }

  /** Closes the stale editor and reopens the renamed file at the old cursor. */
  private static openNewALFile(newFile: string) {
    const editor = vscode.window.activeTextEditor;
    if (!editor) {
      return;
    }
    const position = editor.selection.active;
    vscode.commands
      .executeCommand("workbench.action.closeActiveEditor")
      .then(() => {
        const openPath = vscode.Uri.parse("file:///" + newFile);
        vscode.workspace.openTextDocument(openPath).then((doc) => {
          vscode.window.showTextDocument(doc).then(() => {
            ALFileCommands.setCursorPosition(position);
          });
        });
      });
  }

  /** Restores the cursor and selection to `position` in the active editor. */
  private static setCursorPosition(position: vscode.Position) {
    const editor = vscode.window.activeTextEditor;
    if (!editor) {
      return;
    }
    editor.selection = new vscode.Selection(position, position);
    editor.revealRange(new vscode.Range(position, position));
  }
}
|
<gh_stars>0
# https://binarysearch.com/
#
# GGA 2020.10.28
#
# User Problem
# You have:
#
# You Need:
#
# You Must:
#
# Input/Output Example:
#
# Domino Placement
# You are given integers n and m representing a board
# of size n by m. You also have an unlimited number
# of 1 by 2 dominos.
# Return the maximum number of dominos that can be placed
# on the board such that they don't overlap and
# every domino lies entirely within the board.
# Example 1
# Input
# n = 2
# m = 2
# Output
# 2
# Solution (Feature/Product)
# since each domino covers two spaces
# half the number of spaces
# is the number of dominos
#
#
# Edge cases:
#
# Revision, Reflection, Future Versions, Action Items:
#
class Solution:
    def solve(self, n, m):
        """Maximum non-overlapping 1x2 dominos on an n-by-m board.

        Each domino covers exactly two cells, so the answer is
        floor(n * m / 2); on odd-area boards one cell stays uncovered.
        """
        total_cells = n * m
        return total_cells // 2
# Quick manual check: a 2x2 board fits exactly two dominos.
# (The old call `Tom.solve("input")` passed a single string where two
# integers are required, raising TypeError at import time.)
Tom = Solution()
print(Tom.solve(2, 2))
|
import string
import random
def generatePassword(length):
    """Generate a random password of exactly `length` characters.

    The password is guaranteed to contain at least one uppercase letter, one
    lowercase letter, one digit and one punctuation character.

    The previous version appended the literal characters 'A', 'a', '1', '!'
    on top of `length` random ones, so it returned length+4 characters and
    every password predictably contained those four literals.

    Raises:
        ValueError: if length < 4 (can't satisfy all four character classes).

    NOTE(review): uses `random` like the original; for security-sensitive
    passwords the `secrets` module would be preferable.
    """
    if length < 4:
        raise ValueError("length must be at least 4")
    # One guaranteed character per class, chosen at random.
    password = [
        random.choice(string.ascii_uppercase),
        random.choice(string.ascii_lowercase),
        random.choice(string.digits),
        random.choice(string.punctuation),
    ]
    characters = string.ascii_letters + string.digits + string.punctuation
    password.extend(random.choice(characters) for _ in range(length - 4))
    random.shuffle(password)
    return "".join(password)
if __name__ == '__main__':
    # Demo: print a single 8-character password.
    print(generatePassword(8))
<gh_stars>1-10
package com.kamikaze.yada;
import android.Manifest;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.fragment.app.FragmentActivity;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MapStyleOptions;
import com.google.android.gms.maps.model.MarkerOptions;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
public class MapsActivity extends FragmentActivity implements OnMapReadyCallback, GoogleMap.OnMapLongClickListener {
LocationManager locationManager;
LocationListener locationListener;
private GoogleMap mMap;
public void centerMapOnLocation(Location location, String title) {
if (location != null) {
LatLng userLocation = new LatLng(location.getLatitude(), location.getLongitude());
mMap.clear();
mMap.addMarker(new MarkerOptions().position(userLocation).title(title));
mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(userLocation, 12));
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED) {
locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER,0,0,locationListener);
Location lastKnownLocation = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
centerMapOnLocation(lastKnownLocation, "Your Location");
}
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_maps);
// Obtain the SupportMapFragment and get notified when the map is ready to be used.
SupportMapFragment mapFragment = (SupportMapFragment) getSupportFragmentManager()
.findFragmentById(R.id.map);
mapFragment.getMapAsync(this);
}
@Override
public void onMapReady(GoogleMap googleMap) {
mMap = googleMap;
int nightModeFlags = getResources().getConfiguration().uiMode & Configuration.UI_MODE_NIGHT_MASK;
if(nightModeFlags==Configuration.UI_MODE_NIGHT_YES)
{
mMap.setMapStyle(MapStyleOptions.loadRawResourceStyle(MapsActivity.this,R.raw.map_style_night));
}
mMap.setOnMapLongClickListener(this);
Intent intent = getIntent();
if (intent.getIntExtra("placeNumber",0) == 0) {
// Zoom in on user location
locationManager = (LocationManager) this.getSystemService(Context.LOCATION_SERVICE);
locationListener = new LocationListener() {
@Override
public void onLocationChanged(Location location) {
centerMapOnLocation(location, "Your Location");
}
@Override
public void onStatusChanged(String s, int i, Bundle bundle) {
}
@Override
public void onProviderEnabled(String s) {
}
@Override
public void onProviderDisabled(String s) {
}
};
if (ContextCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED) {
locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER,0,0,locationListener);
Location lastKnownLocation = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
centerMapOnLocation(lastKnownLocation, "Your Location");
} else {
ActivityCompat.requestPermissions(this,new String[]{Manifest.permission.ACCESS_FINE_LOCATION},1);
}
} else {
Location placeLocation = new Location(LocationManager.GPS_PROVIDER);
placeLocation.setLatitude(MemorablePlacesActivity.locations.get(intent.getIntExtra("placeNumber",0)).latitude);
placeLocation.setLongitude(MemorablePlacesActivity.locations.get(intent.getIntExtra("placeNumber",0)).longitude);
centerMapOnLocation(placeLocation, MemorablePlacesActivity.places.get(intent.getIntExtra("placeNumber",0)));
}
}
@Override
public void onMapLongClick(LatLng latLng) {
Geocoder geocoder = new Geocoder(getApplicationContext(), Locale.getDefault());
String address = "";
try {
List<Address> listAdddresses = geocoder.getFromLocation(latLng.latitude,latLng.longitude,1);
if (listAdddresses != null && listAdddresses.size() > 0) {
if (listAdddresses.get(0).getThoroughfare() != null) {
if (listAdddresses.get(0).getSubThoroughfare() != null) {
address += listAdddresses.get(0).getSubThoroughfare() + " ";
}
address += listAdddresses.get(0).getThoroughfare();
}
}
} catch (Exception e) {
e.printStackTrace();
}
if (address.equals("")) {
SimpleDateFormat sdf = new SimpleDateFormat("HH:mm yyyy-MM-dd");
address += sdf.format(new Date());
}
mMap.addMarker(new MarkerOptions().position(latLng).title(address));
MemorablePlacesActivity.places.add(address);
MemorablePlacesActivity.locations.add(latLng);
MemorablePlacesActivity.arrayAdapter.notifyDataSetChanged();
Toast.makeText(this,"Location Saved!",Toast.LENGTH_SHORT).show();
}
} |
# Detect when we're not being sourced, print a hint and exit
# Based on https://stackoverflow.com/questions/2683279/how-to-detect-if-a-script-is-being-sourced#34642589
# When "return" fails (ie if not sourced), an error message is printed and
# caught by the if clause.
# In the normal mode of operation (ie if sourced), "return" is silent
if [[ ! -z "$(return 2>&1)" ]];
then
    echo >&2 "ERROR: You must use \"source $0\" to run this script."
    # Interrupt this (non-sourced) invocation only.
    kill -INT $$
fi
if [[ ! "$0" =~ ("bash") ]];
then
    # Not sourced from Bash
    BASH_SOURCE="$0"
    # For zsh (http://zsh.sourceforge.net/FAQ/zshfaq03.html#l18)
    setopt shwordsplit || true
fi
helpstring="Usage: source $BASH_SOURCE [options]
Options:
    -c cmsgemos release version (e.g. X.Y.Z)
    -d debug information is printed
    -g gemplotting release version (e.g. X.Y.Z)
    -G gemplotting dev version (e.g. single integer)
    -h displays this string
    -p Path to the venv location
    -P When at P5, port of a SOCKS proxy to be used by pip
    -v vfatqc release version (e.g. X.Y.Z)
    -V vfatqc dev version (e.g. single integer)
    -w No value following, deletes and recreates the venv from scratch
The virtualenv found at -p will either be activated or created"
# Option state, filled in by getopts below.
CMSGEMOS_VERSION=""
DEBUG=""
GEMPLOT_VERSION=""
GEMPLOT_DEV_VERSION=""
PROXY_PORT=""
VFATQC_VERSION=""
VFATQC_DEV_VERSION=""
VENV_ROOT=""
WIPE=""
# Reset OPTIND so a re-sourced script parses options from the start again.
OPTIND=1
while getopts "c:g:G:v:V:p:P:whd" opts
do
    case $opts in
        c)
            CMSGEMOS_VERSION="$OPTARG";;
        d)
            DEBUG="true";;
        g)
            GEMPLOT_VERSION="$OPTARG";;
        G)
            GEMPLOT_DEV_VERSION="$OPTARG";;
        v)
            VFATQC_VERSION="$OPTARG";;
        V)
            VFATQC_DEV_VERSION="$OPTARG";;
        p)
            VENV_ROOT="$OPTARG";;
        w)
            WIPE=1;;
        P)
            PROXY_PORT="$OPTARG";;
        h)
            echo >&2 "${helpstring}"
            return 1;;
        \?)
            echo >&2 "${helpstring}"
            return 1;;
        [?])
            echo >&2 "${helpstring}"
            return 1;;
    esac
done
unset OPTIND
# Check if user provided the venv argument
if [ -n "$DEBUG" ]
then
    echo VENV_ROOT $VENV_ROOT
    echo CMSGEMOS_VERSION $CMSGEMOS_VERSION
    echo GEMPLOT_VERSION $GEMPLOT_VERSION
    echo GEMPLOT_DEV_VERSION $GEMPLOT_DEV_VERSION
    echo PROXY_PORT $PROXY_PORT
    echo VFATQC_VERSION $VFATQC_VERSION
    echo VFATQC_DEV_VERSION $VFATQC_DEV_VERSION
    echo WIPE $WIPE
fi
if [ ! -n "$VENV_ROOT" ]
then
    # Sane default
    VENV_ROOT=$PWD/venv
fi
#export ELOG_PATH=/<your>/<elog>/<directory>
if [ -z "$ELOG_PATH" ]
then
    echo "ELOG_PATH not set, please set ELOG_PATH to a directory where plots created by analysis applications will be written"
    echo " (export ELOG_PATH=<your>/<elog>/<directory>/) and then rerun this script"
    return 1
fi
# Detect operating system
####################
# Map the kernel series to the CERN OS flavour (2.6.x -> SLC6, 3.10.x -> CC7).
KERNEL_VERSION="$(uname -r)"
if [[ $KERNEL_VERSION == *"2.6."* ]];
then
    OS_VERSION="slc6"
elif [[ $KERNEL_VERSION == *"3.10."* ]];
then
    OS_VERSION="cc7"
else
    echo "Unrecognized kernel version! Exiting..."
    return 1
fi
# Detect host
####################
DNS_INFO="$(dnsdomainname)"
SYSTEM_INFO="$(uname -a)"
VIRTUALENV=virtualenv
PIP=pip
WGET=wget
if [[ $SYSTEM_INFO == *"lxplus"* ]];
then
    # LCG 93 doesn't provide `virtualenv' in PATH
    VIRTUALENV="python -m virtualenv"
    PIP="python -m pip"
    if [[ "$OS_VERSION" == "slc6" ]];
    then
        source /cvmfs/sft.cern.ch/lcg/views/LCG_93/x86_64-slc6-gcc7-opt/setup.sh
    else
        # cc7
        source /cvmfs/sft.cern.ch/lcg/views/LCG_93/x86_64-centos7-gcc7-opt/setup.sh
    fi
elif [[ "$DNS_INFO" == "cms" ]];
then
    # We are in the .cms network
    # No direct internet access there: downloads go through cmsusr via ssh.
    WGET="ssh cmsusr wget"
fi
# Setup proxy
####################
if [ ! -z "$PROXY_PORT" ];
then
    # Install PySocks if it's not already there
    if ! $PIP --disable-pip-version-check show -q PySocks >/dev/null ; then
        echo "Installing PySocks..."
        ssh cmsusr wget https://files.pythonhosted.org/packages/53/12/6bf1d764f128636cef7408e8156b7235b150ea31650d0260969215bb8e7d/PySocks-1.6.8.tar.gz
        $PIP --disable-pip-version-check install --user PySocks-1.6.8.tar.gz
    fi
    PIP="$PIP --proxy socks5://localhost:$PROXY_PORT"
    VIRTUALENV="$VIRTUALENV --never-download" # virtualenv doesn't have proxy support
fi
# setup virtualenv
####################
# The venv is keyed by OS flavour and Python version so several can coexist.
PYTHON_VERSION=$(python -c "import sys; sys.stdout.write(sys.version[:3])")
VENV_DIR=${VENV_ROOT}/${OS_VERSION}/py${PYTHON_VERSION}
if [ -n "$DEBUG" ]
then
    echo SYSTEM_INFO $SYSTEM_INFO
    echo KERNEL_VERSION $KERNEL_VERSION
    echo OS_VERSION $OS_VERSION
    echo PIP $PIP
    echo PYTHON_VERSION $PYTHON_VERSION
    echo VENV_DIR $VENV_DIR
    echo VIRTUALENV $VIRTUALENV
    echo WGET $WGET
fi
# Install virtualenv if it's not already there
if ! $PIP show -q virtualenv >/dev/null ; then
    $PIP install --user virtualenv
fi
# Check if user wants to start from scratch
if [[ "$WIPE" == "1" ]];
then
    /bin/rm -rf $VENV_DIR
    if [[ ( -e cmsgemos_gempython-${CMSGEMOS_VERSION}.tar.gz ) && ( ! -z ${CMSGEMOS_VERSION} ) ]]
    then
        /bin/rm cmsgemos_gempython-${CMSGEMOS_VERSION}.tar.gz
    fi
    if [[ ( -e gempython_gemplotting-${GEMPLOT_VERSION}.tar.gz) && ( ! -z ${GEMPLOT_VERSION} ) ]]
    then
        /bin/rm gempython_gemplotting-${GEMPLOT_VERSION}.tar.gz
    fi
    if [[ ( -e gempython_vfatqc-${VFATQC_VERSION}.tar.gz) && ( ! -z ${VFATQC_VERSION} ) ]]
    then
        /bin/rm gempython_vfatqc-${VFATQC_VERSION}.tar.gz
    fi
fi
# Create the venv and install the requested packages on first run; on
# subsequent runs just re-activate the existing venv.
if [ ! -d "${VENV_DIR}" ]
then
    # Make virtualenv
    ####################
    mkdir -p $VENV_DIR
    echo $VIRTUALENV -p python --system-site-packages $VENV_DIR
    $VIRTUALENV -p python --system-site-packages $VENV_DIR
    . $VENV_DIR/bin/activate
    # Check virtualenv
    ####################
    if [ -z ${VIRTUAL_ENV+x} ] ; then
        echo "ERROR: Could not activate virtualenv"
        return
    fi
    # Install deps
    ####################
    echo $PIP install -U importlib 'setuptools>25,<=38' 'pip>8,<10'
    $PIP install -U importlib 'setuptools>25,<=38' 'pip>8,<10'
    # install cmsgemos?
    ####################
    if [ ! -z ${CMSGEMOS_VERSION} ]
    then
        if [ ! -e cmsgemos_gempython-${CMSGEMOS_VERSION}.tar.gz ]
        then
            if [ ! -z "$PROXY_PORT" ];
            then
                echo scp lxplus.cern.ch:/afs/cern.ch/user/s/sturdy/public/cmsgemos_gempython-${CMSGEMOS_VERSION}.tar.gz .
                scp lxplus.cern.ch:/afs/cern.ch/user/s/sturdy/public/cmsgemos_gempython-${CMSGEMOS_VERSION}.tar.gz .
            else
                echo cp /afs/cern.ch/user/s/sturdy/public/cmsgemos_gempython-${CMSGEMOS_VERSION}.tar.gz .
                cp /afs/cern.ch/user/s/sturdy/public/cmsgemos_gempython-${CMSGEMOS_VERSION}.tar.gz .
            fi
        fi
        ls >/dev/null # Forces a filesystem sync
        # Install the tarball that was just fetched. This previously installed
        # a hard-coded cmsgemos_gempython-0.3.1.tar.gz regardless of the
        # version that was downloaded above.
        echo $PIP install cmsgemos_gempython-${CMSGEMOS_VERSION}.tar.gz --no-deps
        $PIP install cmsgemos_gempython-${CMSGEMOS_VERSION}.tar.gz --no-deps
    fi
    # install gemplotting?
    ####################
    if [ ! -z ${GEMPLOT_VERSION} ]
    then
        if [ ! -e gempython_gemplotting-${GEMPLOT_VERSION}.tar.gz ]
        then
            if [ -z "$GEMPLOT_DEV_VERSION" ]
            then
                echo $WGET https://github.com/cms-gem-daq-project/gem-plotting-tools/releases/download/v${GEMPLOT_VERSION}/gempython_gemplotting-${GEMPLOT_VERSION}.tar.gz
                $WGET https://github.com/cms-gem-daq-project/gem-plotting-tools/releases/download/v${GEMPLOT_VERSION}/gempython_gemplotting-${GEMPLOT_VERSION}.tar.gz
            else
                echo $WGET https://github.com/cms-gem-daq-project/gem-plotting-tools/releases/download/v${GEMPLOT_VERSION}-dev${GEMPLOT_DEV_VERSION}/gempython_gemplotting-${GEMPLOT_VERSION}.tar.gz
                $WGET https://github.com/cms-gem-daq-project/gem-plotting-tools/releases/download/v${GEMPLOT_VERSION}-dev${GEMPLOT_DEV_VERSION}/gempython_gemplotting-${GEMPLOT_VERSION}.tar.gz
            fi
        fi
        ls >/dev/null # Forces a filesystem sync
        echo $PIP install gempython_gemplotting-${GEMPLOT_VERSION}.tar.gz
        $PIP install gempython_gemplotting-${GEMPLOT_VERSION}.tar.gz
    fi
    # install vfatqc?
    ####################
    if [ ! -z ${VFATQC_VERSION} ]
    then
        if [ ! -e gempython_vfatqc-${VFATQC_VERSION}.tar.gz ]
        then
            if [ -z "$VFATQC_DEV_VERSION" ]
            then
                echo $WGET https://github.com/cms-gem-daq-project/vfatqc-python-scripts/releases/download/v${VFATQC_VERSION}/gempython_vfatqc-${VFATQC_VERSION}.tar.gz
                $WGET https://github.com/cms-gem-daq-project/vfatqc-python-scripts/releases/download/v${VFATQC_VERSION}/gempython_vfatqc-${VFATQC_VERSION}.tar.gz
            else
                echo $WGET https://github.com/cms-gem-daq-project/vfatqc-python-scripts/releases/download/v${VFATQC_VERSION}-dev${VFATQC_DEV_VERSION}/gempython_vfatqc-${VFATQC_VERSION}.tar.gz
                $WGET https://github.com/cms-gem-daq-project/vfatqc-python-scripts/releases/download/v${VFATQC_VERSION}-dev${VFATQC_DEV_VERSION}/gempython_vfatqc-${VFATQC_VERSION}.tar.gz
            fi
        fi
        ls >/dev/null # Forces a filesystem sync
        echo $PIP install gempython_vfatqc-${VFATQC_VERSION}.tar.gz
        $PIP install gempython_vfatqc-${VFATQC_VERSION}.tar.gz
    fi
else
    echo source $VENV_DIR/bin/activate
    source $VENV_DIR/bin/activate
fi
# Setup locations
####################
# Per-host environment: data paths, LD_LIBRARY_PATH and tool aliases depend
# on which machine this script is sourced on.
if [[ $SYSTEM_INFO == *"lxplus"* ]];
then
    if [[ -z "$DATA_PATH" ]]; # Don't override existing value
    then
        export DATA_PATH=/afs/cern.ch/work/${USER:0:1}/$USER/CMS_GEM/Data/gemdata
    else
        # Make sure it's exported
        export DATA_PATH
    fi
    if [ ! -d "$DATA_PATH" ];
    then
        mkdir -p "$DATA_PATH"
        echo "INFO: The directory \"$DATA_PATH\" (\$DATA_PATH) didn't exist."
        echo "      I created it for you."
    fi
elif [[ $SYSTEM_INFO == *"gem904"* ]];
then
    # System Paths
    export AMC13_ADDRESS_TABLE_PATH=/opt/cactus/etc/amc13/
    export DATA_PATH=/data/bigdisk/GEM-Data-Taking/GE11_QC8/
    export GBT_SETTINGS=/data/bigdisk/GEMDAQ_Documentation/system/OptoHybrid/V3/GBT_Files/
    export GEM_ADDRESS_TABLE_PATH=/opt/cmsgemos/etc/maps
    export REPO_PATH=/data/bigdisk/sw/gemonlinesw/repos/
    # Setup LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/opt/cactus/lib:$LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/opt/rwreg/lib:$LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/opt/wiscrpcsvc/lib:$LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/opt/xdaq/lib:$LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/opt/xhal/lib:$LD_LIBRARY_PATH
    # Add hardware access tools to PATH
    export PATH=/opt/cactus/bin/amc13/:$PATH
    export PATH=/opt/reg_utils/bin:$PATH
    export PATH=/opt/xhal/bin/:$PATH
    # Firmware
    export FIRMWARE_GEM=/data/bigdisk/GEMDAQ_Documentation/system/firmware/files
    # xDAQ
    alias xdaq=/opt/xdaq/bin/xdaq.exe
    # Misc
    #alias arp-scan='sudo /usr/sbin/arp-scan'
    alias arp-scan='ip n show dev "$@" to 192.168.0.0/16'
    alias editConfig='vim $VIRTUAL_ENV/lib/python2.7/site-packages/gempython/gemplotting/mapping/chamberInfo.py'
    alias gbtProgrammer='java -jar /data/bigdisk/sw/GBTx_programmer/programmerv2.20180116.jar'
    # fedKit on gem904daq04
    if [[ $SYSTEM_INFO == *"gem904daq04"* ]];
    then
        export PATH=/opt/xdaq/bin:$PATH
    fi
elif [[ $SYSTEM_INFO == *"srv-s2g18"* || $SYSTEM_INFO == *"kvm"* ]];
then
    # System Paths
    export DATA_PATH=/gemdata
    # Setup LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/opt/cactus/lib:$LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/opt/rwreg/lib:$LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/opt/wiscrpcsvc/lib:$LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/opt/xdaq/lib:$LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/opt/xhal/lib:$LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/opt/cactus/lib:$LD_LIBRARY_PATH
    # Add hardware access tools to PATH
    export PATH=/opt/cactus/bin/amc13/:$PATH
    export PATH=/opt/xhal/bin/:$PATH
    export PATH=/opt/reg_utils/bin:$PATH
fi
# Setup path
# Expose the gempython scripts installed into the venv.
export PATH=$VENV_DIR/lib/python$PYTHON_VERSION/site-packages/gempython/scripts:$PATH
export PATH=$VENV_DIR/lib/python$PYTHON_VERSION/site-packages/gempython/gemplotting/macros:$PATH
# Create mapping files
# Only build the channel-map files once; the marker file indicates they exist.
if [ ! -f $VENV_DIR/lib/python$PYTHON_VERSION/site-packages/gempython/gemplotting/mapping/shortChannelMap.txt ]
then
    find $VENV_DIR/lib/python$PYTHON_VERSION/site-packages/gempython -type f -name buildMapFiles.py -exec python {} \;
fi
# Clean up
# Remove the helper variables so they don't leak into the caller's shell.
unset DEBUG DNS_INFO helpstring PIP VIRTUALENV WIPE
if [[ ! "$0" =~ ("bash") ]];
then
    # Not sourced from Bash
    unset BASH_SOURCE
fi
#!/usr/bin/env bash
# Create a local development virtualenv and install the project's
# dependencies plus editable checkouts of the sibling packages.
# (The old shebang was `#!/usr/bin/env` with no interpreter, so direct
# execution failed; `source` also requires a bash-compatible shell.)
python3 -m venv venv
source venv/bin/activate
pip3 install numpy numexpr tqdm pygam scikit-learn networkx
pip3 install conditional_independence graphical_models graphical_model_learning
pip3 install twine wheel ipdb ipython
pip3 install jedi==0.17.2
# REPLACE WITH PATH TO OTHER PACKAGES
pip3 install -e ~/Documents/packages/conditional_independence/
pip3 install -e ~/Documents/packages/graphical_models/
pip3 install -e ~/Documents/packages/graphical_model_learning/
|
<filename>game-practice/src/components/Sidebar/SidebarMenu.js
import React, { Component } from 'react'
import {
BrowserRouter as Router,
Route,
NavLink
} from 'react-router-dom'
import '../../styles/SidebarMenu.css'
// Route table shared by the sidebar and the main pane: `sidebar` renders in
// the navigation column, `main` in the content area. `exact` keeps '/' from
// matching every other path.
const routes = [
  {
    path: '/',
    exact: true,
    sidebar: () => <div>Home</div>,
    main: () => <h2>Home</h2>,
  },
  {
    path: '/snake',
    sidebar: () => <div>SNAKE</div>,
    main: () => <h2>Snake</h2>,
  },
  {
    path: '/simon',
    sidebar: () => <div>SIMON</div>,
    main: () => <h2>Simon</h2>,
  },
]
class SidebarMenu extends Component {
render() {
return (
<React.Fragment>
<Router>
<div className='toggle'>
<div className='sidebar-container'>
<ul style={{listStyle: 'none', padding: 0}}>
<li><NavLink to='/'style={{color: 'white', textDecoration: 'none'}} >Home</NavLink> </li>
<li><NavLink to='/snake' style={{color: 'white', textDecoration: 'none'}}>SNAKE</NavLink> </li>
<li><NavLink to='/simon' style={{color: 'white', textDecoration: 'none'}}>SIMON</NavLink> </li>
</ul>
{routes.map(route => (
<Route
key={route.path}
path={route.path}
exact={route.exact}
component={route.sidebar}
/>
))}
</div>
<div style={{flex: 1, padding: '10px'}}>
{routes.map(route => (
<Route
key={route.path}
path={route.path}
exact={route.exact}
component={route.main}
/>
))}
</div>
</div>
</Router>
</React.Fragment>
)
}
}
export default SidebarMenu |
<gh_stars>0
package cmd
import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"os"
"regexp"
"strconv"
"strings"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/plumber-cd/github-apps-trampoline/helper"
"github.com/plumber-cd/github-apps-trampoline/logger"
)
// Command-line flag values; bound to viper keys in init(). cfgFile and cfg
// additionally hold the resolved configuration source and its raw contents.
var (
	verbose bool // -v: enable verbose logging
	server string // GitHub server URL
	privateKey string // path to the GitHub App private key
	appID int // GitHub App ID
	filter string // regex filter matching repository URLs
	currentRepo bool // scope the token to the current repository only
	repositories string // comma-separated repository names
	repositoryIDs string // comma-separated numeric repository IDs
	permissions string // raw JSON permissions object
	installation string // installation selector
	installationID int // explicit installation ID
	cliMode bool // --cli: standalone mode instead of git-askpass mode
	cfgFile string // --config: path to a config file
	cfg string // resolved raw JSON configuration
)
var rootCmd = &cobra.Command{
Use: "github-apps-trampoline",
Short: "A GIT_ASKPASS trampoline for GitHub Apps",
Long: `A cross-platform no-dependency GIT_ASKPASS trampoline for GitHub Apps,
written in Go`,
Args: cobra.ArbitraryArgs,
Run: func(cmd *cobra.Command, args []string) {
logger.Refresh()
logger.Get().Println("hi")
if viper.GetBool("verbose") {
outData, err := json.MarshalIndent(viper.AllSettings(), "", " ")
cobra.CheckErr(err)
logger.Get().Println(string(outData))
}
if cfgFile := viper.GetString("config"); cfgFile != "" {
logger.Get().Printf("Reading config from file %s", cfgFile)
dat, err := os.ReadFile(cfgFile)
cobra.CheckErr(err)
cfg = string(dat)
} else if dat, present := os.LookupEnv("GITHUB_APPS_TRAMPOLINE"); present {
logger.Get().Println("Reading config from environment")
cfg = dat
}
if cfg == "" {
logger.Get().Println("Config was not set - inferring in-memory from cli args")
key := viper.GetString("key")
if key == "" {
cobra.CheckErr(errors.New("If no config was provided, must specify private key via --key or GITHUB_APPS_TRAMPOLINE_KEY"))
}
app := viper.GetInt("app")
if app <= 0 {
cobra.CheckErr(errors.New("If no config was provided, must specify app ID via --app or GITHUB_APPS_TRAMPOLINE_APP"))
}
filter := viper.GetString("filter")
if filter == "" {
logger.Get().Println("Filter was not set - assuming '.*'")
filter = ".*"
}
config := helper.Config{
PrivateKey: key,
AppID: app,
}
if server := viper.GetString("server"); server != "" {
config.GitHubServer = &server
}
if api := viper.GetString("api"); api != "" {
config.GitHubAPI = &api
}
if currentRepo := viper.GetBool("current-repo"); currentRepo {
logger.Get().Println("Enabled: current-repo")
config.CurrentRepositoryOnly = ¤tRepo
}
if repositories := viper.GetString("repositories"); repositories != "" {
logger.Get().Println("Enabled: repositories")
split := strings.Split(repositories, ",")
logger.Get().Printf("Repositories: %v", split)
config.Repositories = &split
}
if repositoryIDs := viper.GetString("repository-ids"); repositoryIDs != "" {
logger.Get().Println("Enabled: repository-ids")
ids := strings.Split(repositoryIDs, ",")
int_ids := make([]int, len(ids))
for i, id := range ids {
int_id, err := strconv.Atoi(id)
cobra.CheckErr(err)
int_ids[i] = int_id
}
logger.Get().Printf("Repository IDs: %v", int_ids)
config.RepositoryIDs = &int_ids
}
if permissions := viper.GetString("permissions"); permissions != "" {
logger.Get().Println("Enabled: permissions")
raw := json.RawMessage(permissions)
logger.Get().Printf("Permissions: %s", string(raw))
config.Permissions = &raw
}
if installation := viper.GetString("installation"); installation != "" {
logger.Get().Printf("Enabled: installation %q", installation)
config.Installation = &installation
}
if installationID := viper.GetInt("installation-id"); installationID > 0 {
logger.Get().Printf("Enabled: installation-id %q", installation)
config.InstallationID = &installationID
}
obj := map[string]helper.Config{}
obj[filter] = config
jsonData, err := json.MarshalIndent(obj, "", " ")
cobra.CheckErr(err)
cfg = string(jsonData)
}
logger.Get().Printf("Config: %s", cfg)
_helper := helper.New(cfg)
if cliMode = viper.GetBool("cli"); !cliMode {
logger.Get().Println("Git AskPass Credentials Helper mode enabled")
if len(args) != 1 || args[0] != "get" {
logger.Get().Printf("Expecting single arg 'get', got: %v", args)
logger.Get().Println("Silently exiting - nothing to do")
os.Exit(0)
}
inBytes, err := ioutil.ReadAll(os.Stdin)
cobra.CheckErr(err)
in := string(inBytes)
logger.Get().Printf("Read input from git:\n%s", in)
var protocol, host, path string
re := regexp.MustCompile("(protocol|host|path)=(.*)")
result := re.FindAllStringSubmatchIndex(in, -1)
for _, match := range result {
key := in[match[2]:match[3]]
value := in[match[4]:match[5]]
switch key {
case "protocol":
protocol = value
case "host":
host = value
case "path":
path = strings.TrimSuffix(value, ".git")
}
}
if protocol != "https" {
logger.Get().Printf("Expecting protocol 'https', got: %q", protocol)
logger.Get().Println("Silently exiting - nothing to do")
os.Exit(0)
}
git, err := _helper.GitHelper(fmt.Sprintf("%s/%s", host, path))
checkSilentErr(err)
token, err := git.GetToken()
checkSilentErr(err)
logger.Get().Printf("Returning token in a helper format: %q", token)
fmt.Printf("username=%s\n", "x-access-token")
fmt.Printf("password=%<PASSWORD>", token)
} else {
logger.Get().Println("Standalone CLI mode enabled")
cli, err := _helper.CLIHelper()
cobra.CheckErr(err)
token, err := cli.GetToken()
cobra.CheckErr(err)
logger.Get().Printf("Returning token in JSON format: %q", token)
out := map[string]string{
"username": "x-access-token",
"password": <PASSWORD>,
}
outData, err := json.MarshalIndent(out, "", " ")
cobra.CheckErr(err)
fmt.Println(string(outData))
}
},
}
func Execute() {
if err := rootCmd.Execute(); err != nil {
cobra.CheckErr(err)
}
}
func init() {
cobra.OnInitialize(initConfig)
rootCmd.PersistentFlags().StringVarP(&cfgFile, "config", "c", "", "config file")
if err := viper.BindPFlag("config", rootCmd.PersistentFlags().Lookup("config")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().BoolVarP(&verbose, "verbose", "v", false, "verbose output")
if err := viper.BindPFlag("verbose", rootCmd.PersistentFlags().Lookup("verbose")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().BoolVar(&cliMode, "cli", false, "cli mode")
if err := viper.BindPFlag("cli", rootCmd.PersistentFlags().Lookup("cli")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().StringVarP(&server, "server", "s", "", "GitHub Server")
if err := viper.BindPFlag("server", rootCmd.PersistentFlags().Lookup("server")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().StringVar(&server, "api", "", "GitHub API url")
if err := viper.BindPFlag("api", rootCmd.PersistentFlags().Lookup("api")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().StringVarP(&privateKey, "key", "k", "", "path to the private key")
if err := viper.BindPFlag("key", rootCmd.PersistentFlags().Lookup("key")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().IntVarP(&appID, "app", "a", 0, "app ID")
if err := viper.BindPFlag("app", rootCmd.PersistentFlags().Lookup("app")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().StringVarP(&filter, "filter", "f", "", "filter")
if err := viper.BindPFlag("filter", rootCmd.PersistentFlags().Lookup("filter")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().BoolVar(¤tRepo, "current-repo", false, "if set to true and no repos provided - request token to the current repo")
if err := viper.BindPFlag("current-repo", rootCmd.PersistentFlags().Lookup("current-repo")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().StringVarP(&repositories, "repositories", "r", "", "repositories")
if err := viper.BindPFlag("repositories", rootCmd.PersistentFlags().Lookup("repositories")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().StringVar(&repositoryIDs, "repository-ids", "", "repository IDs")
if err := viper.BindPFlag("repository-ids", rootCmd.PersistentFlags().Lookup("repository-ids")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().StringVarP(&permissions, "permissions", "p", "", "permissions")
if err := viper.BindPFlag("permissions", rootCmd.PersistentFlags().Lookup("permissions")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().StringVarP(&installation, "installation", "i", "", "installation")
if err := viper.BindPFlag("installation", rootCmd.PersistentFlags().Lookup("installation")); err != nil {
cobra.CheckErr(err)
}
rootCmd.PersistentFlags().IntVar(&installationID, "installation-id", -1, "installation ID")
if err := viper.BindPFlag("installation-id", rootCmd.PersistentFlags().Lookup("installation-id")); err != nil {
cobra.CheckErr(err)
}
}
// initConfig prepares viper before command execution: environment variables
// prefixed with GITHUB_APPS_TRAMPOLINE are picked up automatically, and the
// config file (if one is resolvable) is loaded. When verbose mode is on, the
// file actually used is echoed to stderr.
func initConfig() {
	viper.SetEnvPrefix("GITHUB_APPS_TRAMPOLINE")
	viper.AutomaticEnv()

	if err := viper.ReadInConfig(); err != nil {
		// No readable config file — flags and env vars still apply.
		return
	}
	if viper.GetBool("verbose") {
		fmt.Fprintln(os.Stderr, "Using config file:", viper.ConfigFileUsed())
	}
}
// checkSilentErr handles an error in helper-mode fashion: a nil error is
// ignored, a helper.SilentExitError is logged and exits with status 0 (so git
// treats the helper as "nothing to offer"), and any other error is passed to
// cobra.CheckErr, which prints it and exits non-zero.
func checkSilentErr(err error) {
	if err == nil {
		return
	}
	var silent *helper.SilentExitError
	if errors.As(err, &silent) {
		logger.Get().Printf("Silently exiting: %s", err)
		os.Exit(0)
	}
	cobra.CheckErr(err)
}
|
#!/bin/bash
# Copyright 2016 - 2018 Crunchy Data Solutions, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Load shared example helpers (echo_err, create_storage, ...). Quoted so a
# CCPROOT containing spaces does not word-split.
source "${CCPROOT}/examples/common.sh"

# Absolute directory of this script, so it can be run from any CWD.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# This var lets us change the image to gis by setting
# CCP_PG_IMAGE='-gis'.
export CCP_PG_IMAGE=${CCP_PG_IMAGE:-}

"${DIR}/cleanup.sh"

# Test the command directly rather than inspecting $? afterwards (the original
# pattern is fragile — any statement inserted in between clobbers $?).
if ! create_storage "backrest-async-archive"
then
	echo_err "Failed to create storage, exiting.."
	exit 1
fi

# Ship the pgBackRest configuration into the cluster as a configmap.
# CCP_CLI is left unquoted on purpose: it may expand to a command plus args.
${CCP_CLI?} create --namespace="${CCP_NAMESPACE?}" \
	configmap br-aa-pgconf \
	--from-file "${DIR?}/configs/pgbackrest.conf"

# Render the deployment template with environment substitution and create it.
expenv -f "${DIR}/backrest.json" | ${CCP_CLI?} create --namespace="${CCP_NAMESPACE?}" -f -
|
// Style-sheet factory: given the theme's `spacing` object, returns the JSS
// rules for the page container, the hover-backdrop second button, and the
// icon shown inside the backdrop.
export default ({ spacing: sp }) => ({
  // Horizontally centered page container with fixed vertical padding.
  container: {
    width: sp.fullWidth,
    margin: 'auto',
    paddingTop: 60,
    paddingBottom: 60,
  },
  // Button whose `.backdrop` child is hidden by default and revealed as a
  // full-size dark overlay while the button is hovered.
  secondButton: {
    position: 'relative',
    '& .backdrop': {
      display: 'none',
    },
    '&:hover > .backdrop': {
      display: 'flex',
      position: 'absolute',
      top: 0,
      left: 0,
      width: sp.fullWidth,
      height: sp.fullHeight,
      backgroundColor: 'rgba(0,0,0,0.7)',
      cursor: 'pointer',
      content: ' ',
    },
  },
  // White icon centered inside the backdrop overlay.
  backdropIcon: {
    margin: 'auto',
    color: 'white',
    fontSize: 72,
  },
});
|
<filename>src/main/java/io/github/rcarlosdasilva/weixin/model/response/certificate/JsTicketResponse.java
package io.github.rcarlosdasilva.weixin.model.response.certificate;
import io.github.rcarlosdasilva.weixin.model.JsTicket;
/**
 * Response type for the JS ticket certificate call.
 *
 * <p>Adds no members beyond {@code serialVersionUID}; it exists so the
 * response layer has a concrete type distinct from the base {@link JsTicket}
 * model. Presumably maps the WeChat JS-SDK ticket payload — confirm against
 * the fields declared in {@link JsTicket}.
 */
public class JsTicketResponse extends JsTicket {
private static final long serialVersionUID = -1273892049807763672L;
}
|
<filename>pirates/effects/EnergySpiral.py
# File: E (Python 2.4)
from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from otp.otpbase import OTPRender
from EffectController import EffectController
from PooledEffect import PooledEffect
import random
class EnergySpiral(PooledEffect, EffectController):
    """Pooled spiral-texture effect: two stacked copies of the same model,
    faded in, UV-scrolled for a while, then faded out and returned to the pool.
    """

    def __init__(self):
        PooledEffect.__init__(self)
        EffectController.__init__(self)
        # Two copies of the spiral model, one parented under the other so the
        # texture layers overlap.
        self.effectModel = loader.loadModel('models/effects/energy_spirals')
        self.effectModel2 = loader.loadModel('models/effects/energy_spirals')
        self.effectModel2.reparentTo(self.effectModel)
        self.effectModel.setBillboardAxis(0)
        self.effectModel.setColorScaleOff()
        self.effectModel.reparentTo(self)
        self.effectColor = Vec4(1, 1, 1, 1)
        # Render-state setup: no depth writes, unlit, transparency enabled,
        # excluded from the shadow camera.
        self.setDepthWrite(0)
        self.setLightOff()
        self.setColorScaleOff()
        self.setTransparency(0, 0)
        self.hide(OTPRender.ShadowCameraBitmask)

    def createTrack(self):
        # Assemble fade-in -> hold -> fade-out, with a looping UV scroll that
        # runs for the whole track.
        stage = self.effectModel.findAllTextureStages()[0]
        self.effectModel.setTexOffset(stage, 0.0, 1.0)
        self.effectModel.setScale(0.4, 0.5, 0.5)
        hold = 6.0
        self.setColorScale(1.0, 1.0, 1.0, 0.0)
        appear = LerpColorScaleInterval(self, 1.0, Vec4(1.0, 1.0, 1.0, 1.0), startColorScale=Vec4(0.0, 0.0, 0.0, 0.0))
        vanish = LerpColorScaleInterval(self, 1.0, Vec4(0.0, 0.0, 0.0, 0.0), startColorScale=Vec4(1.0, 1.0, 1.0, 1.0))
        # NOTE(review): this interval is constructed but never played, same as
        # in the original — kept for behavioral parity.
        scaleIval = LerpScaleInterval(self.effectModel, hold, Vec3(1.0, 1.0, 4.0), startScale=Vec3(1.0, 1.0, 4.0))
        scroll = LerpFunctionInterval(self.setNewUVs, hold / 4.0, toData=-1.0, fromData=1.0,
                                      extraArgs=[self.effectModel, stage])
        self.startEffect = Sequence(Func(scroll.loop), appear)
        self.endEffect = Sequence(vanish, Func(scroll.finish), Func(self.cleanUpEffect))
        self.track = Sequence(self.startEffect, Wait(hold), self.endEffect)

    def setEffectColor(self, color):
        # Pull the requested color 3/4 of the way toward white, then bias it
        # slightly toward yellow (same formula as the original, with the
        # decompiler's float noise normalized to equal-valued literals).
        self.effectColor = (Vec4(1, 1, 1, 1) - (Vec4(1, 1, 1, 1) - color) / 4.0) + Vec4(0.1, 0.1, 0, 1)
        self.effectModel.setColorScale(self.effectColor)

    def setNewUVs(self, offset, part, ts):
        # LerpFunctionInterval callback: slide the V texture coordinate.
        part.setTexOffset(ts, 0.0, offset)

    def cleanUpEffect(self):
        # Return this instance to the effect pool once the track finishes.
        EffectController.cleanUpEffect(self)
        self.checkInEffect(self)

    def destroy(self):
        self.stop()
        if self.track:
            self.track = None
        self.removeNode()
        EffectController.destroy(self)
        PooledEffect.destroy(self)
|
from flask import Flask, request, render_template
app = Flask(__name__)
@app.route('/')
def index():
    # Serve the submission form page (template must live in templates/).
    return render_template('form_page.html')
@app.route('/submit', methods=['POST'])
def submit():
    """Persist a form submission (name, email, message) and acknowledge it.

    Fixes in this revision:
    - The original returned None, which Flask rejects (a view must return a
      response), producing a 500 on every submit.
    - The insert was never committed, so most DB-API drivers would discard it.
    - The `request.method == "POST"` check was dead code: the route only
      accepts POST, so Flask answers 405 before the view runs otherwise.
    """
    name = request.form["name"]
    email = request.form["email"]
    message = request.form["message"]
    # connect_to_database is not defined in this file — presumably provided
    # elsewhere; TODO confirm it exists and returns a DB-API connection.
    db = connect_to_database()
    # Parameterized query — already safe against SQL injection; keep it so.
    db.execute(
        "INSERT INTO submissions (name, email, message) VALUES (?, ?, ?)",
        (name, email, message),
    )
    # Persist the insert. Assumes `db` exposes DB-API commit() — confirm.
    db.commit()
    return "Submission received", 200
if __name__ == '__main__':
    # Development entry point only: debug=True enables the Werkzeug debugger,
    # which allows arbitrary code execution — never enable it in production.
    app.run(debug=True)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.