text
stringlengths 3
1.05M
|
|---|
goog.provide('reagent_mui.material.text_field');
var module$node_modules$react$index=shadow.js.require("module$node_modules$react$index", {});
var module$node_modules$$mui$material$TextField$index=shadow.js.require("module$node_modules$$mui$material$TextField$index", {});
// Builds the props map for the underlying [:input]/[:textarea] hiccup element:
// converts the JS props object to a Clojure map, attaches the forwarded React
// ref under :ref, and drops keys whose value is undefined.
// NOTE: this file is ClojureScript compiler output — do not hand-edit logic.
reagent_mui.material.text_field.input_props = (function reagent_mui$material$text_field$input_props(props,ref){
return reagent_mui.util.remove_undefined_vals(cljs.core.assoc.cljs$core$IFn$_invoke$arity$3(reagent_mui.util.js__GT_clj_SINGLEQUOTE_(props),new cljs.core.Keyword(null,"ref","ref",1289896967),ref));
});
// forwardRef component rendering a plain [:input] element with the converted
// props (see input_props above). Compiler-generated — do not hand-edit.
reagent_mui.material.text_field.input = module$node_modules$react$index.forwardRef((function reagent_mui$material$text_field$input(props,ref){
return reagent.core.as_element(new cljs.core.PersistentVector(null, 2, 5, cljs.core.PersistentVector.EMPTY_NODE, [new cljs.core.Keyword(null,"input","input",556931961),reagent_mui.material.text_field.input_props(props,ref)], null));
}));
// forwardRef component rendering a plain [:textarea] element with the
// converted props (see input_props above). Compiler-generated — do not hand-edit.
reagent_mui.material.text_field.textarea = module$node_modules$react$index.forwardRef((function reagent_mui$material$text_field$textarea(props,ref){
return reagent.core.as_element(new cljs.core.PersistentVector(null, 2, 5, cljs.core.PersistentVector.EMPTY_NODE, [new cljs.core.Keyword(null,"textarea","textarea",-650375824),reagent_mui.material.text_field.input_props(props,ref)], null));
}));
reagent_mui.material.text_field.mui_text_field = reagent_mui.util.adapt_react_class.cljs$core$IFn$_invoke$arity$2(module$node_modules$$mui$material$TextField$index.default,"mui-text-field");
// Public entry point generated from the variadic ClojureScript `text-field`
// function: collects JS `arguments` into [props & children] and forwards to
// the variadic arity defined below. Compiler-generated dispatch — do not hand-edit.
reagent_mui.material.text_field.text_field = (function reagent_mui$material$text_field$text_field(var_args){
var args__4870__auto__ = [];
var len__4864__auto___37390 = arguments.length;
var i__4865__auto___37391 = (0);
while(true){
if((i__4865__auto___37391 < len__4864__auto___37390)){
args__4870__auto__.push((arguments[i__4865__auto___37391]));
var G__37392 = (i__4865__auto___37391 + (1));
i__4865__auto___37391 = G__37392;
continue;
} else {
}
break;
}
// Everything after the first argument becomes the lazily-seqable `children`.
var argseq__4871__auto__ = ((((1) < args__4870__auto__.length))?(new cljs.core.IndexedSeq(args__4870__auto__.slice((1)),(0),null)):null);
return reagent_mui.material.text_field.text_field.cljs$core$IFn$_invoke$arity$variadic((arguments[(0)]),argseq__4871__auto__);
});
// Variadic arity: [props & children]. Wraps MUI TextField so that:
//  - :min-rows / :max-rows are looked up case-insensitively,
//  - an input component is injected into :InputProps :input-component
//    (react-textarea-autosize when :multiline without fixed :rows, the plain
//    [:textarea]/[:input] wrappers otherwise, and none at all for :select),
//  - for autosize fields, min/max rows are merged into :input-props.
// Compiler-generated code — do not hand-edit.
(reagent_mui.material.text_field.text_field.cljs$core$IFn$_invoke$arity$variadic = (function (props,children){
var min_rows = reagent_mui.util.get_anycase.cljs$core$IFn$_invoke$arity$2(props,new cljs.core.Keyword(null,"min-rows","min-rows",1428640765));
var max_rows = reagent_mui.util.get_anycase.cljs$core$IFn$_invoke$arity$2(props,new cljs.core.Keyword(null,"max-rows","max-rows",-2131113613));
// autosize? == (and (:multiline props) (not (:rows props)))
var autosize_QMARK_ = (function (){var and__4251__auto__ = new cljs.core.Keyword(null,"multiline","multiline",-1084693234).cljs$core$IFn$_invoke$arity$1(props);
if(cljs.core.truth_(and__4251__auto__)){
return cljs.core.not(new cljs.core.Keyword(null,"rows","rows",850049680).cljs$core$IFn$_invoke$arity$1(props));
} else {
return and__4251__auto__;
}
})();
// An explicit [:InputProps :input-component] from the caller always wins.
var input_component = (function (){var or__4253__auto__ = reagent_mui.util.get_anycase.cljs$core$IFn$_invoke$arity$2(new cljs.core.Keyword(null,"InputProps","InputProps",-1485068760).cljs$core$IFn$_invoke$arity$1(props),new cljs.core.Keyword(null,"input-component","input-component",-745892912));
if(cljs.core.truth_(or__4253__auto__)){
return or__4253__auto__;
} else {
if(cljs.core.truth_(autosize_QMARK_)){
return reagent_mui.material.textarea_autosize.react_textarea_autosize;
} else {
if(cljs.core.truth_(new cljs.core.Keyword(null,"multiline","multiline",-1084693234).cljs$core$IFn$_invoke$arity$1(props))){
return reagent_mui.material.text_field.textarea;
} else {
if(cljs.core.truth_(new cljs.core.Keyword(null,"select","select",1147833503).cljs$core$IFn$_invoke$arity$1(props))){
return null;
} else {
return reagent_mui.material.text_field.input;
}
}
}
}
})();
// Thread props through the two conditional updates above.
var props__$1 = (function (){var G__37389 = props;
var G__37389__$1 = (cljs.core.truth_(input_component)?cljs.core.update.cljs$core$IFn$_invoke$arity$3(G__37389,new cljs.core.Keyword(null,"InputProps","InputProps",-1485068760),(function (p1__37386_SHARP_){
return reagent_mui.util.assoc_anycase.cljs$core$IFn$_invoke$arity$3(p1__37386_SHARP_,new cljs.core.Keyword(null,"input-component","input-component",-745892912),input_component);
})):G__37389);
if(cljs.core.truth_(autosize_QMARK_)){
return cljs.core.update.cljs$core$IFn$_invoke$arity$4(G__37389__$1,new cljs.core.Keyword(null,"input-props","input-props",-1504868202),cljs.core.merge,new cljs.core.PersistentArrayMap(null, 2, [new cljs.core.Keyword(null,"max-rows","max-rows",-2131113613),max_rows,new cljs.core.Keyword(null,"min-rows","min-rows",1428640765),min_rows], null));
} else {
return G__37389__$1;
}
})();
return cljs.core.into.cljs$core$IFn$_invoke$arity$2(new cljs.core.PersistentVector(null, 2, 5, cljs.core.PersistentVector.EMPTY_NODE, [reagent_mui.material.text_field.mui_text_field,props__$1], null),children);
}));
// Arity metadata + apply-support generated by the ClojureScript compiler:
// one fixed argument (props), rest become children. Do not hand-edit.
(reagent_mui.material.text_field.text_field.cljs$lang$maxFixedArity = (1));
/** @this {Function} */
(reagent_mui.material.text_field.text_field.cljs$lang$applyTo = (function (seq37387){
var G__37388 = cljs.core.first(seq37387);
var seq37387__$1 = cljs.core.next(seq37387);
var self__4851__auto__ = this;
return self__4851__auto__.cljs$core$IFn$_invoke$arity$variadic(G__37388,seq37387__$1);
}));
//# sourceMappingURL=reagent_mui.material.text_field.js.map
|
const db = require('mongodb').MongoClient
const url = "mongodb://localhost:27017/"
// Ensures the "facts" collection exists in the "mydb" database.
// NOTE(review): although methods are attached to mongoDb.prototype below,
// this function sets no instance state — it only opens a connection, creates
// the collection, and closes. It presumably doubles as the constructor;
// confirm callers invoke it with `new` before refactoring.
function mongoDb() {
  db.connect(url, function(err, db) {
    if (err) throw err
    var dbo = db.db("mydb")
    // createCollection is a no-op if "facts" already exists.
    dbo.createCollection("facts", function(err, res) {
      if (err) throw err
      db.close()
    })
  })
}
// Persists a single fact document (English text, Finnish text, base64 image)
// into mydb.facts, stamped with the current date. Fire-and-forget: errors are
// logged and thrown inside the driver callback.
mongoDb.prototype.save = function save(factEn, factFi, imgBase64) {
  db.connect(url, function(err, connection) {
    if (err) throw err
    const dbo = connection.db("mydb")
    const factObj = {
      date: new Date(),
      factEn: factEn,
      factFi: factFi,
      image: imgBase64
    }
    dbo.collection("facts").insertOne(factObj, function(insertErr, res) {
      if (insertErr) {
        console.log("Error inserting object")
        throw insertErr
      }
      console.log("1 fact inserted")
      connection.close()
    })
  })
}
// Fetches today's fact document (if any) and passes it to `cb`.
// `cb` receives the matched document or null/undefined when none exists.
mongoDb.prototype.fetchImage = function fetchImage(cb) {
  db.connect(url, function(err, db) {
    if (err) throw err
    const dbo = db.db("mydb")
    // Build an inclusive range covering the whole of today. BUG FIX: the
    // previous code called setHours(0) / setHours(23), which resets only the
    // hour and leaves the *current* minutes/seconds in place, silently
    // narrowing the window and missing facts saved earlier/later in the hour.
    const startDate = new Date()
    startDate.setHours(0, 0, 0, 0)
    const endDate = new Date()
    endDate.setHours(23, 59, 59, 999)
    dbo.collection("facts").findOne({"date": {"$gte": startDate, "$lte": endDate}}, function(err, result) {
      if (err) throw err
      db.close()
      cb(result)
    })
  })
}
module.exports = mongoDb
|
import React from 'react';
import { Platform } from 'react-native';
import { shallow } from 'enzyme';
import toJson from 'enzyme-to-json';
import { create } from 'react-test-renderer';
import { ThemeProvider } from '../../config';
import ThemedIcon, { Icon } from '../Icon';
// Test suite for the react-native-elements Icon component. Most cases are
// snapshot tests (enzyme shallow render serialized via enzyme-to-json); the
// theme test uses react-test-renderer so the ThemeProvider context is applied.
describe('Icon component', () => {
  it('should render without issues', () => {
    const component = shallow(<Icon name="wifi" />);
    expect(component.length).toBe(1);
    expect(toJson(component)).toMatchSnapshot();
  });

  it('should render with icon type', () => {
    const component = shallow(
      <Icon
        name="alert"
        type="octicon"
        reverse
        color="red"
        iconStyle={{ backgroundColor: 'peru', borderRadius: 30 }}
        onPress={jest.fn()}
      />
    );
    expect(component.length).toBe(1);
    expect(toJson(component)).toMatchSnapshot();
  });

  it('should have onPress event', () => {
    const onPress = jest.fn();
    const component = shallow(<Icon onPress={onPress} name="wifi" />);
    // The touchable wrapper is the first child when onPress is provided.
    const touchable = component.childAt(0);
    touchable.simulate('press');
    expect(onPress).toHaveBeenCalledTimes(1);
  });

  it('should apply default disabled styles', () => {
    const onPress = jest.fn();
    const component = shallow(<Icon onPress={onPress} name="wifi" disabled />);
    expect(toJson(component)).toMatchSnapshot();
  });

  it('should apply custom disabled styles', () => {
    const onPress = jest.fn();
    const component = shallow(
      <Icon
        onPress={onPress}
        name="wifi"
        disabled
        disabledStyle={{ backgroundColor: 'pink' }}
      />
    );
    expect(toJson(component)).toMatchSnapshot();
  });

  it('should apply container style', () => {
    const component = shallow(
      <Icon name="wifi" containerStyle={{ backgroundColor: 'blue' }} />
    );
    expect(toJson(component)).toMatchSnapshot();
  });

  it('should apply reverse styles', () => {
    const component = shallow(<Icon name="wifi" reverse />);
    expect(toJson(component)).toMatchSnapshot();
  });

  it('should set underlayColor to color when styles when underlayColor absent', () => {
    const component = shallow(<Icon name="wifi" underlayColor={null} />);
    expect(toJson(component)).toMatchSnapshot();
  });

  it('should apply raised styles', () => {
    const component = shallow(<Icon name="wifi" raised />);
    expect(toJson(component)).toMatchSnapshot();
  });

  it('works on android with onPress', () => {
    // NOTE(review): mutates the global Platform mock for the rest of the file;
    // any test added after this one runs as android API 25.
    Platform.OS = 'android';
    Platform.Version = 25;
    const component = shallow(<Icon name="wifi" onPress={jest.fn()} />);
    expect(toJson(component)).toMatchSnapshot();
  });

  it('should apply values from theme', () => {
    const theme = {
      Icon: {
        size: 26,
      },
    };
    const component = create(
      <ThemeProvider theme={theme}>
        <ThemedIcon />
      </ThemeProvider>
    );
    // The themed default size must reach the inner icon (testID 'iconIcon').
    expect(component.root.findByProps({ testID: 'iconIcon' }).props.size).toBe(
      26
    );
    expect(component.toJSON()).toMatchSnapshot();
  });
});
|
# Python modules
import sys
import os
import stat
import subprocess
import tempfile
# Our modules
import vespa.common.util.misc as util_misc
"""Creates double-clickable shortcuts for all Vespa apps.
It will only work when Vespa has been successfully installed.
This code is meant to be run like so:
python -m 'vespa.create_shortcuts'
"""
# APPLICATIONS lists the directories that contain the Vespa apps installed
# by setup.py.
APPLICATIONS = ("pulse", "simulation", "analysis", "datasim")
# Vespa supports OS X, Windows and Linux, and all three have fairly different
# ways of creating executable shortcuts.
# Normalize sys.platform to one of our three supported identifiers.
# NOTE(review): an unrecognized platform leaves `platform` as the raw
# sys.platform value, so none of the branches below run and the script
# still prints done_msg — confirm whether an explicit error is wanted.
platform = sys.platform.lower()
if "linux" in platform:
    platform = "linux"
elif "darwin" in platform:
    platform = "osx"
elif "win32" in platform:
    platform = "windows"

# Shared state for all three platform branches below.
vespa_install_path = util_misc.get_vespa_install_directory()
python_path = sys.executable
done_msg = "Done! Vespa shortcuts have been created on your desktop."
if platform == "osx":
    # Under OS X we create shortcuts in the standard Applications folder. The shortcuts are
    # directories with the .app extension which makes them look and behave like regular OS X
    # apps. They are bare bones apps, though, because they only contain a single 2-line shell
    # script.
    target_path = '/Applications/Vespa'
    done_msg = "Done! Vespa shortcuts have been created in your Applications folder."
    if not os.path.exists(target_path):
        os.mkdir(target_path)
    for application in APPLICATIONS:
        capitalized_name = application.capitalize()
        print("Creating a shortcut for %s..." % capitalized_name)
        # Create directory tree, e.g. /Applications/Vespa/Simulation.app/Contents/MacOS
        app_path = os.path.join(target_path, capitalized_name + ".app", "Contents", "MacOS")
        if not os.path.exists(app_path):
            os.makedirs(app_path)
        # The executable inside the bundle carries the app's name.
        app_path = os.path.join(app_path, capitalized_name)
        # Create a command string that will run this app. The path to Python must be
        # fully qualified.
        path = os.path.join(vespa_install_path, application, "src", "main.py")
        executable = sys.executable
        if 'conda' in executable:
            # Mini/Anaconda on the Mac requires pythonw to launch GUI apps.
            executable += 'w'
        command = '"%s" "%s"' % (executable, path)
        with open(app_path, 'w') as f:
            f.write("#!/bin/bash\n")
            f.write(command)
        # 0o755: the launcher script must be executable to be double-clickable.
        os.chmod(app_path, 0o755)
elif platform == "linux":
# Get some environment vars.
home = os.getenv("HOME")
for application in APPLICATIONS:
print("Creating a shortcut for %s..." % application.capitalize())
# Create a command string that will run this app
app_path = os.path.join(vespa_install_path, application, "src", "main.py")
app_name = application.capitalize()
# Gnome & KDE both support .desktop files. They're plain text
# files with a well-documented standard. What a crazy idea!
# http://standards.freedesktop.org/desktop-entry-spec/desktop-entry-spec-latest.html
# Note that under Ubuntu (and perhaps other distros), pythonw is
# not available by default. Under OS X and Windows, it is.
content = """
#!/usr/bin/env xdg-open
[Desktop Entry]
Encoding=UTF-8
Version=1.0
Type=Application
Terminal=false
Icon[en_US]={python_path}
Name[en_US]={app_name}
Exec={python_path} {app_path}
Name={app_name}
Icon=python
""".format(python_path=python_path, app_name=app_name, app_path=app_path)
path = home + ("/Desktop/%s.desktop" % app_name)
open(path, "w").write(content)
# Make the file executable.
mode = os.stat(path)[0]
mode |= stat.S_IXUSR
os.chmod(path, mode)
elif platform == "windows":
# Under Windows, executable shortcuts are .lnk files which are an
# undocumented binary format. Thanks, Microsoft! The only safe way
# to create them is to call functions in Windows DLLs. The easiest (?)
# way for us to call those functions is via VBScript.
# We write our VBScript to a temp file and then ask Windows to execute
# it. Hopefully executing a .vbs won't cause any Windows security
# thingies to freak out.
# Get the path to the Python executable and change it to the pythonw
# EXE if possible. In contrast to python.exe, pythonw.exe doesn't
# open a console window before launching the app, so it looks a bit
# nicer. The downside is that anything printed to stdout or stderr
# will go into the bit bucket.
# Under non-Windows platforms, there's no difference between python
# and pythonw.
if python_path.endswith("python.exe"):
python_path = python_path[:-len("python.exe")] + "pythonw.exe"
# else:
# This is very unexpected, better not mess with it.
for application in APPLICATIONS:
print("Creating a shortcut for %s..." % application.capitalize())
# Create the path to main.py for this app
app_path = os.path.join(vespa_install_path, application, "src", "main.py")
name = application.capitalize()
# This is the VBScript.
content = """
WScript.Quit Main
Function Main
Set shell = CreateObject("WScript.Shell")
With shell.CreateShortcut(shell.SpecialFolders("Desktop") & "\\%s.lnk")
.TargetPath = chr(34) + "%s" + chr(34)
.Arguments = chr(34) + "%s" + chr(34)
.WindowStyle = 1
.Save
End With
End Function
""" % (name, python_path, app_path)
# I create a temp file from which to execute the script. The temp
# file must have a .vbs extension.
fd, filename = tempfile.mkstemp(".vbs")
os.write(fd, content)
os.close(fd)
# Execute it
os.system(filename)
# Clean up
os.remove(filename)
print(done_msg)
|
"""Admin forms."""
from flask_wtf import FlaskForm
from flask_wtf.file import FileAllowed, FileRequired
from wtforms import PasswordField, StringField, FloatField, MultipleFileField, FileField, SelectField, DateField, \
TextAreaField
from wtforms.validators import DataRequired, Length, Email, EqualTo, InputRequired
from wtforms_alchemy import Unique, ModelForm, model_form_factory
from mycms.user.models import User
from flask_uploads import UploadSet, IMAGES
images = UploadSet('images', IMAGES)
BaseModelForm = model_form_factory(FlaskForm)
class AdminLoginForm(FlaskForm):
    """Login form for the admin area.

    On successful validation the matched ``User`` is stored on ``self.user``.
    """

    username = StringField("Username", validators=[DataRequired()])
    password = PasswordField("Password", validators=[DataRequired()])

    def __init__(self, *args, **kwargs):
        """Create instance."""
        super(AdminLoginForm, self).__init__(*args, **kwargs)
        self.user = None

    def validate(self):
        """Run field validators, then check credentials and account status."""
        if not super(AdminLoginForm, self).validate():
            return False
        user = User.query.filter_by(username=self.username.data).first()
        self.user = user
        if not user:
            self.username.errors.append("Unknown username")
            return False
        if not user.check_password(self.password.data):
            self.password.errors.append("Invalid password")
            return False
        if not user.active:
            self.username.errors.append("User not activated")
            return False
        return True
class UserCrudForm(BaseModelForm):
    """Admin CRUD form for creating/updating users."""

    username = StringField("Username", validators=[DataRequired(), Length(min=3, max=25), Unique(User.username)])
    first_name = StringField("First Name", validators=[Length(min=3, max=40)])
    last_name = StringField("Last Name", validators=[Length(min=3, max=40)])
    email = StringField("Email", validators=[DataRequired(), Email(), Length(min=6, max=40), Unique(User.email)])
    password = PasswordField("Password", validators=[DataRequired(), Length(min=6, max=40)])
    # Consistency fix: pass the validator list via the `validators` keyword
    # like every other field in this module (it was passed positionally).
    confirm = PasswordField("Verify password",
                            validators=[DataRequired(), EqualTo("password", message="Passwords must match")])
class ModuleCrudForm(BaseModelForm):
    """Admin CRUD form for sellable modules (name, pricing, media)."""

    name = StringField("Name", validators=[DataRequired(), Length(min=5, max=80)])
    description = StringField("Description", validators=[DataRequired(), Length(min=5, max=256)])
    long_description = TextAreaField("Long Description", validators=[DataRequired(), Length(min=5)])
    tags = StringField("Tags (comma separated)", validators=[DataRequired(), Length(min=5)])
    demo_url = StringField("Demo Url", validators=[DataRequired(), Length(min=5, max=256)])
    code_path = StringField("Code Path", validators=[DataRequired(), Length(min=5, max=256)])
    price = FloatField("Price", validators=[DataRequired()])
    support_price = FloatField("Support Price", validators=[DataRequired()])
    release_date = DateField("Release Date", validators=[DataRequired()])
    # FIX: label was a copy-paste of "Release Date", which rendered two
    # identically-labelled date fields in the admin UI.
    last_update_date = DateField("Last Update Date", validators=[DataRequired()])
    image = FileField('Product Image (397x306)', validators=[FileRequired(), FileAllowed(images, 'Images only allowed!')])
    # NOTE(review): this class attribute shadows the module-level `images`
    # UploadSet for later attribute access on the class; the FileAllowed
    # arguments here still resolve to the UploadSet at class-body execution.
    images = MultipleFileField('Product Screenshots (726x403)', validators=[DataRequired(), FileAllowed(images, 'Images only!')])
class SlideShowCrudForm(BaseModelForm):
    """Admin CRUD form for home-page slideshow entries."""

    title = StringField("Title", validators=[DataRequired(), Length(min=5, max=80)])
    image = FileField('SlideShow Image (928x413)', validators=[FileRequired(), FileAllowed(images, 'Images only allowed!')])


class SeoCrudForm(BaseModelForm):
    """Admin CRUD form for SEO meta tags."""

    # meta_tag chooses which <meta> attribute carries the key: `name` or `property`.
    meta_tag = SelectField(u'Meta Tag',choices=[('name','name'),('property','property')] ,validators=[DataRequired()])
    title = StringField("Title", validators=[DataRequired(), Length(min=5, max=80)])
    content = StringField("Content", validators=[DataRequired(), Length(min=5, max=255)])

#############################################################

class HomeTextForm(BaseModelForm):
    """Home-page headline texts."""

    firstext = StringField("First Title", validators=[DataRequired(), Length(min=25, max=80)])
    secondtext = StringField("Second Title", validators=[DataRequired(), Length(min=25, max=80)])


class TechnologiesForm(BaseModelForm):
    """Technologies-section headline texts (intentionally same fields as HomeTextForm)."""

    firstext = StringField("First Title", validators=[DataRequired(), Length(min=25, max=80)])
    secondtext = StringField("Second Title", validators=[DataRequired(), Length(min=25, max=80)])


class ImageTechnologyForm(BaseModelForm):
    """Upload form for a single technology icon."""

    image = FileField('Technology Image (128x128)', validators=[FileRequired(), FileAllowed(images, 'Images only allowed!')])


class WebsiteLogoForm(BaseModelForm):
    """Upload form for the site logo."""

    logo_image = FileField('Logo Image (128x128)', validators=[FileRequired(), FileAllowed(images, 'Logo Only allowed!')])
# Footer Text Form
class FooterTextForm(BaseModelForm):
    """Footer long-description text."""

    title = TextAreaField("Long Description", validators=[DataRequired(), Length(min=5)])


# Social Media Form
# NOTE(review): class name "SocialIocnForm" misspells "Icon"; renaming would
# break existing imports/templates, so the typo is documented, not changed.
class SocialIocnForm(BaseModelForm):
    """Footer social-media icon link (icon CSS classes + target URL)."""

    # image = FileField('Logo Image (128x128)', validators=[FileRequired(), FileAllowed(images, 'Social Iocn Image Only!')])
    icon = StringField("Icon Html Code(fab fa-facebook-f)", validators=[DataRequired()])
    url_link = StringField("Icon Link", validators=[DataRequired()])


# Copyright Form Model
class CopyRightForm(BaseModelForm):
    """Footer copyright line."""

    text = StringField("Copyright Footer Text", validators=[DataRequired(), Length(min=6, max=60)])


# Footer Image Model Form
class FooterImageForm(BaseModelForm):
    """Footer image upload."""

    image = FileField('Footer Image', validators=[FileRequired(), FileAllowed(images, "Image Allowed Only !")])


# Resource Title Role Model Form
class ResourcesForm(BaseModelForm):
    """Resource role title."""

    role_title = StringField("Role Add ...", validators=[DataRequired(), Length(min=3, max=20)])


# Resource Details Add with Role
class ResourceDetailAddForm(BaseModelForm):
    """Resource detail entry attached to a role."""

    title = StringField("Title", validators=[DataRequired(), Length(min=5, max=20)])
    description = TextAreaField("Description", validators=[DataRequired(), Length(min=5)])
|
from socket import *
import time
import pymysql
# Listen on all interfaces, port 8009; 1 KiB receive buffer per message.
HOST = ''
PORT = 8009
BUFSIZE = 1024
ADDR = (HOST,PORT)

# DB connection
# SECURITY(review): credentials are hard-coded and the root account is used;
# move these to configuration/secrets before deploying.
conn = pymysql.connect(host='localhost', user='root', password='juneomysql', db='mysql', charset='utf8')
curs = conn.cursor()

#### set webcam2 ON DB — announce that this camera process is running.
sql = "update Command SET Webcam2 = 'ON'"
curs.execute(sql)
conn.commit()
conn.close()

# Single-client TCP server: block until the webcam client connects.
server_socket = socket(AF_INET, SOCK_STREAM)
server_socket.bind(ADDR)
print('Webcam2EAR Server launched..')
server_socket.listen(1)
print('Waiting connection..')
client_socket, addr = server_socket.accept()
print('Connected by: ', str(addr))

### wait for client dlib loading
print('wait 5 sec')
time.sleep(5)
while True:  # Server loop; webcam2 off, alert on
    # NOTE(review): the original source arrived with indentation stripped;
    # the nesting below is the reconstruction that matches the send/recv
    # protocol comments (n1..n4) — confirm against the client implementation.
    # A fresh DB connection is opened every iteration and closed at the end.
    conn = pymysql.connect(host='localhost', user='root', password='juneomysql', db='mysql', charset='utf8')
    curs = conn.cursor()

    ### Read Data: the Webcam2 command flag drives the whole iteration.
    sql = "select Webcam2 from Command"
    curs.execute(sql)
    dbdata = curs.fetchall()
    str_dbdata = ''.join(dbdata[0])

    ### check Webcam2 Data
    print('Webcam2 Check:', str_dbdata)

    ### Send message OFF/ON
    if 'OFF' in str_dbdata:
        client_socket.send('Webcam2 OFF'.encode('utf-8'))
        print('\nWebcam2 OFF and Shut down\n')
        nothing1 = client_socket.recv(BUFSIZE)  # n1 n2 (sync only, content ignored)
    elif 'ON' in str_dbdata:
        client_socket.send('Webcam2 ON'.encode('utf-8'))
        time.sleep(0.1)
        print('\nWebcam2 ON and Detecting\n')
        sql = "select Registration2 from Command"
        curs.execute(sql)
        regis2 = curs.fetchall()
        str_regis = ''.join(regis2[0])
        print('Registration2 Check:', str_regis)
        #time.sleep(0.6) #n
        nothing2 = client_socket.recv(BUFSIZE)
        if str_regis != 'OFF':
            # Registration mode: client measures a new driver's eye-aspect-ratio.
            client_socket.send('EAR update'.encode('utf-8'))
            print('Calculating New drivers EAR')
            ### value socket receiver
            new_earcal = client_socket.recv(BUFSIZE)  # recv 1
            value = new_earcal.decode('utf-8')
            print('Received EAR: ', value)
            ### SET DB EAR value.
            # FIX: the statement used to be built by string concatenation with
            # missing spaces ("SET U.EAR =" + value + "WHERE ..."), producing
            # invalid SQL such as "U.EAR =0.25WHERE". A parameterized query
            # fixes that and removes the injection risk from socket input.
            sql = ("UPDATE User U INNER JOIN Command C ON U.NAME = C.REGISTRATION2 "
                   "SET U.EAR = %s WHERE U.NAME = C.REGISTRATION2")
            curs.execute(sql, (value,))
            conn.commit()
            print('updated your EAR')
            ### SET DB Regist OFF
            sql = "update Command SET Registration2 = 'OFF'"
            curs.execute(sql)
            conn.commit()
        elif str_regis == 'OFF':  ## detecting drowsiness part
            client_socket.send('OFF'.encode('utf-8'))  # send regis OFF
            print('Driver confirmed')
            #time.sleep(0.3)
            nothing3 = client_socket.recv(BUFSIZE)  # n3
            sql = "select Name from Driver"
            curs.execute(sql)
            name = curs.fetchall()
            drivername = ''.join(name[0])
            if drivername == 'OFF':
                EAR_load = '0.05'
                print('Driver OFF')
                client_socket.send(EAR_load.encode('utf-8'))  # send regis 0.05
            elif drivername == '':
                EAR_load = '0.05'
                print('Driver null')
                client_socket.send(EAR_load.encode('utf-8'))
            else:
                # FIX: parameterized lookup (was built by string concatenation,
                # injectable via the Driver.Name value).
                sql = "select EAR from User WHERE Name = %s"
                curs.execute(sql, (drivername,))
                EARdb = curs.fetchall()  # if not in EARdb EAR_load= 0.05 in
                print(EARdb)
                EAR_load = ''.join(EARdb[0])
                print('Driver EAR: ', EAR_load)
                client_socket.send(EAR_load.encode('utf-8'))  # send regis EAR
            ### Write Data to ALERT (wait)
            # send/recv handshake: first status; if Normal, ack and read again.
            clientdata = client_socket.recv(BUFSIZE)
            if 'Normal' in clientdata.decode('utf-8'):
                print('1 Normal Driver Condition')
                client_socket.send('1st 2nd'.encode('utf-8'))  # n4
                clientdata = client_socket.recv(BUFSIZE)
            if 'Alert(DWS)' in clientdata.decode('utf-8'):
                sql = "update Command SET Alert = 'ON'"
                curs.execute(sql)
                conn.commit()
                print('*******Alert(DWS); Set Alert ON in DB*******')
            elif 'Normal' in clientdata.decode('utf-8'):
                print('2 Normal Driver Condition')
    conn.close()
###
# NOTE(review): unreachable — the `while True` loop above never breaks, so
# this cleanup only documents the intended shutdown order.
client_socket.close()
server_socket.close()
print('[System] Server close..')
|
/* Distributed under the OSI-approved BSD 3-Clause License. See accompanying
file Copyright.txt or https://cmake.org/licensing#kwsys for details. */
#include "kwsysPrivate.h"
#include KWSYS_HEADER(Base64.h)
/* Work-around CMake dependency scanning limitation. This must
duplicate the above list of headers. */
#if 0
#include "Base64.h.in"
#endif
/*--------------------------------------------------------------------------*/
/* The 64 standard base64 alphabet characters, indexed by 6-bit value.
   Sized 65 to hold the string literal's terminating NUL. */
static const unsigned char kwsysBase64EncodeTable[65] =
  "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
  "abcdefghijklmnopqrstuvwxyz"
  "0123456789+/";
/*--------------------------------------------------------------------------*/
/* Inverse of the encode table, indexed by ASCII byte: the 6-bit value for
   each alphabet character, 0xFF for every invalid byte.  Note that '='
   (ASCII 61) maps to 0x00 so padding decodes as a zero bits contribution;
   kwsysBase64_Decode3 reports the reduced length by inspecting '='
   explicitly. */
static const unsigned char kwsysBase64DecodeTable[256] = {
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0x3E, 0xFF, 0xFF, 0xFF, 0x3F, 0x34, 0x35, 0x36, 0x37,
  0x38, 0x39, 0x3A, 0x3B, 0x3C, 0x3D, 0xFF, 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0xFF,
  0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C,
  0x0D, 0x0E, 0x0F, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, 0x20,
  0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x2B, 0x2C, 0x2D,
  0x2E, 0x2F, 0x30, 0x31, 0x32, 0x33, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  /*------------------------------------*/
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
};
/*--------------------------------------------------------------------------*/
/* Map a 6-bit value (0..63) to its base64 alphabet character. */
static unsigned char kwsysBase64EncodeChar(int c)
{
  return kwsysBase64EncodeTable[(unsigned char)c];
}
/*--------------------------------------------------------------------------*/
/* Map an ASCII byte to its 6-bit value; 0xFF marks an invalid character. */
static unsigned char kwsysBase64DecodeChar(unsigned char c)
{
  return kwsysBase64DecodeTable[c];
}
/*--------------------------------------------------------------------------*/
/* Encode a full 3-byte group into 4 base64 characters: the 24 input bits
   are split into four 6-bit indices into the alphabet. */
void kwsysBase64_Encode3(const unsigned char* src, unsigned char* dest)
{
  int b0 = src[0];
  int b1 = src[1];
  int b2 = src[2];
  dest[0] = kwsysBase64EncodeChar(b0 >> 2);
  dest[1] = kwsysBase64EncodeChar(((b0 & 0x03) << 4) | (b1 >> 4));
  dest[2] = kwsysBase64EncodeChar(((b1 & 0x0F) << 2) | (b2 >> 6));
  dest[3] = kwsysBase64EncodeChar(b2 & 0x3F);
}
/*--------------------------------------------------------------------------*/
/* Encode a trailing 2-byte group into 3 base64 characters plus one '='
   padding character. */
void kwsysBase64_Encode2(const unsigned char* src, unsigned char* dest)
{
  int b0 = src[0];
  int b1 = src[1];
  dest[0] = kwsysBase64EncodeChar(b0 >> 2);
  dest[1] = kwsysBase64EncodeChar(((b0 & 0x03) << 4) | (b1 >> 4));
  dest[2] = kwsysBase64EncodeChar((b1 & 0x0F) << 2);
  dest[3] = '=';
}
/*--------------------------------------------------------------------------*/
/* Encode a trailing 1-byte group into 2 base64 characters plus two '='
   padding characters. */
void kwsysBase64_Encode1(const unsigned char* src, unsigned char* dest)
{
  int b0 = src[0];
  dest[0] = kwsysBase64EncodeChar(b0 >> 2);
  dest[1] = kwsysBase64EncodeChar((b0 & 0x03) << 4);
  dest[2] = '=';
  dest[3] = '=';
}
/*--------------------------------------------------------------------------*/
/* Encode 'length' bytes from the input buffer and store the
   encoded stream into the output buffer. Return the length of the encoded
   buffer (output). Note that the output buffer must be allocated by the caller
   (length * 1.5 should be a safe estimate). If 'mark_end' is true then an
   extra set of 4 bytes ("====") is appended when the input is a multiple of
   3 bytes. These are invalid chars and therefore stop the decoder, enabling
   the caller to decode a stream without knowing how much data to expect (a
   non-multiple-of-3 input ends in padding that stops the decoder anyway). */
size_t kwsysBase64_Encode(const unsigned char* input, size_t length,
                          unsigned char* output, int mark_end)
{
  unsigned char* out = output;
  size_t pos = 0;
  size_t remaining;

  /* Emit 4 output characters for every complete input triplet. */
  for (; pos + 3 <= length; pos += 3, out += 4) {
    kwsysBase64_Encode3(input + pos, out);
  }

  /* Handle the 0-, 1- or 2-byte tail. */
  remaining = length - pos;
  if (remaining == 2) {
    kwsysBase64_Encode2(input + pos, out);
    out += 4;
  } else if (remaining == 1) {
    kwsysBase64_Encode1(input + pos, out);
    out += 4;
  } else if (mark_end) {
    /* Input was a multiple of 3: optionally append an explicit terminator. */
    out[0] = out[1] = out[2] = out[3] = '=';
    out += 4;
  }
  return (size_t)(out - output);
}
/*--------------------------------------------------------------------------*/
/* Decode 4 bytes into a 3 byte string. */
int kwsysBase64_Decode3(const unsigned char* src, unsigned char* dest)
{
unsigned char d0, d1, d2, d3;
d0 = kwsysBase64DecodeChar(src[0]);
d1 = kwsysBase64DecodeChar(src[1]);
d2 = kwsysBase64DecodeChar(src[2]);
d3 = kwsysBase64DecodeChar(src[3]);
/* Make sure all characters were valid */
if (d0 == 0xFF || d1 == 0xFF || d2 == 0xFF || d3 == 0xFF) {
return 0;
}
/* Decode the 3 bytes */
dest[0] = (unsigned char)(((d0 << 2) & 0xFC) | ((d1 >> 4) & 0x03));
dest[1] = (unsigned char)(((d1 << 4) & 0xF0) | ((d2 >> 2) & 0x0F));
dest[2] = (unsigned char)(((d2 << 6) & 0xC0) | ((d3 >> 0) & 0x3F));
/* Return the number of bytes actually decoded */
if (src[2] == '=') {
return 1;
}
if (src[3] == '=') {
return 2;
}
return 3;
}
/*--------------------------------------------------------------------------*/
/* Decode bytes from the input buffer and store the decoded stream
   into the output buffer until 'length' bytes have been decoded. Return the
   real length of the decoded stream (which should be equal to 'length'). Note
   that the output buffer must be allocated by the caller. If
   'max_input_length' is not null, then it specifies the number of encoded
   bytes that should be at most read from the input buffer. In that case the
   'length' parameter is ignored. This enables the caller to decode a stream
   without actually knowing how much decoded data to expect (of course, the
   buffer must be large enough). */
size_t kwsysBase64_Decode(const unsigned char* input, size_t length,
                          unsigned char* output, size_t max_input_length)
{
  const unsigned char* ptr = input;
  unsigned char* optr = output;

  /* Input-bounded mode: consume 4-char groups until max_input_length is
     reached or a short/invalid group ends the stream. */
  if (max_input_length) {
    const unsigned char* end = input + max_input_length;
    while (ptr < end) {
      int len = kwsysBase64_Decode3(ptr, optr);
      optr += len;
      if (len < 3) {
        /* Padding or invalid char: stream ended early. */
        return (size_t)(optr - output);
      }
      ptr += 4;
    }
  } else {
    /* Output-bounded mode: decode while 3 or more output bytes remain. */
    unsigned char* oend = output + length;
    while ((oend - optr) >= 3) {
      int len = kwsysBase64_Decode3(ptr, optr);
      optr += len;
      if (len < 3) {
        return (size_t)(optr - output);
      }
      ptr += 4;
    }

    /* Decode the last triplet into a scratch buffer so the final group never
       writes past the caller's output, then copy only the bytes that fit. */
    if (oend - optr == 2) {
      unsigned char temp[3];
      int len = kwsysBase64_Decode3(ptr, temp);
      if (len >= 2) {
        optr[0] = temp[0];
        optr[1] = temp[1];
        optr += 2;
      } else if (len > 0) {
        optr[0] = temp[0];
        optr += 1;
      }
    } else if (oend - optr == 1) {
      unsigned char temp[3];
      int len = kwsysBase64_Decode3(ptr, temp);
      if (len > 0) {
        optr[0] = temp[0];
        optr += 1;
      }
    }
  }
  return (size_t)(optr - output);
}
|
# Packaging script for the `ledgame` package.
# NOTE(review): distutils is deprecated and removed in Python 3.12; migrating
# to setuptools (and noting that `dependency_links` is ignored by modern pip)
# should be considered — left unchanged here to avoid altering install behavior.
from distutils.core import setup

setup(
    name='ledgame',
    version='0.0.1',
    author='gentlemans-club',
    author_email='kentsd16@student.uia.no',
    packages=['ledgame'],
    python_requires='>=3.5.0',
    install_requires=[
        'notpi',
        'Pillow'
    ],
    # Git source for the `notpi` dependency (pre-PEP 508 style pin).
    dependency_links=['git+https://github.com/gentlemans-club/notpi.git@master#egg=notpi-0']
)
|
var fs = require("fs");
var deepClone = require("clone");
var _ = require("underscore");
/**
 * Transport that records reports to a local JSON file (used for debugging).
 * Any stale file from a previous run is removed at construction time.
 *
 * @param {string} filename - path of the JSON file to write reports to
 */
function FileTransport(filename) {
    this._filename = filename;
    this._requests = [];
    if (fs.existsSync(this._filename)) {
        // BUGFIX: fs.unlink() is asynchronous and requires a callback in
        // modern Node (it throws TypeError without one). Use the sync
        // variant since the constructor must complete before use anyway.
        fs.unlinkSync(this._filename);
    }
}
// No persistent connection is needed when writing to a local file; this
// no-op exists to satisfy the transport interface.
FileTransport.prototype.ensureConnection = function() {
};
/**
 * Record a report by appending it to the in-memory request list and
 * rewriting the whole JSON file.
 *
 * @param {boolean} detached - passed through and stored with the request
 * @param {*} auth - auth payload stored with the request
 * @param {Object} report - report object; cloned before being modified
 * @param {Function} done - invoked once the file has been written
 */
FileTransport.prototype.report = function(detached, auth, report, done) {
    // For convenience of debugging strip null fields (they are not sent over
    // the wire by the JS thrift implementation anyway).
    report = deepClone(report);
    var stripNulls = function (record) {
        _.each(record, function (val, key) {
            if (val === null) {
                delete record[key];
            }
        });
    };
    _.each(report.log_records, stripNulls);
    _.each(report.span_records, stripNulls);
    this._requests.push({
        detached : detached,
        auth : auth,
        report : report,
    });
    fs.writeFileSync(this._filename, JSON.stringify({
        requests : this._requests,
    }, null, 4));
    done();
};
module.exports = FileTransport;
|
import angular from "/ui/web_modules/angular.js";
import qwQueryService from "/_p/ui/query/qw_query_service.js";
export default 'mnGsiService';

angular
  .module('mnGsiService', [
    qwQueryService
  ])
  .factory('mnGsiService', mnGsiServiceFactory);

/**
 * GSI (secondary index) service: fetches index status from the server and
 * issues DROP INDEX statements through the query workbench.
 */
function mnGsiServiceFactory($http, qwQueryService) {
  var mnGsiService = {
    getIndexesState: getIndexesState,
    postDropIndex: postDropIndex
  };

  return mnGsiService;

  function postDropIndex(row) {
    // to drop an index, we create a 'DROP' query to send to the query workbench
    return qwQueryService
      .executeQueryUtil('DROP INDEX `' + row.bucket + '`.`' + row.indexName + '`', true);
  }

  // GET /indexStatus and decorate the response with per-bucket and per-node
  // groupings (each entry is a shallow copy of the index record).
  function getIndexesState(mnHttpParams) {
    return $http({
      method: 'GET',
      url: '/indexStatus',
      mnHttp: mnHttpParams
    }).then(function (resp) {
      var byNodes = {};
      var byBucket = {};
      // (removed an unused `byID` accumulator: byID is served directly from
      // the raw index list below)
      resp.data.indexes.forEach(function (index) {
        byBucket[index.bucket] = byBucket[index.bucket] || [];
        byBucket[index.bucket].push(Object.assign({}, index));
        index.hosts.forEach(function (node) {
          byNodes[node] = byNodes[node] || [];
          byNodes[node].push(Object.assign({}, index));
        });
      });
      resp.data.byBucket = byBucket;
      resp.data.byNodes = byNodes;
      resp.data.byID = resp.data.indexes;
      return resp.data;
    });
  }
}
|
#!/usr/bin/env python3
"""
Misc subroutines for linear fitting:
* init correlation matrix A, value vector b
* fit to get unknown coefficients x
* predict using fitted x
"""
import re
import glob
import logging
import numpy as np
import scipy
import scipy.sparse
from cssolve.csfit import csfit, predict_holdout
def print_version(version):
    """Print the program version string, indented, followed by a blank line."""
    indent = " " * 3
    print(indent, version, "\n")
def print_end():
    """Print the final completion marker."""
    print("Done")
def add_common_parameter(parser):
    """
    Add a few command-line parameters common to different models.

    :param parser: argparse.ArgumentParser extended in place
    :return: None
    """
    parser.add_argument("--log_level", type=int, help="Logging level. Default 1", default=1)
    parser.add_argument("--symm_step", type=int, help="Space group. 1 = file, *2 = spglib, 3 = spglib & save", default=2)
    parser.add_argument("--symm_prim", action='store_false',
                        help="Symmetrize primitive cell with space group and save to POSCAR_sym. Default: True", default=True)
    # The pipeline step flags share one encoding (default marked with *):
    # 0 = ignore & exit, 1 = read from file, 2 = generate, 3 = generate & save.
    step_flags = [
        ("--clus_step", "Clusters. 0 = ignore & exit, 1 = file, 2 = generate, *3 = generate & save", 3),
        ("--symC_step", "Independent parameters. 0 = ignore & exit, 1 = file, 2 = generate, *3 = generate & save", 3),
        ("--train_step", "Correlation matrix. 0 = ignore & exit, 1 = file, 2 = generate, *3 = generate & save, 4 = skip", 3),
        ("--fit_step", "Fitting. 0 = ignore & exit, 1 = file, 2 = generate, *3 = generate & save", 3),
        ("--pred_step", "Prediction. *0 = skip, 1 = file, 2 = generate, 3 = generate & save", 0),
    ]
    for flag, help_text, default in step_flags:
        parser.add_argument(flag, type=int, help=help_text, default=default)
    parser.add_argument("--refit", action='store_true',
                        help="Perform another fitting, equivalent to \"--clus_step 1 --symC_step 1 --train_step 1\". Default: False", default=False)
    parser.add_argument("--cont", "-c", action='store_true',
                        help="Continue from previous run, equivalent to \"--clus_step 1 --symC_step 1 --train_step 4 --fit_step 1\". Default: False", default=False)
    parser.add_argument("--predict", action='store_true',
                        help="predict, equivalent to \"--cont --pred_step 2 and skipping magnon/phonon steps\". Default: False", default=False)
    parser.add_argument("--override", "-o", action="append", help="Override setting file from command line, e.g. \"[structure] epsilon_inf=eps.txt\"")
def process_common_options(options):
    """
    Expand the --predict / --cont / --refit shortcut flags into the
    individual step options they imply (mutates ``options`` in place).
    """
    def apply(**steps):
        for attr, value in steps.items():
            setattr(options, attr, value)

    # Quick prediction mode implies continuing from a previous run.
    if options.predict:
        apply(cont=True, pred_step=2)
    # Continue: reuse clusters/parameters, skip training, load fit.
    if options.cont:
        apply(clus_step=1, symC_step=1, train_step=4, fit_step=1)
    # Refit: reuse clusters/parameters/correlation, redo the fit.
    if options.refit:
        apply(clus_step=1, symC_step=1, train_step=1)
def override_from_commandline(override_list, settings):
    """
    Apply "[section] tag=value" overrides from the command line to `settings`.

    :param override_list: list of override strings, e.g.
        '[structure] epsilon_inf=eps.txt' (may be None/empty)
    :param settings: ConfigParser-like object supporting ``in``,
        add_section() and set()
    :return: None. On the first malformed entry an error is printed and the
        remaining overrides are skipped (earlier ones stay applied).
    """
    if not override_list:
        return
    pattern = re.compile(r"\[\w+\] *\w+ *=.*")
    for x in override_list:
        if not pattern.match(x):
            print('ERROR: %s is not a valid override. Expecting e.g. [sec] name=val'%(x))
            return
        # "[sec] tag=val" -> section name (lowercased) and the remainder
        sec, other = re.split(r'\]', x[1:], maxsplit=1)
        sec = sec.lower()
        other = other.strip()
        # Split on the first '=' only so values may themselves contain '='.
        # BUGFIX: use the maxsplit keyword; the positional form is
        # deprecated (Python 3.13+).
        tag, val = re.split(r'=', other, maxsplit=1)
        tag = tag.rstrip()
        val = val.lstrip()
        if sec not in settings:
            settings.add_section(sec)
        settings.set(sec, tag, val)
def upon_exit(pdfout):
    """Close the PDF output handle at program exit, if one was opened."""
    if pdfout is None:
        return
    pdfout.close()
def init_training(model, setting, step, **kwargs):
    """
    Obtain the correlation (sensing) matrix and training values for fitting.

    :param model: lattice model providing get_correlation() (used for step 2/3)
    :param setting: dict-like section with corr_*/fval_*/traindat* keys
    :param step: <=0 = abort, 1 = load from file, 2 = generate,
                 3 = generate & save, 4 = skip (returns (None, None))
    :return: (Amat, fval) where Amat is a sparse correlation matrix and fval
             is an (nobs, 3) array whose column 0 holds the training values
    """
    if step <= 0:
        exit(0)
    elif step == 4:
        # Training explicitly skipped (e.g. --cont): nothing to load.
        Amat = None
        fval = None
    elif step == 1:
        # BUGFIX: import load_matrix only where it is used, and drop the
        # unused `co` import (was imported unconditionally at function entry).
        from csld.util.io_utils import load_matrix
        Amat = scipy.sparse.csr_matrix(load_matrix(setting['corr_in']))
        fval = np.zeros((Amat.shape[0], 3))
        fval[:, 0] = np.loadtxt(setting['fval_in'])
    elif step in [2, 3]:
        # Each "traindat*" entry is "supercell glob [glob ...]".
        traindat = [y.split() for x, y in setting.items() if re.match('traindat.*', x) is not None]
        Amat, fval = model.get_correlation([[sc[0],
                                             [f for subs in sc[1:]
                                              for f in sorted(glob.glob(subs))]] for sc in traindat],
                                           corrtype=setting['corr_type'], setting=setting, **kwargs)
        if step == 3:
            # BUGFIX: scipy.io is not pulled in by `import scipy.sparse`;
            # import it explicitly before calling mmwrite.
            import scipy.io
            scipy.io.mmwrite(setting['corr_out'], Amat)
            np.savetxt(setting['fval_out'], fval[:, 0])
    else:
        print("ERROR: Unknown corr_step: ", step)
        exit(-1)
    print("+ Correlation matrix for training done")
    return Amat, fval
def fit_data(model, Amat, fval, setting, step, pdfout):
    """
    Fitting

    :param model: model providing load_solution()/get_submodels()
    :param Amat: correlation matrix (or None if training was skipped)
    :param fval: training values; column 0 holds the target vector
    :param setting: dict-like section with fitting options
    :param step: <=0 = abort, 1 = load solution from file, 2 = fit,
                 3 = fit & save
    :param pdfout: PDF output handle for diagnostic plots
    :return: optimal solution as (ibest, solutions, rel_err)
    """
    if step <= 0:
        exit(0)
    elif step == 1:
        solutions = model.load_solution(setting['solution_in'])
        if Amat is not None:
            # Rank the loaded solutions by residual std against the training data.
            err = [np.std(Amat.dot(solutions[i])-fval[:,0]) for i in range(solutions.shape[0])]
            ibest = np.argmin(err)
            # BUGFIX: rel_err was left undefined on this branch, causing a
            # NameError at the return statement below.
            rel_err = err
        else:
            ibest = 0
            rel_err = None
        if solutions.size <= 0:
            logging.error("ERROR: empty solution")
            exit(-1)
        if solutions.shape[0] > 1:
            logging.warning("More than 1 solutions found. Returning the first.")
    elif step in [2, 3]:
        mulist = list(map(float, setting['mulist'].split()))
        submodels = [y.split() for x, y in setting.items() if re.match('submodel.*', x) is not None]
        submodels = [[x[0], ' '.join(x[1:])] for x in submodels]
        # Optional known solution to keep fixed during the fit.
        knownsol = setting.get('solution_known', '')
        submodels = model.get_submodels(submodels, setting=setting, knownsol=knownsol)
        ibest, solutions, rel_err = csfit(Amat, fval[:,0], 1, mulist,
                                          method=int(setting['method']),
                                          maxIter=int(setting['maxiter']),
                                          tol=float(setting['tolerance']),
                                          nSubset=int(setting['nsubset']),
                                          subsetsize=float(setting['subsetsize']),
                                          holdsize=float(setting['holdsize']),
                                          lbd=float(setting['lambda']),
                                          # bcs options
                                          reweight=setting.getboolean('bcs_reweight', False),
                                          penalty=setting.get('bcs_penalty', 'arctan'),
                                          jcutoff=setting.getfloat('bcs_jcutoff',1E-7),
                                          sigma2=setting.getfloat('bcs_sigma2',-1.0),
                                          eta=setting.getfloat('bcs_eta',1E-3),
                                          fitf=setting.get('true_v_fit'),
                                          submodels=submodels, pdfout=pdfout)
        if step == 3:
            np.savetxt(setting['solution_out'], solutions)
            # Expand the reduced solution back to the full parameter space.
            np.savetxt(setting['solution_out']+'_full', model.Cmat.T.dot(np.array(solutions)[:,:model.Cmat.shape[0]].T).T)
    else:
        print("ERROR: Unknown fit_step: ", step)
        exit(-1)
    print("+ Fitting done. Best solution", ibest)
    return ibest, solutions, rel_err
def predict(model, sols, setting, step):
    """
    Evaluate fitted solutions against a prediction (hold-out) data set.

    :param model: lattice model, forwarded to init_training()
    :param sols: sequence of solution vectors to evaluate
    :param setting: dict-like section with training/prediction options
    :param step: *0 = skip, 1 = file, 2 = generate, 3 = generate & save
    :return: index of the solution with the smallest error, or None if skipped
    """
    if step <= 0:
        return
    if step not in (1, 2, 3):
        print("ERROR: Unknown pred_step: ", step)
        exit(-1)
    Amat, fval = init_training(model, setting, step, delForce=0)
    errs = []
    for i, sol in enumerate(sols):
        err = predict_holdout(Amat, fval[:, 0], sol)
        errs.append(err[0])
        print(" sol# %d: err= (%.2f%%) %f" % (i, err[0], err[1]))
        # Save predicted-vs-actual pairs for this solution.
        np.savetxt("%s_%d" % (setting['fval_out'], i), np.transpose(err[2:4]))
    print("+ Prediction done")
    return np.argmin(errs)
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.errors = exports.enums = exports.Client = undefined;
require('./logging/request');
var _enums = require('./enums');
var enums = _interopRequireWildcard(_enums);
var _errors = require('./errors');
var errors = _interopRequireWildcard(_errors);
var _client = require('./client');
var _client2 = _interopRequireDefault(_client);
function _interopRequireDefault(obj) {
  // Babel interop: genuine ES modules pass through untouched; CommonJS
  // values are wrapped so that `.default` access works uniformly.
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
function _interopRequireWildcard(obj) {
  // Babel interop for `import * as ns`: ES modules are returned as-is.
  if (obj && obj.__esModule) {
    return obj;
  }
  // Otherwise copy every own enumerable property onto a fresh namespace
  // object and expose the original value as its `default` export.
  var ns = {};
  if (obj != null) {
    for (var key in obj) {
      if (Object.prototype.hasOwnProperty.call(obj, key)) {
        ns[key] = obj[key];
      }
    }
  }
  ns.default = obj;
  return ns;
}
/**
 * Export `Client`, `enums` and `errors`.
 * (This comment and the "Module dependencies" one below were hoisted here
 * by the Babel CommonJS transform; the require() calls live at the top of
 * the file.)
 */
/**
 * Module dependencies.
 */
exports.Client = _client2.default;
exports.enums = enums;
exports.errors = errors;
|
# Packaging script for country_levels_lib.
from setuptools import setup
# Runtime dependencies (kept alphabetical).
requirements = [
    'area',
    'black',
    'geojson',
    'requests',
    'shapely',
    'sparqlwrapper',
    'timezonefinder',
    'xlsx2csv',
]
setup(name='country_levels_lib', python_requires='>=3.7', install_requires=requirements)
|
/* -*- mode: c; c-basic-offset: 4; indent-tabs-mode: nil -*- */
/* lib/krb5/os/read_msg.c */
/*
* Copyright 1991 by the Massachusetts Institute of Technology.
* All Rights Reserved.
*
* Export of this software from the United States of America may
* require a specific license from the United States Government.
* It is the responsibility of any person or organization contemplating
* export to obtain such a license before exporting.
*
* WITHIN THAT CONSTRAINT, permission to use, copy, modify, and
* distribute this software and its documentation for any purpose and
* without fee is hereby granted, provided that the above copyright
* notice appear in all copies and that both that copyright notice and
* this permission notice appear in supporting documentation, and that
* the name of M.I.T. not be used in advertising or publicity pertaining
* to distribution of the software without specific, written prior
* permission. Furthermore if you modify this software you must label
* your software as modified software and not distribute it in such a
* fashion that it might be confused with the original M.I.T. software.
* M.I.T. makes no representations about the suitability of
* this software for any purpose. It is provided "as is" without express
* or implied warranty.
*/
#include <errno.h>
#include "k5-int.h"
/*
 * Read one length-prefixed message from the socket/fd pointed to by fdp:
 * a 4-byte network-order length followed by that many payload bytes.
 * On success, *inbuf owns a freshly malloc'd buffer with the payload.
 * Returns 0, ENOMEM, ECONNABORTED (short read), or errno from the read.
 */
krb5_error_code krb5_read_message(krb5_context context, krb5_pointer fdp,
                                  krb5_data* inbuf)
{
    krb5_int32 len;
    int len2, ilen;
    char* buf = NULL;
    int fd = *((int*)fdp);

    /* Start from a well-defined empty state in case of early return. */
    *inbuf = empty_data();
    /* Read the 4-byte big-endian length prefix. */
    if ((len2 = krb5_net_read(context, fd, (char*)&len, 4)) != 4)
        return ((len2 < 0) ? errno : ECONNABORTED);
    len = ntohl(len);
    /* Reject lengths whose bits don't fit the valid unsigned range. */
    if ((len & VALID_UINT_BITS) != (krb5_ui_4)len) /* Overflow size_t??? */
        return ENOMEM;
    ilen = (int)len;
    if (ilen)
    {
        /*
         * We may want to include a sanity check here someday....
         */
        if (!(buf = malloc(ilen)))
        {
            return (ENOMEM);
        }
        /* Read exactly ilen payload bytes; free the buffer on any short read. */
        if ((len2 = krb5_net_read(context, fd, buf, ilen)) != ilen)
        {
            free(buf);
            return ((len2 < 0) ? errno : ECONNABORTED);
        }
    }
    /* Hand ownership of buf (possibly NULL for a zero-length message) to caller. */
    *inbuf = make_data(buf, ilen);
    return (0);
}
|
# -*- coding: utf-8 -*-
# File: config.py
import numpy as np
import os
import pprint
import six
from tensorpack.utils import logger
from tensorpack.utils.gpu import get_num_gpu
__all__ = ['config', 'finalize_configs']
class AttrDict():
    """
    Hierarchical attribute dictionary used for configuration.

    Accessing an unknown attribute implicitly creates a nested AttrDict, so
    ``cfg.A.B = 1`` works without declaring ``A`` first.  Call :meth:`freeze`
    to avoid accidental creation of new hierarchies and new keys.

    BUGFIX: this docstring was previously a stray string literal placed after
    ``_freezed`` (a no-op statement), so the class had no ``__doc__``.
    """
    # Whether this (sub-)dict rejects unknown attributes / new keys.
    _freezed = False

    def __getattr__(self, name):
        # Only invoked when normal lookup fails, i.e. for keys that do not
        # exist yet: create and attach a new sub-hierarchy on the fly.
        if self._freezed:
            raise AttributeError(name)
        if name.startswith('_'):
            # Do not mess with internals. Otherwise copy/pickle will fail
            raise AttributeError(name)
        ret = AttrDict()
        setattr(self, name, ret)
        return ret

    def __setattr__(self, name, value):
        # When frozen, only already-existing keys may be reassigned.
        if self._freezed and name not in self.__dict__:
            raise AttributeError(
                "Config was freezed! Unknown config: {}".format(name))
        super().__setattr__(name, value)

    def __str__(self):
        return pprint.pformat(self.to_dict(), indent=1, width=100, compact=True)

    __repr__ = __str__

    def to_dict(self):
        """Convert to a nested dict. """
        return {k: v.to_dict() if isinstance(v, AttrDict) else v
                for k, v in self.__dict__.items() if not k.startswith('_')}

    def update_args(self, args):
        """Update from command line args, each of the form ``a.b.c=value``. """
        for cfg in args:
            keys, v = cfg.split('=', maxsplit=1)
            keylist = keys.split('.')

            dic = self
            for i, k in enumerate(keylist[:-1]):
                assert k in dir(dic), "Unknown config key: {}".format(keys)
                dic = getattr(dic, k)
            key = keylist[-1]

            oldv = getattr(dic, key)
            if not isinstance(oldv, str):
                # NOTE(review): eval() on a command-line value; only safe
                # with trusted input.
                v = eval(v)
            setattr(dic, key, v)

    def freeze(self, freezed=True):
        """Recursively (un)freeze this dict and every nested AttrDict."""
        self._freezed = freezed
        for v in self.__dict__.values():
            if isinstance(v, AttrDict):
                v.freeze(freezed)

    # avoid silent bugs
    def __eq__(self, _):
        raise NotImplementedError()

    def __ne__(self, _):
        raise NotImplementedError()
# Global configuration singleton: import `config` (or the `_C` alias) and read
# attributes directly.  `finalize_configs()` below validates and completes it.
config = AttrDict()
_C = config # short alias to avoid coding
# mode flags ---------------------
_C.TRAINER = 'replicated' # options: 'horovod', 'replicated'
_C.MODE_MASK = True # FasterRCNN or MaskRCNN
_C.MODE_FPN = False
# dataset -----------------------
_C.DATA.BASEDIR = '/path/to/your/DATA/DIR'
# All TRAIN dataset will be concatenated for training.
_C.DATA.TRAIN = ('coco_train2014', 'coco_valminusminival2014') # i.e. trainval35k, AKA train2017
# Each VAL dataset will be evaluated separately (instead of concatenated)
_C.DATA.VAL = ('coco_minival2014', ) # AKA val2017
# These two configs will be populated later by the dataset loader:
_C.DATA.NUM_CATEGORY = 80 # without the background class (e.g., 80 for COCO)
_C.DATA.CLASS_NAMES = [] # NUM_CLASS (NUM_CATEGORY+1) strings, the first is "BG".
# whether the coordinates in the annotations are absolute pixel values, or a relative value in [0, 1]
_C.DATA.ABSOLUTE_COORD = True
# Number of data loading workers.
# In case of horovod training, this is the number of workers per-GPU (so you may want to use a smaller number).
# Set to 0 to disable parallel data loading
_C.DATA.NUM_WORKERS = 10
# backbone ----------------------
_C.BACKBONE.WEIGHTS = '' # /path/to/weights.npz
_C.BACKBONE.RESNET_NUM_BLOCKS = [3, 4, 6, 3] # for resnet50
# RESNET_NUM_BLOCKS = [3, 4, 23, 3] # for resnet101
_C.BACKBONE.FREEZE_AFFINE = False # do not train affine parameters inside norm layers
_C.BACKBONE.NORM = 'FreezeBN' # options: FreezeBN, SyncBN, GN, None
_C.BACKBONE.FREEZE_AT = 2 # options: 0, 1, 2
# Use a base model with TF-preferred padding mode,
# which may pad more pixels on right/bottom than top/left.
# See https://github.com/tensorflow/tensorflow/issues/18213
# In tensorpack model zoo, ResNet models with TF_PAD_MODE=False are marked with "-AlignPadding".
# All other models under `ResNet/` in the model zoo are using TF_PAD_MODE=True.
# Using either one should probably give the same performance.
# We use the "AlignPadding" one just to be consistent with caffe2.
_C.BACKBONE.TF_PAD_MODE = False
_C.BACKBONE.STRIDE_1X1 = False # True for MSRA models
# schedule -----------------------
_C.TRAIN.NUM_GPUS = None # by default, will be set from code
_C.TRAIN.WEIGHT_DECAY = 1e-4
_C.TRAIN.BASE_LR = 1e-2 # defined for total batch size=8. Otherwise it will be adjusted automatically
_C.TRAIN.WARMUP = 1000 # in terms of iterations. This is not affected by #GPUs
_C.TRAIN.WARMUP_INIT_LR = 1e-2 * 0.33 # defined for total batch size=8. Otherwise it will be adjusted automatically
_C.TRAIN.STEPS_PER_EPOCH = 500
_C.TRAIN.STARTING_EPOCH = 1 # the first epoch to start with, useful to continue a training
# LR_SCHEDULE means equivalent steps when the total batch size is 8.
# When the total bs!=8, the actual iterations to decrease learning rate, and
# the base learning rate are computed from BASE_LR and LR_SCHEDULE.
# Therefore, there is *no need* to modify the config if you only change the number of GPUs.
_C.TRAIN.LR_SCHEDULE = [120000, 160000, 180000] # "1x" schedule in detectron
# _C.TRAIN.LR_SCHEDULE = [240000, 320000, 360000] # "2x" schedule in detectron
# Longer schedules for from-scratch training (https://arxiv.org/abs/1811.08883):
# _C.TRAIN.LR_SCHEDULE = [960000, 1040000, 1080000] # "6x" schedule in detectron
# _C.TRAIN.LR_SCHEDULE = [1500000, 1580000, 1620000] # "9x" schedule in detectron
_C.TRAIN.EVAL_PERIOD = 25 # period (epochs) to run evaluation
# preprocessing --------------------
# Alternative old (worse & faster) setting: 600
_C.PREPROC.TRAIN_SHORT_EDGE_SIZE = [800, 800] # [min, max] to sample from
_C.PREPROC.TEST_SHORT_EDGE_SIZE = 800
_C.PREPROC.MAX_SIZE = 1333
# mean and std in RGB order.
# Un-scaled version: [0.485, 0.456, 0.406], [0.229, 0.224, 0.225]
_C.PREPROC.PIXEL_MEAN = [123.675, 116.28, 103.53]
_C.PREPROC.PIXEL_STD = [58.395, 57.12, 57.375]
# anchors -------------------------
_C.RPN.ANCHOR_STRIDE = 16
_C.RPN.ANCHOR_SIZES = (32, 64, 128, 256, 512) # sqrtarea of the anchor box
_C.RPN.ANCHOR_RATIOS = (0.5, 1., 2.)
_C.RPN.POSITIVE_ANCHOR_THRESH = 0.7
_C.RPN.NEGATIVE_ANCHOR_THRESH = 0.3
# rpn training -------------------------
_C.RPN.FG_RATIO = 0.5 # fg ratio among selected RPN anchors
_C.RPN.BATCH_PER_IM = 256 # total (across FPN levels) number of anchors that are marked valid
_C.RPN.MIN_SIZE = 0
_C.RPN.PROPOSAL_NMS_THRESH = 0.7
# Anchors which overlap with a crowd box (IOA larger than threshold) will be ignored.
# Setting this to a value larger than 1.0 will disable the feature.
# It is disabled by default because Detectron does not do this.
_C.RPN.CROWD_OVERLAP_THRESH = 9.99
_C.RPN.HEAD_DIM = 1024 # used in C4 only
# RPN proposal selection -------------------------------
# for C4
_C.RPN.TRAIN_PRE_NMS_TOPK = 12000
_C.RPN.TRAIN_POST_NMS_TOPK = 2000
_C.RPN.TEST_PRE_NMS_TOPK = 6000
_C.RPN.TEST_POST_NMS_TOPK = 1000 # if you encounter OOM in inference, set this to a smaller number
# for FPN, #proposals per-level and #proposals after merging are (for now) the same
# if FPN.PROPOSAL_MODE = 'Joint', these options have no effect
_C.RPN.TRAIN_PER_LEVEL_NMS_TOPK = 2000
_C.RPN.TEST_PER_LEVEL_NMS_TOPK = 1000
# fastrcnn training ---------------------
_C.FRCNN.BATCH_PER_IM = 512
_C.FRCNN.BBOX_REG_WEIGHTS = [10., 10., 5., 5.] # Slightly better setting: 20, 20, 10, 10
_C.FRCNN.FG_THRESH = 0.5
_C.FRCNN.FG_RATIO = 0.25 # fg ratio in a ROI batch
# FPN -------------------------
_C.FPN.ANCHOR_STRIDES = (4, 8, 16, 32, 64) # strides for each FPN level. Must be the same length as ANCHOR_SIZES
_C.FPN.PROPOSAL_MODE = 'Level' # 'Level', 'Joint'
_C.FPN.NUM_CHANNEL = 256
_C.FPN.NORM = 'None' # 'None', 'GN'
# The head option is only used in FPN. For C4 models, the head is C5
_C.FPN.FRCNN_HEAD_FUNC = 'fastrcnn_2fc_head'
# choices: fastrcnn_2fc_head, fastrcnn_4conv1fc_{,gn_}head
_C.FPN.FRCNN_CONV_HEAD_DIM = 256
_C.FPN.FRCNN_FC_HEAD_DIM = 1024
_C.FPN.MRCNN_HEAD_FUNC = 'maskrcnn_up4conv_head' # choices: maskrcnn_up4conv_{,gn_}head
# Mask-RCNN
_C.MRCNN.HEAD_DIM = 256
# Cascade-RCNN, only available in FPN mode
_C.FPN.CASCADE = False
_C.CASCADE.IOUS = [0.5, 0.6, 0.7]
_C.CASCADE.BBOX_REG_WEIGHTS = [[10., 10., 5., 5.], [20., 20., 10., 10.], [30., 30., 15., 15.]]
# testing -----------------------
_C.TEST.FRCNN_NMS_THRESH = 0.5
# Smaller threshold value gives significantly better mAP. But we use 0.05 for consistency with Detectron.
# mAP with 1e-4 threshold can be found at https://github.com/tensorpack/tensorpack/commit/26321ae58120af2568bdbf2269f32aa708d425a8#diff-61085c48abee915b584027e1085e1043 # noqa
_C.TEST.RESULT_SCORE_THRESH = 0.05
_C.TEST.RESULT_SCORE_THRESH_VIS = 0.5 # only visualize confident results
_C.TEST.RESULTS_PER_IM = 100
_C.freeze() # avoid typo / wrong config keys
def finalize_configs(is_training):
    """
    Run some sanity checks, and populate some configs from others.

    Mutates the global `_C`, then re-freezes it.  Must be called once after
    all user overrides are applied and before training/inference starts.
    """
    _C.freeze(False) # populate new keys now
    if isinstance(_C.DATA.VAL, six.string_types): # support single string (the typical case) as well
        _C.DATA.VAL = (_C.DATA.VAL, )
    if isinstance(_C.DATA.TRAIN, six.string_types): # support single string
        _C.DATA.TRAIN = (_C.DATA.TRAIN, )
    assert _C.BACKBONE.NORM in ['FreezeBN', 'SyncBN', 'GN', 'None'], _C.BACKBONE.NORM
    if _C.BACKBONE.NORM != 'FreezeBN':
        assert not _C.BACKBONE.FREEZE_AFFINE
    assert _C.BACKBONE.FREEZE_AT in [0, 1, 2]
    # Number of anchors per feature-map location.
    _C.RPN.NUM_ANCHOR = len(_C.RPN.ANCHOR_SIZES) * len(_C.RPN.ANCHOR_RATIOS)
    assert len(_C.FPN.ANCHOR_STRIDES) == len(_C.RPN.ANCHOR_SIZES)
    # image size into the backbone has to be multiple of this number
    _C.FPN.RESOLUTION_REQUIREMENT = _C.FPN.ANCHOR_STRIDES[3] # [3] because we build FPN with features r2,r3,r4,r5
    if _C.MODE_FPN:
        size_mult = _C.FPN.RESOLUTION_REQUIREMENT * 1.
        _C.PREPROC.MAX_SIZE = np.ceil(_C.PREPROC.MAX_SIZE / size_mult) * size_mult
        assert _C.FPN.PROPOSAL_MODE in ['Level', 'Joint']
        assert _C.FPN.FRCNN_HEAD_FUNC.endswith('_head')
        assert _C.FPN.MRCNN_HEAD_FUNC.endswith('_head')
        assert _C.FPN.NORM in ['None', 'GN']
    if _C.FPN.CASCADE:
        # the first threshold is the proposal sampling threshold
        assert _C.CASCADE.IOUS[0] == _C.FRCNN.FG_THRESH
        assert len(_C.CASCADE.BBOX_REG_WEIGHTS) == len(_C.CASCADE.IOUS)
    if is_training:
        train_scales = _C.PREPROC.TRAIN_SHORT_EDGE_SIZE
        if isinstance(train_scales, (list, tuple)) and train_scales[1] - train_scales[0] > 100:
            # don't autotune if augmentation is on
            os.environ['TF_CUDNN_USE_AUTOTUNE'] = '0'
        os.environ['TF_AUTOTUNE_THRESHOLD'] = '1'
        assert _C.TRAINER in ['horovod', 'replicated'], _C.TRAINER
        # setup NUM_GPUS
        if _C.TRAINER == 'horovod':
            # horovod defines the world size; imported lazily so non-horovod
            # runs do not need the package installed.
            import horovod.tensorflow as hvd
            ngpu = hvd.size()
        else:
            # guard against accidentally launching the replicated trainer under mpirun
            assert 'OMPI_COMM_WORLD_SIZE' not in os.environ
            ngpu = get_num_gpu()
        assert ngpu > 0, "Has to train with GPU!"
        assert ngpu % 8 == 0 or 8 % ngpu == 0, "Can only train with 1,2,4 or >=8 GPUs, but found {} GPUs".format(ngpu)
    else:
        # autotune is too slow for inference
        os.environ['TF_CUDNN_USE_AUTOTUNE'] = '0'
        ngpu = get_num_gpu()
    if _C.TRAIN.NUM_GPUS is None:
        _C.TRAIN.NUM_GPUS = ngpu
    else:
        if _C.TRAINER == 'horovod':
            assert _C.TRAIN.NUM_GPUS == ngpu
        else:
            assert _C.TRAIN.NUM_GPUS <= ngpu
    _C.freeze()
    logger.info("Config: ------------------------------------------\n" + str(_C))
|
import card from "./card.jpg";
import avatar from "@asset/badges/ningguang.png";
import skill from "./ningguang.skill.js";
// Character data entry for Ningguang (凝光).
// NOTE(review): `element`/`weapon` use this project's internal vocabulary
// ('rock', 'book') — confirm against the consuming code before renaming.
export default {
    cardURL: card,      // card artwork imported above
    detailURL: "",      // no detail image configured yet
    name: "ningguang",
    chs: "凝光",
    element: "rock",
    weapon: "book",
    star: 4,            // rarity
    avatar,             // badge image imported above
    skill,              // skill definitions imported above
}
|
'use strict';
const common = require('../common');
const fs = require('fs');
const path = require('path');
// Benchmark matrix: n iterations of fs.statSync() on a missing path, either
// catching the thrown error ('throw') or suppressing it with the
// `throwIfNoEntry: false` option ('noThrow').
const bench = common.createBenchmark(main, {
  n: [1e6],
  statSyncType: ['throw', 'noThrow']
});
/**
 * Benchmark body: stat a non-existent path n times, using either the
 * non-throwing option form or a try/catch around the throwing form.
 */
function main({ n, statSyncType }) {
  const target = path.join(__dirname, 'non.existent');
  const useNoThrow = statSyncType === 'noThrow';
  bench.start();
  for (let i = 0; i < n; i++) {
    if (useNoThrow) {
      fs.statSync(target, { throwIfNoEntry: false });
    } else {
      try {
        fs.statSync(target);
      } catch {
      }
    }
  }
  bench.end(n);
}
|
/*
* This header is generated by classdump-dyld 1.0
* on Saturday, June 1, 2019 at 6:52:01 PM Mountain Standard Time
* Operating System: Version 12.1.1 (Build 16C5050a)
* Image Source: /System/Library/PrivateFrameworks/UserNotificationsServer.framework/UserNotificationsServer
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos.
*/
// Observer protocol for remote-notification (push) token delivery.
// NOTE(review): generated header — argument names are lost; arg1 is
// presumably the token data and arg2 the owning app's bundle identifier.
@protocol UNSRemoteNotificationServerObserver <NSObject>
@required
// Invoked when a device token becomes available for the given bundle.
-(void)didReceiveDeviceToken:(id)arg1 forBundleIdentifier:(id)arg2;
@end
|
/* TURTLEVW.H
040130 aggiunto BitMultiSave
040126 in BitLoad e BitSave mod. tipo terzo argomento
040123 nuovi CTurtleView::BitLoad e CTurtleView::BitSave; rinominato vecchi in TurtleLoad e TurtleSave
031030 aggiunto metodo OnSetCursor e attributo m_hCursor
000502 aggiunto SetTextProperties
000402 aggiunto m_bPenTransparent e m_bFloodTransparent
000113 aggiunto m_dosFloodColor, GetDosFloodColor, SetDosFloodColor, SetPixel
991226 m_pBitmap[] in iperlvw.h (condivise tra finestre Tarta
991116 aggiunto m_isSelectionOn
991105 spostato gestione menu da turtleCtlr a turtleVw
991014 OVsZoom (OVs)
991014 mediato print e print-preview (OVs)
991012 riattivato override di PreCreateWindow
991011 aggiunto m_ogID
990920 aggiunto m_dlgEntry
990901 per compatibilita' con v.3.06, reinserito OnCmdMsg e OnWndMsg
990825 uso di MvcScrollView_T anziche' MvcScrollWrapper_T
come da sample ShowcaseView.h
990819 impaccamento membri di struttura: /Zp1 -> /Zp8 (standard)
990818 sostituito CScrollView con CView
990814 aggiunto OnPrepareDC (OVs); m_vpCanvas -> m_vpDiagram
990813 aggiunto OnCmdMsg e OnWndMsg (OVs)
990812 variabile per viewport di MVC (OVs)
970801 Iniziato implementazione funzioni di menu
*/
// turtlevw.h : header file
//
// #include "ilcpp.h"
#include "stdafx.h"
#include "logo.h"
// #include "turtleDoc.h" // OVs
// #include "turtleVp.h" // OVs
// #include "MvcScrollView.h" /// OVs
/////////////////////////////////////////////////////////////////////////////
// CTurtleView view
#ifndef DEF_TURTLEVIEW
#define DEF_TURTLEVIEW
//#define MAX_BITMAPS 1024
/// class CTurtleView : public CView
#ifndef OVs
class CTurtleView : public CView
#else // OVs
class CTurtleView : public MvcScrollView_T<CTurtleViewport>
#endif // OVs
{
DECLARE_DYNCREATE(CTurtleView)
protected:
CTurtleView(); // protected constructor used by dynamic creation
BOOL PreCreateWindow(CREATESTRUCT& cs); ////
CMenu* MenuGetItem (UINT iPos);
// Attributes
protected:
#ifdef OVs
//>OVs
// This is the viewport that draws the visual representataion of
// the model contained in the document object. Wrapper classes
// are used to extend the viewport to support scrolling and
// offscreen buffering.
/// MvcScrollWrapper_T<CTurtleViewport> m_vpDiagram;
CTurtleModel* m_pModel;
CODComponent* m_pCursor; // graphic cursor (tarta)
CODLineComponent* m_pLine; // current line (spezzata)
BOOL m_isPolyline;
CODLineComponent* m_pPolygon;// current polygon
BOOL m_isPolygon;
#endif // OVs
public:
BOOL m_isOVs;
node m_dlgEntry;
#ifdef OVs
CTurtleDoc* GetDocument();
#endif OVs
/// CTurtleViewport* GetViewport();
//<OVs
// Operations
public:
//>OVs
// Overrides (da CShowcaseView)
virtual void OnPrepareDC(CDC* pDC, CPrintInfo* pInfo = NULL);
virtual BOOL OnPreparePrinting(CPrintInfo* pInfo);
virtual void OnBeginPrinting(CDC* pDC, CPrintInfo* pInfo);
virtual void OnEndPrinting(CDC* pDC, CPrintInfo* pInfo);
virtual void OnPrint(CDC* pDC, CPrintInfo*);
virtual void OnEndPrintPreview(CDC* pDC, CPrintInfo* pInfo, POINT point, CPreviewView* pView);
virtual BOOL OnCmdMsg(UINT nID, int nCode, void* pExtra, AFX_CMDHANDLERINFO* pHandlerInfo);
virtual BOOL OnWndMsg(UINT message, WPARAM wParam, LPARAM lParam, LRESULT* pResult);
// int CreateToolbar (UINT toolbarId, char* toolbarTitle, UINT toolarStyle, UINT toolbarDocking);
#ifdef OVs
int OVsCreateToolbar (UINT toolbarId, char* toolbarTitle, UINT toolarStyle, UINT toolbarDocking);
int OVsShowToolbar (CToolBar* pToolbar, int show);
int OVsTools (int nextTools);
int OVsProps (int nextProps);
void GoHome();
void ClearScreen (BOOL goHome);
CODLineComponent* CloseLine();
void CreateCursor ();
void ShowCursor (BOOL show);
void MoveCursor (int x, int y, double alfa);
int BitLoadFromFile (CString strImagePath, int mode);
BOOL FileLoad(BOOL bOVsMode, CString strFileName);
BOOL FileSave(BOOL bOVsMode, BOOL bSaveAs, CString strFileName);
void PrintPreview ();
void SetComponentProperties (CODComponent* pComp);
void SetTextProperties (CODTextComponent* pText);
int OVsZoom (double factor);
//<OVs
#endif // OVs
unsigned long GetAttr ();
void SetAttr (node attrList);
void GetName(char* name);
void BitmapNew();
int GetPenStyle();
void SetPenStyle(int style);
int GetPenWidth();
void SetPenWidth(int width);
COLORREF GetPenColor();
void SetPenColor(COLORREF color);
COLORREF GetTextBkColor();
void SetTextBkColor(COLORREF color);
COLORREF GetBkColor();
void SetBkColor(COLORREF color);
COLORREF GetFloodColor();
void SetFloodColor(COLORREF color);
int GetDosPalette();
void SetDosPalette(int iPal);
int GetDosPenColor();
void SetDosPenColor(int iCol);
int GetDosBkColor();
void SetDosBkColor(int iCol);
int GetDosFloodColor();
void SetDosFloodColor(int iCol);
int GetDrawMode();
void SetDrawMode(int DrawMode);
int GetTextBkMode();
void SetTextBkMode(int mode);
LOGFONT *GetTextFont();
BOOL SetTextFont(LOGFONT *lpLogFont);
void SetRefresh(BOOL noyes);
COLORREF GetPixel(int x, int y);
int SetPixel(int x, int y, COLORREF color); // 000113
int Point(int x, int y);
int MoveTo(int x, int y);
int LineTo(int x, int y);
int Arc(int x1, int y1, int x2, int y2, int x3, int y3, int x4, int y4);
int TextOut(int x, int y, char *s);
BOOL FloodFill(int x, int y);
void FillRect (int x, int y, int x2, int y2, COLORREF *pColor);
int Zoom (double factor, int xPos, int yPos, int xDim, int yDim);
int Resize (int xPos, int yPos, int xDim, int yDim);
void SetMouseState (int state);
CPoint GetMousePos ();
void SetKeybState (int state);
UINT GetKey ();
CPoint ViewToCanvas (CPoint point);
void ShowSelectArea ();
//>040123
//int BitLoad (int handle, int mode);
//int BitSave (int handle, int mode);
int TurtleLoad (int handle, int mode);
int TurtleSave (int handle, int mode);
int BitLoad (int bitIndex, char *filename, DWORD imagetype);
int BitSave (int bitIndex, char *filename, DWORD imagetype);
//<040123
int BitMultiSave (char *filename, DWORD imagetype, int nLoops, node bitIndexList, node delayList); // 040130
int BitCutCopy (int bitIndex, int is_cut, int x, int y, int width, int height);
int BitPaste (int bitIndex, int bitMode, int x, int y);
int BitFit (int bitIndex, int width, int height);
// int BitPrint ();
int TurtlePrint ();
int BitmapPrint (int xLowView, int yLowView, int xHighView, int yHighView);
void ActivatePalette (CDC *pDC);
void ClearPalette ();
// void SetActiveArea (double xLowArea, double xHighArea, double yLowArea, double yHighArea);
// void GetActiveArea (double *xLowArea, double *xHighArea, double *yLowArea, double *yHighArea);
void SetActiveArea (double xLowArea, double yLowArea, double xHighArea, double yHighArea);
void GetActiveArea (double *xLowArea, double *yLowArea, double *xHighArea, double *yHighArea);
int SelectionToActiveArea ();
void ResetActiveArea ();
void Center (int x, int y);
void OnTurtlestatusHide();
void OnTurtlestatusRefresh();
void MapClipInvalidateRect (int top, int left, int bottom, int right);
BOOL CTurtleView::GetModify();
void CTurtleView::SetModify(BOOL bModified);
int CTurtleView::ConfirmExit();
// Implementation
public:
virtual ~CTurtleView();
#ifdef _DEBUG
virtual void AssertValid() const;
virtual void Dump(CDumpContext& dc) const;
#endif
protected:
// New
virtual void OnDraw(CDC* pDC); // overridden to draw this view
virtual void OnInitialUpdate(); // first time after construct
HBITMAP GetClipboardData (); // Clipboard data handle
BOOL SetClipboardData(HBITMAP hBitmap, HPALETTE hPalette, HGLOBAL hDib);
int m_nDC;
CBitmap *m_pCanvas; // screen image
public: // 000113
int m_dosPalette;
int m_dosPenColor;
int m_dosScreenColor;
int m_dosFloodColor;
int m_dosTextBkColor;
COLORREF m_screenColor;
COLORREF m_floodColor;
COLORREF m_penColor;
COLORREF m_textBkColor; // sfondo per testo e hatched lines
int m_textBkMode; // modalita' sfondo per testo e hatched lines
int m_penStyle;
int m_penWidth;
int m_drawMode;
BOOL m_bPenTransparent; // if TTRUE, performs no writing
BOOL m_bFloodTransparent; // if TTRUE, performs no filling
public:
BOOL m_refresh;
int m_mouseState;
CPoint m_mousePos;
CPoint m_buttonDownPos;
CPoint m_buttonUpPos;
protected:
int m_keybState;
UINT m_key;
UINT m_keybFlags;
LOGFONT* m_pLogFont;
CFont *m_pFont;
// CBitmap *m_pBitmap[MAX_BITMAPS]; // result of BitCut / BitCopy
double m_xLowArea, m_xHighArea, m_yLowArea, m_yHighArea;
CRect m_rectSelectArea; // area selezionata (coordin. fisiche)
BOOL m_isSelectArea; // l'area selezionata e' mostrata
BOOL m_isDrag; // e' in corso dragging del cursore
BOOL m_bModified; // l'immagine e' stata modificata
CWnd* m_pOldMain; // main application window (v. print preview)
public:
BOOL m_isSelectionOn; // e' consentita selezione interattiva (down-drag-up)
LOGFONT m_logFont; // 980401: was protected
int m_xDim, m_yDim;
int m_xPos, m_yPos;
int m_xPixel, m_yPixel; // size of memory canvas
int m_xScroll, m_yScroll; // size of scrolled window
int *m_pMaxX, *m_pMaxY; // pointers to fields in C structures
CWnd* m_frame;
char m_saveFile [64];
struct dview* m_logoView;
int m_nView;
CWnd* m_status;
double m_zoomFactor;
BOOL m_eraseFlag; // modo penna ERASE: usare col.sfondo
#ifdef OVs
ODComponentId m_ogID; // id ultimo oggetto doppiocliccato
#endif // OVs
HCURSOR m_hCursor; // 031030: handle di cursore grafico
// Generated message map functions
public:
//{{AFX_MSG(CTurtleView)
afx_msg void OnBitmapPrint();
afx_msg void OnBitmapActivearea();
afx_msg void OnBitmapLoad();
afx_msg void OnBitmapNew();
afx_msg void OnBitmapSave();
afx_msg void OnBitmapSaveas();
afx_msg void OnSetFloodcolor();
afx_msg void OnSetFont();
afx_msg void OnSetPencolor();
afx_msg void OnSetScreencolor();
afx_msg void OnZoomIn();
afx_msg void OnZoomNormal();
afx_msg void OnZoomOut();
afx_msg void OnTurtlestatusShow();
afx_msg void OnFileClose();
afx_msg void OnPrintSetup();
afx_msg void OnViewCenter();
afx_msg void OnSetPenminus();
afx_msg void OnSetPenplus();
afx_msg void OnSelectTurtle();
afx_msg void OnSetFocus(CWnd* pOldWnd);
afx_msg void OnEditCopy();
afx_msg void OnEditCut();
afx_msg void OnEditDelete();
afx_msg void OnEditPaste();
afx_msg void OnSetPalette();
afx_msg void OnEditFill();
afx_msg void OnEditSelectall();
afx_msg void OnEditSelectarea();
afx_msg void OnKeyDown(UINT nChar, UINT nRepCnt, UINT nFlags);
afx_msg void OnKeyUp(UINT nChar, UINT nRepCnt, UINT nFlags);
afx_msg int OnCreate(LPCREATESTRUCT lpCreateStruct);
afx_msg void OnSize(UINT nType, int cx, int cy);
afx_msg BOOL OnEraseBkgnd(CDC* pDC);
afx_msg void OnMouseMove(UINT nFlags, CPoint point);
afx_msg void OnLButtonDown(UINT nFlags, CPoint point);
afx_msg void OnLButtonUp(UINT nFlags, CPoint point);
afx_msg void OnRButtonDown(UINT nFlags, CPoint point);
afx_msg void OnRButtonUp(UINT nFlags, CPoint point);
afx_msg void OnFilePrint();
afx_msg void OnFilePrintPreview();
afx_msg void OnFilePrintsetup();
afx_msg void OnUpdateFilePrint(CCmdUI* pCmdUI);
afx_msg void OnUpdateFilePrintPreview(CCmdUI* pCmdUI);
afx_msg void OnUpdateEditButta(CCmdUI* pCmdUI);
afx_msg void OnUpdateEditTaglia(CCmdUI* pCmdUI);
afx_msg void OnUpdateEditRiempi(CCmdUI* pCmdUI);
afx_msg void OnUpdateEditCopia(CCmdUI* pCmdUI);
afx_msg void OnFileNew();
afx_msg BOOL OnSetCursor(CWnd* pWnd, UINT nHitTest, UINT message);// 031029
//}}AFX_MSG
afx_msg void OnMenuButton(UINT uID); // 991105
afx_msg void OnUpdateMenuButton(CCmdUI *pCmdUI); // 991105
DECLARE_MESSAGE_MAP()
};
#ifdef OVs
#ifndef _DEBUG // debug version in turtleVw.cpp
// Release-build inline accessor: downcasts the view's stored document
// pointer to the concrete CTurtleDoc type.
inline CTurtleDoc* CTurtleView::GetDocument()
   { return (CTurtleDoc*)m_pDocument; }
#endif
#endif // OVs
#endif
/////////////////////////////////////////////////////////////////////////////
|
'use strict'; // enables strict mode (no implicit globals, etc.) — it does not enforce types
// `var` is function/script-scoped: the assignment inside the block below
// reassigns the SAME outer binding.
var a = 10;
if (true) {
  a = 20;
  console.log(a); // 20
}
console.log(a); // 20 — the in-block reassignment persists
// `let` is block-scoped: the inner `b` shadows the outer one only inside
// the braces.
var b = 30;
if (true) {
  let b = 40;
  console.log(b); // 40 (shadowing inner binding)
}
console.log(b); // 30 — outer binding untouched
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['InsightArgs', 'Insight']
@pulumi.input_type
class InsightArgs:
    # NOTE: @pulumi.input_type introspects __init__'s keyword-only signature
    # and the @pulumi.getter properties below, so the names here are part of
    # the wire contract with the pulumi SDK — do not rename them.
    def __init__(__self__, *,
                 filters: pulumi.Input['InsightFiltersArgs'],
                 group_by_attribute: pulumi.Input[str],
                 name: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Insight resource.
        :param pulumi.Input['InsightFiltersArgs'] filters: A configuration block including one or more (up to 10 distinct) attributes used to filter the findings included in the insight. The insight only includes findings that match criteria defined in the filters. See filters below for more details.
        :param pulumi.Input[str] group_by_attribute: The attribute used to group the findings for the insight e.g. if an insight is grouped by `ResourceId`, then the insight produces a list of resource identifiers.
        :param pulumi.Input[str] name: The name of the custom insight.
        """
        pulumi.set(__self__, "filters", filters)
        pulumi.set(__self__, "group_by_attribute", group_by_attribute)
        if name is not None:
            # `name` stays unset when omitted so the provider can auto-name.
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def filters(self) -> pulumi.Input['InsightFiltersArgs']:
        """
        A configuration block including one or more (up to 10 distinct) attributes used to filter the findings included in the insight. The insight only includes findings that match criteria defined in the filters. See filters below for more details.
        """
        return pulumi.get(self, "filters")

    @filters.setter
    def filters(self, value: pulumi.Input['InsightFiltersArgs']):
        pulumi.set(self, "filters", value)

    @property
    @pulumi.getter(name="groupByAttribute")
    def group_by_attribute(self) -> pulumi.Input[str]:
        """
        The attribute used to group the findings for the insight e.g. if an insight is grouped by `ResourceId`, then the insight produces a list of resource identifiers.
        """
        return pulumi.get(self, "group_by_attribute")

    @group_by_attribute.setter
    def group_by_attribute(self, value: pulumi.Input[str]):
        pulumi.set(self, "group_by_attribute", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the custom insight.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class _InsightState:
    # Internal state bag consumed by Insight.get(); every field is optional
    # because a lookup may supply only a subset of the resource's state.
    def __init__(__self__, *,
                 arn: Optional[pulumi.Input[str]] = None,
                 filters: Optional[pulumi.Input['InsightFiltersArgs']] = None,
                 group_by_attribute: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Insight resources.
        :param pulumi.Input[str] arn: ARN of the insight.
        :param pulumi.Input['InsightFiltersArgs'] filters: A configuration block including one or more (up to 10 distinct) attributes used to filter the findings included in the insight. The insight only includes findings that match criteria defined in the filters. See filters below for more details.
        :param pulumi.Input[str] group_by_attribute: The attribute used to group the findings for the insight e.g. if an insight is grouped by `ResourceId`, then the insight produces a list of resource identifiers.
        :param pulumi.Input[str] name: The name of the custom insight.
        """
        # Set only the fields that were actually provided so absent values
        # remain unset rather than becoming explicit Nones.
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if filters is not None:
            pulumi.set(__self__, "filters", filters)
        if group_by_attribute is not None:
            pulumi.set(__self__, "group_by_attribute", group_by_attribute)
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the insight.
        """
        return pulumi.get(self, "arn")

    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)

    @property
    @pulumi.getter
    def filters(self) -> Optional[pulumi.Input['InsightFiltersArgs']]:
        """
        A configuration block including one or more (up to 10 distinct) attributes used to filter the findings included in the insight. The insight only includes findings that match criteria defined in the filters. See filters below for more details.
        """
        return pulumi.get(self, "filters")

    @filters.setter
    def filters(self, value: Optional[pulumi.Input['InsightFiltersArgs']]):
        pulumi.set(self, "filters", value)

    @property
    @pulumi.getter(name="groupByAttribute")
    def group_by_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        The attribute used to group the findings for the insight e.g. if an insight is grouped by `ResourceId`, then the insight produces a list of resource identifiers.
        """
        return pulumi.get(self, "group_by_attribute")

    @group_by_attribute.setter
    def group_by_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group_by_attribute", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the custom insight.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
class Insight(pulumi.CustomResource):
    # NOTE: auto-generated by the Pulumi Terraform bridge. The two @overload
    # __init__ signatures below exist only for type checkers; runtime
    # dispatch happens in the un-decorated __init__ further down.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 filters: Optional[pulumi.Input[pulumi.InputType['InsightFiltersArgs']]] = None,
                 group_by_attribute: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a Security Hub custom insight resource. See the [Managing custom insights section](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-custom-insights.html) of the AWS User Guide for more information.

        ## Example Usage
        ### Filter by AWS account ID

        ```python
        import pulumi
        import pulumi_aws as aws

        example_account = aws.securityhub.Account("exampleAccount")
        example_insight = aws.securityhub.Insight("exampleInsight",
            filters=aws.securityhub.InsightFiltersArgs(
                aws_account_ids=[
                    aws.securityhub.InsightFiltersAwsAccountIdArgs(
                        comparison="EQUALS",
                        value="1234567890",
                    ),
                    aws.securityhub.InsightFiltersAwsAccountIdArgs(
                        comparison="EQUALS",
                        value="09876543210",
                    ),
                ],
            ),
            group_by_attribute="AwsAccountId",
            opts=pulumi.ResourceOptions(depends_on=[example_account]))
        ```
        ### Filter by date range

        ```python
        import pulumi
        import pulumi_aws as aws

        example_account = aws.securityhub.Account("exampleAccount")
        example_insight = aws.securityhub.Insight("exampleInsight",
            filters=aws.securityhub.InsightFiltersArgs(
                created_ats=[aws.securityhub.InsightFiltersCreatedAtArgs(
                    date_range=aws.securityhub.InsightFiltersCreatedAtDateRangeArgs(
                        unit="DAYS",
                        value=5,
                    ),
                )],
            ),
            group_by_attribute="CreatedAt",
            opts=pulumi.ResourceOptions(depends_on=[example_account]))
        ```
        ### Filter by destination IPv4 address

        ```python
        import pulumi
        import pulumi_aws as aws

        example_account = aws.securityhub.Account("exampleAccount")
        example_insight = aws.securityhub.Insight("exampleInsight",
            filters=aws.securityhub.InsightFiltersArgs(
                network_destination_ipv4s=[aws.securityhub.InsightFiltersNetworkDestinationIpv4Args(
                    cidr="10.0.0.0/16",
                )],
            ),
            group_by_attribute="NetworkDestinationIpV4",
            opts=pulumi.ResourceOptions(depends_on=[example_account]))
        ```
        ### Filter by finding's confidence

        ```python
        import pulumi
        import pulumi_aws as aws

        example_account = aws.securityhub.Account("exampleAccount")
        example_insight = aws.securityhub.Insight("exampleInsight",
            filters=aws.securityhub.InsightFiltersArgs(
                confidences=[aws.securityhub.InsightFiltersConfidenceArgs(
                    gte="80",
                )],
            ),
            group_by_attribute="Confidence",
            opts=pulumi.ResourceOptions(depends_on=[example_account]))
        ```
        ### Filter by resource tags

        ```python
        import pulumi
        import pulumi_aws as aws

        example_account = aws.securityhub.Account("exampleAccount")
        example_insight = aws.securityhub.Insight("exampleInsight",
            filters=aws.securityhub.InsightFiltersArgs(
                resource_tags=[aws.securityhub.InsightFiltersResourceTagArgs(
                    comparison="EQUALS",
                    key="Environment",
                    value="Production",
                )],
            ),
            group_by_attribute="ResourceTags",
            opts=pulumi.ResourceOptions(depends_on=[example_account]))
        ```

        ## Import

        Security Hub insights can be imported using the ARN, e.g.

        ```sh
        $ pulumi import aws:securityhub/insight:Insight example arn:aws:securityhub:us-west-2:1234567890:insight/1234567890/custom/91299ed7-abd0-4e44-a858-d0b15e37141a
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['InsightFiltersArgs']] filters: A configuration block including one or more (up to 10 distinct) attributes used to filter the findings included in the insight. The insight only includes findings that match criteria defined in the filters. See filters below for more details.
        :param pulumi.Input[str] group_by_attribute: The attribute used to group the findings for the insight e.g. if an insight is grouped by `ResourceId`, then the insight produces a list of resource identifiers.
        :param pulumi.Input[str] name: The name of the custom insight.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: InsightArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Security Hub custom insight resource. See the [Managing custom insights section](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-custom-insights.html) of the AWS User Guide for more information.

        ## Example Usage
        ### Filter by AWS account ID

        ```python
        import pulumi
        import pulumi_aws as aws

        example_account = aws.securityhub.Account("exampleAccount")
        example_insight = aws.securityhub.Insight("exampleInsight",
            filters=aws.securityhub.InsightFiltersArgs(
                aws_account_ids=[
                    aws.securityhub.InsightFiltersAwsAccountIdArgs(
                        comparison="EQUALS",
                        value="1234567890",
                    ),
                    aws.securityhub.InsightFiltersAwsAccountIdArgs(
                        comparison="EQUALS",
                        value="09876543210",
                    ),
                ],
            ),
            group_by_attribute="AwsAccountId",
            opts=pulumi.ResourceOptions(depends_on=[example_account]))
        ```
        ### Filter by date range

        ```python
        import pulumi
        import pulumi_aws as aws

        example_account = aws.securityhub.Account("exampleAccount")
        example_insight = aws.securityhub.Insight("exampleInsight",
            filters=aws.securityhub.InsightFiltersArgs(
                created_ats=[aws.securityhub.InsightFiltersCreatedAtArgs(
                    date_range=aws.securityhub.InsightFiltersCreatedAtDateRangeArgs(
                        unit="DAYS",
                        value=5,
                    ),
                )],
            ),
            group_by_attribute="CreatedAt",
            opts=pulumi.ResourceOptions(depends_on=[example_account]))
        ```
        ### Filter by destination IPv4 address

        ```python
        import pulumi
        import pulumi_aws as aws

        example_account = aws.securityhub.Account("exampleAccount")
        example_insight = aws.securityhub.Insight("exampleInsight",
            filters=aws.securityhub.InsightFiltersArgs(
                network_destination_ipv4s=[aws.securityhub.InsightFiltersNetworkDestinationIpv4Args(
                    cidr="10.0.0.0/16",
                )],
            ),
            group_by_attribute="NetworkDestinationIpV4",
            opts=pulumi.ResourceOptions(depends_on=[example_account]))
        ```
        ### Filter by finding's confidence

        ```python
        import pulumi
        import pulumi_aws as aws

        example_account = aws.securityhub.Account("exampleAccount")
        example_insight = aws.securityhub.Insight("exampleInsight",
            filters=aws.securityhub.InsightFiltersArgs(
                confidences=[aws.securityhub.InsightFiltersConfidenceArgs(
                    gte="80",
                )],
            ),
            group_by_attribute="Confidence",
            opts=pulumi.ResourceOptions(depends_on=[example_account]))
        ```
        ### Filter by resource tags

        ```python
        import pulumi
        import pulumi_aws as aws

        example_account = aws.securityhub.Account("exampleAccount")
        example_insight = aws.securityhub.Insight("exampleInsight",
            filters=aws.securityhub.InsightFiltersArgs(
                resource_tags=[aws.securityhub.InsightFiltersResourceTagArgs(
                    comparison="EQUALS",
                    key="Environment",
                    value="Production",
                )],
            ),
            group_by_attribute="ResourceTags",
            opts=pulumi.ResourceOptions(depends_on=[example_account]))
        ```

        ## Import

        Security Hub insights can be imported using the ARN, e.g.

        ```sh
        $ pulumi import aws:securityhub/insight:Insight example arn:aws:securityhub:us-west-2:1234567890:insight/1234567890/custom/91299ed7-abd0-4e44-a858-d0b15e37141a
        ```

        :param str resource_name: The name of the resource.
        :param InsightArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: accepts either a
        # single InsightArgs bundle or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(InsightArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 filters: Optional[pulumi.Input[pulumi.InputType['InsightFiltersArgs']]] = None,
                 group_by_attribute: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Normalize/validate resource options before registration.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt existing resource"; __props__ is reserved
        # for that path (used by Insight.get below).
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = InsightArgs.__new__(InsightArgs)

            # Required-property checks are skipped when looking up by URN.
            if filters is None and not opts.urn:
                raise TypeError("Missing required property 'filters'")
            __props__.__dict__["filters"] = filters
            if group_by_attribute is None and not opts.urn:
                raise TypeError("Missing required property 'group_by_attribute'")
            __props__.__dict__["group_by_attribute"] = group_by_attribute
            __props__.__dict__["name"] = name
            # Output-only property: populated by the provider after creation.
            __props__.__dict__["arn"] = None
        super(Insight, __self__).__init__(
            'aws:securityhub/insight:Insight',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            arn: Optional[pulumi.Input[str]] = None,
            filters: Optional[pulumi.Input[pulumi.InputType['InsightFiltersArgs']]] = None,
            group_by_attribute: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None) -> 'Insight':
        """
        Get an existing Insight resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] arn: ARN of the insight.
        :param pulumi.Input[pulumi.InputType['InsightFiltersArgs']] filters: A configuration block including one or more (up to 10 distinct) attributes used to filter the findings included in the insight. The insight only includes findings that match criteria defined in the filters. See filters below for more details.
        :param pulumi.Input[str] group_by_attribute: The attribute used to group the findings for the insight e.g. if an insight is grouped by `ResourceId`, then the insight produces a list of resource identifiers.
        :param pulumi.Input[str] name: The name of the custom insight.
        """
        # Merging id into opts routes __init__ down the "adopt existing
        # resource" path (see _internal_init above).
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _InsightState.__new__(_InsightState)

        __props__.__dict__["arn"] = arn
        __props__.__dict__["filters"] = filters
        __props__.__dict__["group_by_attribute"] = group_by_attribute
        __props__.__dict__["name"] = name
        return Insight(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def arn(self) -> pulumi.Output[str]:
        """
        ARN of the insight.
        """
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter
    def filters(self) -> pulumi.Output['outputs.InsightFilters']:
        """
        A configuration block including one or more (up to 10 distinct) attributes used to filter the findings included in the insight. The insight only includes findings that match criteria defined in the filters. See filters below for more details.
        """
        return pulumi.get(self, "filters")

    @property
    @pulumi.getter(name="groupByAttribute")
    def group_by_attribute(self) -> pulumi.Output[str]:
        """
        The attribute used to group the findings for the insight e.g. if an insight is grouped by `ResourceId`, then the insight produces a list of resource identifiers.
        """
        return pulumi.get(self, "group_by_attribute")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the custom insight.
        """
        return pulumi.get(self, "name")
|
import argparse
import os
import glob
import random
import darknet
import time
import cv2
import numpy as np
import darknet
# Check if GPIO module is available or not
try:
    import RPi.GPIO as GPIO
except ImportError:
    print("Cannot locate RPi.GPIO module!")
    # Bug fix: the original called sys.exit(1) but `sys` was never imported,
    # so the handler itself raised a NameError. Raising SystemExit directly
    # exits with status 1 and needs no import.
    raise SystemExit(1)
def parser():
    """Build the YOLO-detection CLI and return the parsed arguments."""
    ap = argparse.ArgumentParser(description="YOLO Object Detection")
    ap.add_argument("--input", type=str, default="",
                    help="image source. It can be a single image, a"
                         "txt with paths to them, or a folder. Image valid"
                         " formats are jpg, jpeg or png."
                         "If no input is given, ")
    ap.add_argument("--batch_size", default=1, type=int,
                    help="number of images to be processed at the same time")
    ap.add_argument("--weights", default="yolov4.weights",
                    help="yolo weights path")
    ap.add_argument("--dont_show", action='store_true',
                    help="windown inference display. For headless systems")
    ap.add_argument("--ext_output", action='store_true',
                    help="display bbox coordinates of detected objects")
    ap.add_argument("--save_labels", action='store_true',
                    help="save detections bbox for each image in yolo format")
    ap.add_argument("--config_file", default="./cfg/yolov4.cfg",
                    help="path to config file")
    ap.add_argument("--data_file", default="./cfg/coco.data",
                    help="path to data file")
    ap.add_argument("--thresh", type=float, default=.25,
                    help="remove detections with lower confidence")
    return ap.parse_args()
def check_arguments_errors(args):
    """Validate CLI arguments: threshold range and existence of all paths.

    Raises AssertionError for an out-of-range threshold and ValueError for
    any missing file path.
    """
    assert 0 < args.thresh < 1, "Threshold should be a float between zero and one (non-inclusive)"
    required_paths = [
        (args.config_file, "Invalid config path {}"),
        (args.weights, "Invalid weight path {}"),
        (args.data_file, "Invalid data file path {}"),
    ]
    for path, template in required_paths:
        if not os.path.exists(path):
            raise ValueError(template.format(os.path.abspath(path)))
    # --input is optional; validate only when it was supplied.
    if args.input and not os.path.exists(args.input):
        raise ValueError("Invalid image path {}".format(os.path.abspath(args.input)))
def check_batch_shape(images, batch_size):
    """
    Image sizes should be the same width and height
    """
    all_shapes = [img.shape for img in images]
    distinct = set(all_shapes)
    if len(distinct) > 1:
        raise ValueError("Images don't have same shape")
    if len(all_shapes) > batch_size:
        raise ValueError("Batch size higher than number of images")
    # All shapes are identical at this point; return the first one.
    return all_shapes[0]
def load_images(images_path):
    """
    If image path is given, return it directly
    For txt file, read it and return each line as image path
    In other case, it's a folder, return a list with names of each
    jpg, jpeg and png file
    """
    extension = images_path.rsplit('.', 1)[-1]
    if extension in ('jpg', 'jpeg', 'png'):
        return [images_path]
    if extension == "txt":
        with open(images_path, "r") as f:
            return f.read().splitlines()
    # Treat the path as a directory; collect images in jpg/png/jpeg order.
    found = []
    for pattern in ("*.jpg", "*.png", "*.jpeg"):
        found += glob.glob(os.path.join(images_path, pattern))
    return found
def prepare_batch(images, network, channels=3):
    """Pack a list of same-sized BGR images into one darknet IMAGE batch.

    NOTE(review): the layout below mirrors darknet's C ABI (CHW float32 in
    [0, 1], one contiguous buffer) — confirm against the darknet python
    wrapper before changing any step.
    """
    width = darknet.network_width(network)
    height = darknet.network_height(network)

    darknet_images = []
    for image in images:
        image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        image_resized = cv2.resize(image_rgb, (width, height),
                                   interpolation=cv2.INTER_LINEAR)
        # HWC -> CHW: the channel-first ordering darknet expects.
        custom_image = image_resized.transpose(2, 0, 1)
        darknet_images.append(custom_image)

    # Single contiguous float32 buffer scaled to [0, 1]; passed to C by
    # raw pointer, so contiguity is required.
    batch_array = np.concatenate(darknet_images, axis=0)
    batch_array = np.ascontiguousarray(batch_array.flat, dtype=np.float32)/255.0
    darknet_images = batch_array.ctypes.data_as(darknet.POINTER(darknet.c_float))
    return darknet.IMAGE(width, height, channels, darknet_images)
def _signal_leds(detections):
    """Drive the three status LEDs from the top detection's label.

    Green = mask worn correctly, yellow = mask worn incorrectly,
    red = no mask / nothing detected.
    """
    # Setup BCM Mode
    GPIO.setmode(GPIO.BCM)
    # Setup pin number
    pinYellow = 8   # Yellow led -> Pin 24 on the board
    pinGreen = 18   # Green led -> Pin 12 on the board
    pinRed = 24     # Red led -> Pin 18 on the board
    # Setup the pin as output direction
    GPIO.setup(pinYellow, GPIO.OUT)
    GPIO.setup(pinGreen, GPIO.OUT)
    GPIO.setup(pinRed, GPIO.OUT)
    # Bug fixes vs. the original: (1) detections[0] was indexed
    # unconditionally, raising IndexError when nothing was detected;
    # (2) the "mask_wearing_incorrect" branch was a copy-paste of the
    # "with_mask" branch, so the yellow LED could never light.
    top_label = detections[0][0] if detections else None
    if top_label == "with_mask":
        yellow, green, red = GPIO.LOW, GPIO.HIGH, GPIO.LOW
    elif top_label == "mask_wearing_incorrect":
        yellow, green, red = GPIO.HIGH, GPIO.LOW, GPIO.LOW
    else:
        # No detection or no mask: show red.
        yellow, green, red = GPIO.LOW, GPIO.LOW, GPIO.HIGH
    GPIO.output(pinYellow, yellow)
    GPIO.output(pinGreen, green)
    GPIO.output(pinRed, red)
    # Clean it up
    GPIO.cleanup()

def image_detection(image_path, network, class_names, class_colors, thresh):
    """Run darknet inference on one image file and update the status LEDs.

    Returns (annotated RGB image resized to the network input, detections).
    """
    # Darknet doesn't accept numpy images.
    # Create one with image we reuse for each detect
    width = darknet.network_width(network)
    height = darknet.network_height(network)
    darknet_image = darknet.make_image(width, height, 3)

    image = cv2.imread(image_path)
    image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    image_resized = cv2.resize(image_rgb, (width, height),
                               interpolation=cv2.INTER_LINEAR)

    darknet.copy_image_from_bytes(darknet_image, image_resized.tobytes())
    detections = darknet.detect_image(network, class_names, darknet_image, thresh=thresh)
    _signal_leds(detections)
    darknet.free_image(darknet_image)
    image = darknet.draw_boxes(detections, image_resized, class_colors)
    return cv2.cvtColor(image, cv2.COLOR_BGR2RGB), detections
def batch_detection(network, images, class_names, class_colors,
                    thresh=0.25, hier_thresh=.5, nms=.45, batch_size=4):
    """Run one batched darknet inference pass and draw boxes on each image.

    Mutates `images` in place (annotated copies) and returns
    (images, per-image prediction lists).
    """
    image_height, image_width, _ = check_batch_shape(images, batch_size)
    darknet_images = prepare_batch(images, network)
    batch_detections = darknet.network_predict_batch(network, darknet_images, batch_size, image_width,
                                                     image_height, thresh, hier_thresh, None, 0, 0)
    batch_predictions = []
    for idx in range(batch_size):
        num = batch_detections[idx].num
        detections = batch_detections[idx].dets
        if nms:
            # Non-maximum suppression mutates the C-side detection array.
            darknet.do_nms_obj(detections, num, len(class_names), nms)
        predictions = darknet.remove_negatives(detections, class_names, num)
        images[idx] = darknet.draw_boxes(predictions, images[idx], class_colors)
        batch_predictions.append(predictions)
    # Free the C-side detections only after all results were copied out.
    darknet.free_batch_detections(batch_detections, batch_size)
    return images, batch_predictions
def image_classification(image, network, class_names):
    """Classify one BGR image and return (label, score) pairs, best first."""
    net_w = darknet.network_width(network)
    net_h = darknet.network_height(network)
    rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    resized = cv2.resize(rgb, (net_w, net_h),
                         interpolation=cv2.INTER_LINEAR)
    dn_image = darknet.make_image(net_w, net_h, 3)
    darknet.copy_image_from_bytes(dn_image, resized.tobytes())
    scores = darknet.predict_image(network, dn_image)
    # Copy the per-class scores out of the C buffer before freeing it.
    predictions = [(label, scores[i]) for i, label in enumerate(class_names)]
    darknet.free_image(dn_image)
    predictions.sort(key=lambda pair: pair[1], reverse=True)
    return predictions
def convert2relative(image, bbox):
    """
    YOLO format use relative coordinates for annotation
    """
    bx, by, bw, bh = bbox
    img_h, img_w, _ = image.shape
    # Normalize x/width terms by image width, y/height terms by image height.
    return bx / img_w, by / img_h, bw / img_w, bh / img_h
def save_annotations(name, image, detections, class_names):
    """
    Files saved with image_name.txt and relative coordinates
    """
    # Bug fix: the original `name.split(".")[:-1][0]` kept only the text
    # before the FIRST dot, producing a wrong path for names like
    # "img.v2.jpg" or paths with a dotted directory; splitext strips just
    # the final extension.
    file_name = os.path.splitext(name)[0] + ".txt"
    with open(file_name, "w") as f:
        for label, confidence, bbox in detections:
            x, y, w, h = convert2relative(image, bbox)
            label = class_names.index(label)
            f.write("{} {:.4f} {:.4f} {:.4f} {:.4f} {:.4f}\n".format(label, x, y, w, h, float(confidence)))
def batch_detection_example():
    """Demo of batched inference over three sample images.

    Writes annotated copies of the inputs into the working directory and
    prints the raw detections.
    """
    args = parser()
    check_arguments_errors(args)
    batch_size = 3
    random.seed(3)  # deterministic bbox colors
    network, class_names, class_colors = darknet.load_network(
        args.config_file,
        args.data_file,
        args.weights,
        batch_size=batch_size
    )
    image_names = ['data/horses.jpg', 'data/horses.jpg', 'data/eagle.jpg']
    images = [cv2.imread(image) for image in image_names]
    # (The original had a stray trailing comma in this unpacking target.)
    images, detections = batch_detection(network, images, class_names,
                                         class_colors, batch_size=batch_size)
    for name, image in zip(image_names, images):
        cv2.imwrite(name.replace("data/", ""), image)
    print(detections)
def main():
    """Entry point: load the network, then detect on each input image.

    With --input, iterates the resolved image list; otherwise prompts
    interactively for one path per loop iteration.
    """
    args = parser()
    check_arguments_errors(args)

    random.seed(3)  # deterministic bbox colors
    network, class_names, class_colors = darknet.load_network(
        args.config_file,
        args.data_file,
        args.weights,
        batch_size=args.batch_size
    )

    images = load_images(args.input)

    index = 0
    while True:
        # loop asking for new image paths if no list is given
        if args.input:
            if index >= len(images):
                break
            image_name = images[index]
        else:
            # Interactive mode: switch both status LEDs off before prompting.
            GPIO.setmode(GPIO.BCM)
            # Setup pin number
            pinGreen = 18  # Green led
            pinRed = 24    # Red led
            # Setup the pin as output direction
            GPIO.setup(pinGreen, GPIO.OUT)
            GPIO.setup(pinRed, GPIO.OUT)
            GPIO.output(pinRed, GPIO.LOW)
            GPIO.output(pinGreen, GPIO.LOW)
            image_name = input("Enter Image Path: ")
        prev_time = time.time()
        image, detections = image_detection(
            image_name, network, class_names, class_colors, args.thresh
        )
        if args.save_labels:
            save_annotations(image_name, image, detections, class_names)
        darknet.print_detections(detections, args.ext_output)
        # Bug fix: guard against a zero elapsed time (fast inference plus a
        # coarse clock) which made the original 1/(dt) raise
        # ZeroDivisionError.
        elapsed = time.time() - prev_time
        fps = int(1 / elapsed) if elapsed > 0 else 0
        print("FPS: {}".format(fps))
        if not args.dont_show:
            cv2.imshow('Inference', image)
            if cv2.waitKey() & 0xFF == ord('q'):
                break
        index += 1
if __name__ == "__main__":
# unconmment next line for an example of batch processing
# batch_detection_example()
main()
|
# Scheduler constants
LAG_5_MINUTES = 5  # lag in minutes between finish of the timeperiod and beginning of its processing

# Process names and the MQ routing tokens associated with them.
PROCESS_SCHEDULER = 'Scheduler'
PROCESS_GC = 'GarbageCollectorWorker'

TOKEN_SCHEDULER = 'scheduler'
TOKEN_GC = 'gc'

# State Machine names
STATE_MACHINE_CONTINUOUS = 'continuous'
STATE_MACHINE_DISCRETE = 'discrete'
STATE_MACHINE_SIMPLE_DISCRETE = 'simple_discrete'
STATE_MACHINE_FREERUN = 'freerun'

# Supported worker types
TYPE_MANAGED = 'type_managed'
TYPE_FREERUN = 'type_freerun'
TYPE_DAEMON = 'type_daemon'
TYPE_GARBAGE_COLLECTOR = 'type_gc'

# Blocking modes controlling how dependent jobs are scheduled.
BLOCKING_NORMAL = 'blocking_normal'
BLOCKING_DEPENDENCIES = 'blocking_dependencies'
BLOCKING_CHILDREN = 'blocking_children'

# MQ exchanges that are required by Synergy Scheduler
EXCHANGE_MANAGED_WORKER = 'ex_managed_worker'
EXCHANGE_FREERUN_WORKER = 'ex_freerun_worker'
EXCHANGE_UTILS = 'ex_utils'

# DB Collection/Table names
COLLECTION_MANAGED_PROCESS = 'managed_process'
COLLECTION_FREERUN_PROCESS = 'freerun_process'
COLLECTION_UNIT_OF_WORK = 'unit_of_work'
# Per-granularity job collections (one per supported timeperiod).
COLLECTION_JOB_HOURLY = 'job_hourly'
COLLECTION_JOB_DAILY = 'job_daily'
COLLECTION_JOB_MONTHLY = 'job_monthly'
COLLECTION_JOB_YEARLY = 'job_yearly'

# Queue carrying unit-of-work status reports back to the scheduler.
QUEUE_UOW_REPORT = 'q_uow_report'
|
# Copyright 2022 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from django.contrib.admin.apps import AdminConfig
class InterviewAdminConfig(AdminConfig):
    # AdminConfig subclass that swaps Django's default AdminSite for a
    # SAML-authenticated one; the dotted path is resolved by Django at
    # startup.
    default_site = 'interview_db.admin.SAMLAdminSite'
|
/* global */
import {DropDownMenu} from "../DropDown.js";
import {Utils} from "../Utils.js";
/**
 * Table of per-minion/per-category issues shown in a panel.
 * Rows are plain <tr> elements carrying bookkeeping fields
 * (myCatName, myIssueName, menu, panel) as expando properties.
 */
export class Issues {

  /**
   * Remove every issue row belonging to a category.
   * @param pPanel panel whose table is scanned
   * @param pCatName category whose rows are removed
   */
  static removeCategory (pPanel, pCatName) {
    // Bug fix: `childNodes` is a LIVE NodeList; removing a row while
    // iterating it shifts later rows into the removed slot and skips
    // them. Snapshot the collection before mutating the tree.
    const rows = Array.from(pPanel.table.tBodies[0].childNodes);
    for (const tr of rows) {
      if (tr.myCatName === pCatName) {
        tr.parentNode.removeChild(tr);
      }
    }
  }

  /**
   * Remove one specific issue row (category + issue name).
   */
  static removeIssue (pPanel, pCatName, pIssueName) {
    // Same live-NodeList fix as removeCategory above.
    const rows = Array.from(pPanel.table.tBodies[0].childNodes);
    for (const tr of rows) {
      if (tr.myCatName === pCatName && tr.myIssueName === pIssueName) {
        tr.parentNode.removeChild(tr);
      }
    }
  }

  /**
   * Mark one category's async load as finished; once all categories are
   * done, show the issue count and make the table sortable.
   */
  static readyCategory (pPanel, pMsg) {
    // remove the "loading info..." message
    pPanel.msg2.removeChild(pMsg);

    // any category still loading?
    if (pPanel.msg2.childNodes.length > 0) {
      // not yet
      return;
    }

    const txt = Utils.txtZeroOneMany(
      pPanel.table.tBodies[0].children.length,
      "No issues", "{0} issue", "{0} issues");
    pPanel.setMsg(txt);
    pPanel.setTableSortable("Description", "asc");
  }

  /**
   * Add (or replace) an issue row and return the new <tr>.
   */
  static addIssue (pPanel, pCatName, pIssueName) {
    // remove a previous incarnation of the same issue
    Issues.removeIssue(pPanel, pCatName, pIssueName);

    const theTr = document.createElement("tr");

    const menu = new DropDownMenu(theTr, true);
    theTr.menu = menu;

    const descTd = document.createElement("td");
    const descSpan = document.createElement("span");
    descSpan.classList.add("desc");
    descTd.appendChild(descSpan);
    theTr.appendChild(descTd);

    theTr.myCatName = pCatName;
    theTr.myIssueName = pIssueName;
    theTr.panel = pPanel;

    pPanel.table.tBodies[0].appendChild(theTr);
    return theTr;
  }

  /** Set the row's description text. */
  static addIssueMsg (pTr, pTitle) {
    const desc = pTr.querySelector("td .desc");
    desc.innerText = pTitle;
  }

  /** Attach an error tooltip to the row's description. */
  static addIssueErr (pTr, pErrorMsg) {
    const desc = pTr.querySelector("td .desc");
    Utils.addToolTip(desc, pErrorMsg);
  }

  /**
   * Add a "run command" action to the row's menu, and (once) make
   * clicking the row itself run the command.
   */
  static addIssueCmd (pTr, pTitle, pTarget, pCommand) {
    pTr.menu.addMenuItem(pTitle + "...", () => {
      pTr.panel.runCommand("", pTarget, pCommand);
    });
    // Guard so repeated calls don't stack duplicate click listeners.
    if (pTr.hasClick !== true) {
      pTr.addEventListener("click", (ClickEvent) => {
        pTr.panel.runCommand("", pTarget, pCommand);
        ClickEvent.stopPropagation();
      });
    }
    pTr.hasClick = true;
  }

  /**
   * Add a "navigate to page" action to the row's menu, and (once) make
   * clicking the row itself navigate there.
   */
  static addIssueNav (pTr, pPage, pArgs) {
    let title;
    if (pPage.endsWith("-minion")) {
      // when unclear, add "for this minion" to title
      title = "Go to " + pPage.replace("-minion", "") + " page";
    } else {
      title = "Go to " + pPage + " page";
    }
    pTr.menu.addMenuItem(title, () => {
      pTr.panel.router.goTo(pPage, pArgs);
    });
    if (pTr.hasClick !== true) {
      pTr.addEventListener("click", (ClickEvent) => {
        pTr.panel.router.goTo(pPage, pArgs);
        ClickEvent.stopPropagation();
      });
    }
    pTr.hasClick = true;
  }

  /**
   * Instance hook: register a "loading info for <title>" placeholder in
   * the panel's message area and return it for later removal via
   * readyCategory.
   */
  onGetIssues (pPanel, pTitle) {
    this.api = pPanel.api;
    const msg = document.createElement("div");
    msg.classList.add("msg");
    msg.innerText = "(loading info for " + pTitle + ")";
    pPanel.msg2.appendChild(msg);
    return msg;
  }
}
|
# coding: utf-8
"""
This module provides a class for one of the models described by the API.
"""
from __future__ import absolute_import
from digital.forge.data.models.base_model_ import Model
from digital.forge.data import util
class ErrorResponse(Model):
    """NOTE: This class is auto generated by OpenAPI Generator.
    (https://openapi-generator.tech)

    A simple error payload: a required integer ``code`` plus a required
    human-readable ``message``.
    """
    # attribute name -> python type (used by the deserializer)
    openapi_types = {
        'code': int,
        'message': str
    }
    # attribute name -> JSON key
    attribute_map = {
        'code': 'code',
        'message': 'message'
    }

    def __init__(self, code=None, message=None):  # noqa: E501
        """ErrorResponse - a model defined in OpenAPI

        :param code: The code of this ErrorResponse.  # noqa: E501
        :type code: int
        :param message: The message of this ErrorResponse.  # noqa: E501
        :type message: str
        :raises ValueError: if either value is missing or of the wrong type
        """
        if code is None:
            raise ValueError('`code` is a required value')
        if message is None:
            raise ValueError('`message` is a required value')
        # assign through the property setters so their isinstance checks
        # run at construction time too (the generated code wrote to the
        # private attributes directly, bypassing validation)
        self.code = code
        self.message = message

    @classmethod
    def from_dict(cls, dikt) -> 'ErrorResponse':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The ErrorResponse of this ErrorResponse.  # noqa: E501
        :rtype: ErrorResponse
        """
        return util.deserialize_model(dikt, cls)

    @property
    def code(self):
        """Gets the code of this ErrorResponse.

        An error code corresponding to some internal state of the server.  # noqa: E501

        :return: The code of this ErrorResponse.
        :rtype: int
        """
        return self._code

    @code.setter
    def code(self, code):
        """Sets the code of this ErrorResponse.

        An error code corresponding to some internal state of the server.  # noqa: E501

        :param code: The code of this ErrorResponse.
        :type code: int
        """
        if code is None:
            raise ValueError("Invalid value for `code`, must not be `None`")
        # the earlier None-check makes a `code is not None` guard redundant
        if not isinstance(code, int):
            raise ValueError("Invalid value for `code`, must be a `int`")
        self._code = code

    @property
    def message(self):
        """Gets the message of this ErrorResponse.

        A human-readable message explaining the error code or what went wrong.  # noqa: E501

        :return: The message of this ErrorResponse.
        :rtype: str
        """
        return self._message

    @message.setter
    def message(self, message):
        """Sets the message of this ErrorResponse.

        A human-readable message explaining the error code or what went wrong.  # noqa: E501

        :param message: The message of this ErrorResponse.
        :type message: str
        """
        if message is None:
            raise ValueError("Invalid value for `message`, must not be `None`")
        if not isinstance(message, str):
            raise ValueError("Invalid value for `message`, must be a `str`")
        self._message = message
|
/* eslint-disable */
// this is an auto generated file. This will be overwritten
// Fetch one Space by id.
export const getSpace = /* GraphQL */ `
  query GetSpace($id: ID!) {
    getSpace(id: $id) {
      id
      name
      description
      createdAt
      updatedAt
      owner
    }
  }
`;
// List Spaces with optional filter/pagination.
export const listSpaces = /* GraphQL */ `
  query ListSpaces(
    $filter: ModelSpaceFilterInput
    $limit: Int
    $nextToken: String
  ) {
    listSpaces(filter: $filter, limit: $limit, nextToken: $nextToken) {
      items {
        id
        name
        description
        createdAt
        updatedAt
        owner
      }
      nextToken
    }
  }
`;
// Fetch one Locale by id (includes sys metadata).
export const getLocale = /* GraphQL */ `
  query GetLocale($id: ID!) {
    getLocale(id: $id) {
      id
      name
      code
      spaceId
      fallbackCode
      default
      contentManagementApi
      contentDeliveryApi
      optional
      sys {
        type
        version
        createdAt
        updatedAt
      }
      createdAt
      updatedAt
      owner
    }
  }
`;
// List Locales with optional filter/pagination.
export const listLocales = /* GraphQL */ `
  query ListLocales(
    $filter: ModelLocaleFilterInput
    $limit: Int
    $nextToken: String
  ) {
    listLocales(filter: $filter, limit: $limit, nextToken: $nextToken) {
      items {
        id
        name
        code
        spaceId
        fallbackCode
        default
        contentManagementApi
        contentDeliveryApi
        optional
        sys {
          type
          version
          createdAt
          updatedAt
        }
        createdAt
        updatedAt
        owner
      }
      nextToken
    }
  }
`;
// Fetch one ContentSchema by id (sys metadata, field definitions, parent space).
export const getContentSchema = /* GraphQL */ `
  query GetContentSchema($id: ID!) {
    getContentSchema(id: $id) {
      id
      spaceId
      name
      displayField
      description
      sys {
        id
        type
        createdAt
        updatedAt
        publishedVersion
        publishedAt
        firstPublishedAt
        publishedCounter
        version
      }
      fields {
        id
        name
        type
        localized
        required
        validations
        disabled
        omitted
      }
      space {
        id
        name
        description
        createdAt
        updatedAt
        owner
      }
      createdAt
      updatedAt
      owner
    }
  }
`;
// List ContentSchemas with optional filter/pagination.
export const listContentSchemas = /* GraphQL */ `
  query ListContentSchemas(
    $filter: ModelContentSchemaFilterInput
    $limit: Int
    $nextToken: String
  ) {
    listContentSchemas(filter: $filter, limit: $limit, nextToken: $nextToken) {
      items {
        id
        spaceId
        name
        displayField
        description
        sys {
          id
          type
          createdAt
          updatedAt
          publishedVersion
          publishedAt
          firstPublishedAt
          publishedCounter
          version
        }
        fields {
          id
          name
          type
          localized
          required
          validations
          disabled
          omitted
        }
        space {
          id
          name
          description
          createdAt
          updatedAt
          owner
        }
        createdAt
        updatedAt
        owner
      }
      nextToken
    }
  }
`;
// Fetch one Entry by id (parent space, schema name, sys metadata, raw fields).
export const getEntry = /* GraphQL */ `
  query GetEntry($id: ID!) {
    getEntry(id: $id) {
      id
      name
      spaceId
      space {
        id
        name
        description
        createdAt
        updatedAt
        owner
      }
      contentSchema
      sys {
        id
        type
        createdAt
        updatedAt
        publishedVersion
        publishedAt
        firstPublishedAt
        publishedCounter
        version
      }
      fields
      createdAt
      updatedAt
      owner
    }
  }
`;
// List Entries ("Entrys" is the generated name) with optional filter/pagination.
export const listEntrys = /* GraphQL */ `
  query ListEntrys(
    $filter: ModelEntryFilterInput
    $limit: Int
    $nextToken: String
  ) {
    listEntrys(filter: $filter, limit: $limit, nextToken: $nextToken) {
      items {
        id
        name
        spaceId
        space {
          id
          name
          description
          createdAt
          updatedAt
          owner
        }
        contentSchema
        sys {
          id
          type
          createdAt
          updatedAt
          publishedVersion
          publishedAt
          firstPublishedAt
          publishedCounter
          version
        }
        fields
        createdAt
        updatedAt
        owner
      }
      nextToken
    }
  }
`;
// Fetch one Album by id, including its photos connection and parent space.
export const getAlbum = /* GraphQL */ `
  query GetAlbum($id: ID!) {
    getAlbum(id: $id) {
      id
      name
      description
      photos {
        items {
          id
          albumId
          bucket
          createdAt
          updatedAt
          owner
        }
        nextToken
      }
      spaceId
      space {
        id
        name
        description
        createdAt
        updatedAt
        owner
      }
      createdAt
      updatedAt
      owner
    }
  }
`;
// List Albums with optional filter/pagination.
export const listAlbums = /* GraphQL */ `
  query ListAlbums(
    $filter: ModelAlbumFilterInput
    $limit: Int
    $nextToken: String
  ) {
    listAlbums(filter: $filter, limit: $limit, nextToken: $nextToken) {
      items {
        id
        name
        description
        photos {
          nextToken
        }
        spaceId
        space {
          id
          name
          description
          createdAt
          updatedAt
          owner
        }
        createdAt
        updatedAt
        owner
      }
      nextToken
    }
  }
`;
// Fetch one Photo by id, including its parent album and image renditions.
export const getPhoto = /* GraphQL */ `
  query GetPhoto($id: ID!) {
    getPhoto(id: $id) {
      id
      albumId
      album {
        id
        name
        description
        photos {
          nextToken
        }
        spaceId
        space {
          id
          name
          description
          createdAt
          updatedAt
          owner
        }
        createdAt
        updatedAt
        owner
      }
      bucket
      fullsize {
        s3key
        width
        height
      }
      thumbnail {
        s3key
        width
        height
      }
      createdAt
      updatedAt
      owner
    }
  }
`;
// List Photos with optional filter/pagination.
export const listPhotos = /* GraphQL */ `
  query ListPhotos(
    $filter: ModelPhotoFilterInput
    $limit: Int
    $nextToken: String
  ) {
    listPhotos(filter: $filter, limit: $limit, nextToken: $nextToken) {
      items {
        id
        albumId
        album {
          id
          name
          description
          spaceId
          createdAt
          updatedAt
          owner
        }
        bucket
        fullsize {
          s3key
          width
          height
        }
        thumbnail {
          s3key
          width
          height
        }
        createdAt
        updatedAt
        owner
      }
      nextToken
    }
  }
`;
// Index query: Locales belonging to one space.
export const listLocalesBySpace = /* GraphQL */ `
  query ListLocalesBySpace(
    $spaceId: ID
    $sortDirection: ModelSortDirection
    $filter: ModelLocaleFilterInput
    $limit: Int
    $nextToken: String
  ) {
    listLocalesBySpace(
      spaceId: $spaceId
      sortDirection: $sortDirection
      filter: $filter
      limit: $limit
      nextToken: $nextToken
    ) {
      items {
        id
        name
        code
        spaceId
        fallbackCode
        default
        contentManagementApi
        contentDeliveryApi
        optional
        sys {
          type
          version
          createdAt
          updatedAt
        }
        createdAt
        updatedAt
        owner
      }
      nextToken
    }
  }
`;
// Index query: ContentSchemas belonging to one space.
export const listContentSchemaBySpace = /* GraphQL */ `
  query ListContentSchemaBySpace(
    $spaceId: ID
    $sortDirection: ModelSortDirection
    $filter: ModelContentSchemaFilterInput
    $limit: Int
    $nextToken: String
  ) {
    listContentSchemaBySpace(
      spaceId: $spaceId
      sortDirection: $sortDirection
      filter: $filter
      limit: $limit
      nextToken: $nextToken
    ) {
      items {
        id
        spaceId
        name
        displayField
        description
        sys {
          id
          type
          createdAt
          updatedAt
          publishedVersion
          publishedAt
          firstPublishedAt
          publishedCounter
          version
        }
        fields {
          id
          name
          type
          localized
          required
          validations
          disabled
          omitted
        }
        space {
          id
          name
          description
          createdAt
          updatedAt
          owner
        }
        createdAt
        updatedAt
        owner
      }
      nextToken
    }
  }
`;
// Index query: Entries belonging to one space.
export const listEntriesBySpace = /* GraphQL */ `
  query ListEntriesBySpace(
    $spaceId: ID
    $sortDirection: ModelSortDirection
    $filter: ModelEntryFilterInput
    $limit: Int
    $nextToken: String
  ) {
    listEntriesBySpace(
      spaceId: $spaceId
      sortDirection: $sortDirection
      filter: $filter
      limit: $limit
      nextToken: $nextToken
    ) {
      items {
        id
        name
        spaceId
        space {
          id
          name
          description
          createdAt
          updatedAt
          owner
        }
        contentSchema
        sys {
          id
          type
          createdAt
          updatedAt
          publishedVersion
          publishedAt
          firstPublishedAt
          publishedCounter
          version
        }
        fields
        createdAt
        updatedAt
        owner
      }
      nextToken
    }
  }
`;
// Index query: Entries of one content schema.
export const listEntriesByContentSchema = /* GraphQL */ `
  query ListEntriesByContentSchema(
    $contentSchema: String
    $sortDirection: ModelSortDirection
    $filter: ModelEntryFilterInput
    $limit: Int
    $nextToken: String
  ) {
    listEntriesByContentSchema(
      contentSchema: $contentSchema
      sortDirection: $sortDirection
      filter: $filter
      limit: $limit
      nextToken: $nextToken
    ) {
      items {
        id
        name
        spaceId
        space {
          id
          name
          description
          createdAt
          updatedAt
          owner
        }
        contentSchema
        sys {
          id
          type
          createdAt
          updatedAt
          publishedVersion
          publishedAt
          firstPublishedAt
          publishedCounter
          version
        }
        fields
        createdAt
        updatedAt
        owner
      }
      nextToken
    }
  }
`;
// Index query: Albums belonging to one space.
export const listAlbumsBySpace = /* GraphQL */ `
  query ListAlbumsBySpace(
    $spaceId: ID
    $sortDirection: ModelSortDirection
    $filter: ModelAlbumFilterInput
    $limit: Int
    $nextToken: String
  ) {
    listAlbumsBySpace(
      spaceId: $spaceId
      sortDirection: $sortDirection
      filter: $filter
      limit: $limit
      nextToken: $nextToken
    ) {
      items {
        id
        name
        description
        photos {
          nextToken
        }
        spaceId
        space {
          id
          name
          description
          createdAt
          updatedAt
          owner
        }
        createdAt
        updatedAt
        owner
      }
      nextToken
    }
  }
`;
// Index query: Photos belonging to one album.
export const listPhotosByAlbum = /* GraphQL */ `
  query ListPhotosByAlbum(
    $albumId: ID
    $sortDirection: ModelSortDirection
    $filter: ModelPhotoFilterInput
    $limit: Int
    $nextToken: String
  ) {
    listPhotosByAlbum(
      albumId: $albumId
      sortDirection: $sortDirection
      filter: $filter
      limit: $limit
      nextToken: $nextToken
    ) {
      items {
        id
        albumId
        album {
          id
          name
          description
          spaceId
          createdAt
          updatedAt
          owner
        }
        bucket
        fullsize {
          s3key
          width
          height
        }
        thumbnail {
          s3key
          width
          height
        }
        createdAt
        updatedAt
        owner
      }
      nextToken
    }
  }
`;
|
# -*- coding: utf-8 -*-
"""Package setup for the winsun custom app."""
from setuptools import setup, find_packages
import re
import ast

# get version from __version__ variable in winsun/__init__.py
_version_re = re.compile(r'__version__\s+=\s+(.*)')

with open('winsun/__init__.py', 'rb') as f:
    version = str(ast.literal_eval(_version_re.search(
        f.read().decode('utf-8')).group(1)))


def _read_requirements(path='requirements.txt'):
    """Return requirement lines from *path*, skipping blanks and comments.

    Replaces ``pip.req.parse_requirements``, a private pip API that was
    removed in pip 10; this setup script no longer imports pip at all.
    """
    with open(path) as req_file:
        lines = [line.strip() for line in req_file]
    return [line for line in lines if line and not line.startswith('#')]


requirements = _read_requirements()

setup(
    name='winsun',
    version=version,
    description='Winsun Custom App for Luma Industrias',
    author='Damian Hernandez',
    author_email='damher@gmail.com',
    packages=find_packages(),
    zip_safe=False,
    include_package_data=True,
    # plain requirement strings go to install_requires; URL-style lines are
    # exposed via dependency_links (ignored by modern pip, kept for parity
    # with the previous parse_requirements-based behaviour)
    install_requires=[r for r in requirements if '://' not in r],
    dependency_links=[r for r in requirements if '://' in r]
)
|
from appinit_backend.lib.imports import *
import smtplib
def call(**kwargs):
    """Send each prepared email through the Red Hat corporate SMTP relay.

    :param emails: iterable whose items are unpacked directly into
        ``smtplib.SMTP.sendmail`` (from_addr, to_addrs, msg).
    :returns: True once every message has been handed to the relay.
    """
    emails = kwargs['emails']
    s = smtplib.SMTP('smtp.corp.redhat.com')
    try:
        for e in emails:
            s.sendmail(*e)
    finally:
        # always close the connection, even if a sendmail call raises;
        # the original leaked the connection on failure
        s.quit()
    return True
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from django.contrib import admin as django_admin
from ..service.maintenance import MaintenanceService
from ..forms import MaintenanceForm
from .. import models
from django.utils.html import format_html
from django.http import HttpResponseRedirect
import logging
from django.contrib import messages
from django.core.urlresolvers import reverse
LOG = logging.getLogger(__name__)
class MaintenanceParametersInline(django_admin.TabularInline):
    # Inline editor for MaintenanceParameters rows shown on the
    # Maintenance change form.
    model = models.MaintenanceParameters
    fields = ('parameter_name', 'function_name',)
    template = 'admin/physical/shared/inline_form.html'
    can_delete = False
    def get_readonly_fields(self, request, obj=None):
        # Decide which inline fields are read-only and, as a side effect,
        # pick the change-form template and the max number of extra rows.
        # NOTE(review): mutating self.max_num / self.change_form_template
        # here stores per-request state on the shared admin instance --
        # looks racy under concurrent requests; confirm this is intended.
        maintenance = obj
        self.max_num = None
        if maintenance and maintenance.status != models.Maintenance.REJECTED:
            self.change_form_template = ("admin/maintenance/maintenance/"
                                         "custom_change_form.html")
        else:
            self.change_form_template = None
        if maintenance and maintenance.celery_task_id:
            # already dispatched to celery: freeze the inline entirely
            self.max_num = 0
            return self.fields
        return ()
class MaintenanceAdmin(admin.DjangoServicesAdmin):
    """Django admin for Maintenance objects.

    Adds a custom "Revoke Maintenance" button to the change form and makes
    fields read-only once the maintenance has been dispatched to celery.
    """
    service_class = MaintenanceService
    search_fields = ("scheduled_for", "description", "maximum_workers",
                     "status")
    list_display = ("description", "scheduled_for", "started_at",
                    "finished_at", "maximum_workers", "affected_hosts_html",
                    "created_by", "friendly_status")
    list_filter = ["scheduled_for", "maximum_workers", "status"]
    fields = (
        "description", "scheduled_for", "started_at", "finished_at",
        "main_script", "rollback_script", "hostsid", "maximum_workers",
        "disable_alarms", "status", "celery_task_id", "affected_hosts",
        "created_by", "revoked_by"
    )
    form = MaintenanceForm
    actions = None
    inlines = [MaintenanceParametersInline, ]

    def revoke_maintenance(request, id):
        """Revoke a waiting maintenance (admin-button view).

        NOTE: deliberately *not* a bound method -- it is referenced from
        ``buttons`` below and registered as a plain view in ``get_urls``,
        so the first positional argument is ``request``, not ``self``.
        Revocation is refused unless every configured celery worker answers
        a ping.
        """
        import celery
        from system.models import Configuration
        celery_inspect = celery.current_app.control.inspect()
        celery_workers = Configuration.get_by_name_as_list('celery_workers',)
        try:
            workers = celery_inspect.ping().keys()
        except Exception as e:
            LOG.warn("All celery workers are down! {} :(".format(e))
            messages.add_message(
                request, messages.ERROR,
                ("Maintenance can't be revoked because all celery "
                 "workers are down!"),
            )
            return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
        if workers and workers != celery_workers:
            LOG.warn("At least one celery worker is down! :(")
            messages.add_message(
                request, messages.ERROR,
                ("Maintenance can't be revoked because at least one celery "
                 "worker is down!"),
            )
            return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
        maintenance = models.Maintenance.objects.get(id=id)
        if maintenance.status == maintenance.WAITING:
            if maintenance.revoke_maintenance(request):
                messages.add_message(request, messages.SUCCESS,
                                     "Maintenance revoked!",)
            else:
                messages.add_message(request, messages.ERROR,
                                     "Maintenance has already started!",)
        else:
            messages.add_message(request, messages.ERROR,
                                 "Maintenance can't be revoked!",)
        return HttpResponseRedirect(
            reverse('admin:maintenance_maintenance_changelist')
        )

    # button descriptors consumed by change_view/get_urls
    buttons = [
        {'url': 'revoke_maintenance',
         'textname': 'Revoke Maintenance',
         'func': revoke_maintenance,
         'confirm': u'Do you really want to revoke this maintenance?',
         'id': 'revoke_maintenance'},
    ]

    def change_view(self, request, object_id, form_url='', extra_context=None):
        """Inject the custom buttons into the change-form context.

        ``extra_context`` defaults to ``None`` instead of ``{}``: a mutable
        default dict would be shared across *all* requests and mutated here.
        """
        if extra_context is None:
            extra_context = {}
        extra_context['buttons'] = self.buttons
        return super(MaintenanceAdmin, self).change_view(
            request, object_id, form_url, extra_context=extra_context
        )

    def get_urls(self):
        """Prepend one URL per ``buttons`` entry to the default admin URLs."""
        from django.conf.urls import url
        urls = super(MaintenanceAdmin, self).get_urls()
        my_urls = list(
            (url(r'^(.+)/%(url)s/$' % b, self.admin_site.admin_view(
                b['func'])) for b in self.buttons)
        )
        return my_urls + urls

    def get_readonly_fields(self, request, obj=None):
        """Every field becomes read-only once the maintenance reached celery."""
        maintenance = obj
        if maintenance and maintenance.status != models.Maintenance.REJECTED:
            self.change_form_template = ("admin/maintenance/maintenance"
                                         "/custom_change_form.html")
        else:
            self.change_form_template = None
        if maintenance and maintenance.celery_task_id:
            return self.fields
        return (
            'status', 'celery_task_id', 'affected_hosts', 'started_at',
            'finished_at', 'created_by', 'revoked_by'
        )

    def friendly_status(self, maintenance):
        """Render the status as a colored bootstrap label."""
        html_finished = '<span class="label label-info">Finished</span>'
        html_rejected = '<span class="label label-important">Rejected</span>'
        html_waiting = '<span class="label label-warning">Waiting</span>'
        html_running = '<span class="label label-success">Running</span>'
        html_revoked = '<span class="label label-primary">Revoked</span>'
        if maintenance.status == models.Maintenance.FINISHED:
            return format_html(html_finished)
        elif maintenance.status == models.Maintenance.REJECTED:
            return format_html(html_rejected)
        elif maintenance.status == models.Maintenance.WAITING:
            return format_html(html_waiting)
        elif maintenance.status == models.Maintenance.RUNNING:
            return format_html(html_running)
        elif maintenance.status == models.Maintenance.REVOKED:
            return format_html(html_revoked)
    friendly_status.short_description = "Status"

    def affected_hosts_html(self, maintenance):
        """Link the affected-hosts count to the filtered hostmaintenance list."""
        html = []
        html.append("<a href='../hostmaintenance/?maintenance__id=%s'>%s</a>" %
                    (maintenance.id, maintenance.affected_hosts))
        return format_html("".join(html))
    affected_hosts_html.short_description = "Affected hosts"

    def save_model(self, request, obj, form, change):
        """Stamp the creating user on first save."""
        if not change:
            obj.created_by = request.user.username
        obj.save()
|
class BinaryTreeNode(object):
    """A single node of a binary tree.

    Holds an arbitrary ``value``, optional ``left``/``right`` children,
    and a ``visited`` flag that traversal code can use for bookkeeping.
    """

    def __init__(self, value=None):
        """Create a leaf node holding ``value``; both children start empty."""
        self.left = None
        self.right = None
        self.value = value
        self.visited = False

    def get_value(self):
        """Return the value stored in this node."""
        return self.value

    def set_value(self, value):
        """Replace the value stored in this node."""
        self.value = value

    def set_left(self, node):
        """Attach ``node`` as the left child."""
        self.left = node

    def set_right(self, node):
        """Attach ``node`` as the right child."""
        self.right = node

    def get_left(self):
        """Return the left child, or None for a leaf."""
        return self.left

    def get_right(self):
        """Return the right child, or None for a leaf."""
        return self.right

    def visit(self):
        """Mark this node as visited and echo its value to stdout."""
        self.visited = True
        print(self.value)
|
#!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Test for -rpcbind, as well as -rpcallowip and -rpcconnect
# Add python-bitcoinrpc to module search path:
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-bitcoinrpc"))
import json
import shutil
import subprocess
import tempfile
import traceback
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
from netutil import *
def run_bind_test(tmpdir, allow_cts, connect_to, addresses, expected):
    '''
    Boot one node with the requested -rpcallowip/-rpcbind arguments, then
    verify that the set of addresses it actually bound matches `expected`
    (a list of (address, port) pairs). The node is always stopped again.
    '''
    expected_set = set((addr_to_hex(host), port) for (host, port) in expected)
    args = ['-disablewallet', '-nolisten']
    if allow_cts:
        args += ['-rpcallowip=' + ip for ip in allow_cts]
    args += ['-rpcbind=' + addr for addr in addresses]
    nodes = start_nodes(1, tmpdir, [args], connect_to)
    try:
        pid = bitcoind_processes[0].pid
        assert_equal(set(get_bind_addrs(pid)), expected_set)
    finally:
        stop_nodes(nodes)
        wait_bitcoinds()
def run_allowip_test(tmpdir, allow_cts, rpchost, rpcport):
    '''
    Boot one node with the given -rpcallowip values and issue a getinfo
    RPC against rpchost:rpcport (a non-localhost address). The node is
    always stopped again afterwards.
    '''
    args = ['-disablewallet', '-nolisten']
    args += ['-rpcallowip=' + ip for ip in allow_cts]
    nodes = start_nodes(1, tmpdir, [args])
    node = None
    try:
        # connect to node through non-loopback interface
        url = "http://rt:rt@%s:%d" % (rpchost, rpcport,)
        node = AuthServiceProxy(url)
        node.getinfo()
    finally:
        node = None  # make sure connection will be garbage collected and closed
        stop_nodes(nodes)
        wait_bitcoinds()
def run_test(tmpdir):
    # Exercise the -rpcbind/-rpcallowip permutations end to end.
    # Each run_bind_test call boots a node, checks its bound sockets and
    # shuts it down again; the final block checks that a bad rpcallowip
    # actually denies the connection.
    assert(sys.platform == 'linux2') # due to OS-specific network stats queries, this test works only on Linux
    # find the first non-loopback interface for testing
    non_loopback_ip = None
    for name,ip in all_interfaces():
        if ip != '127.0.0.1':
            non_loopback_ip = ip
            break
    if non_loopback_ip is None:
        # `assert(not <non-empty string>)` is a py2 idiom for "fail with
        # this message in the traceback"
        assert(not 'This test requires at least one non-loopback IPv4 interface')
    print("Using interface %s for testing" % non_loopback_ip)
    defaultport = rpc_port(0)
    # check default without rpcallowip (IPv4 and IPv6 localhost)
    run_bind_test(tmpdir, None, '127.0.0.1', [],
        [('127.0.0.1', defaultport), ('::1', defaultport)])
    # check default with rpcallowip (IPv6 any)
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', [],
        [('::0', defaultport)])
    # check only IPv4 localhost (explicit)
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
        [('127.0.0.1', defaultport)])
    # check only IPv4 localhost (explicit) with alternative port
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
        [('127.0.0.1', 32171)])
    # check only IPv4 localhost (explicit) with multiple alternative ports on same host
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
        [('127.0.0.1', 32171), ('127.0.0.1', 32172)])
    # check only IPv6 localhost (explicit)
    run_bind_test(tmpdir, ['[::1]'], '[::1]', ['[::1]'],
        [('::1', defaultport)])
    # check both IPv4 and IPv6 localhost (explicit)
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
        [('127.0.0.1', defaultport), ('::1', defaultport)])
    # check only non-loopback interface
    run_bind_test(tmpdir, [non_loopback_ip], non_loopback_ip, [non_loopback_ip],
        [(non_loopback_ip, defaultport)])
    # Check that with invalid rpcallowip, we are denied
    run_allowip_test(tmpdir, [non_loopback_ip], non_loopback_ip, defaultport)
    try:
        # an allowip that does not match must make the RPC call fail
        run_allowip_test(tmpdir, ['1.1.1.1'], non_loopback_ip, defaultport)
        assert(not 'Connection not denied by rpcallowip as expected')
    except ValueError:
        pass
def main():
    # Parse CLI options, set up a fresh regtest datadir, run the bind/allowip
    # tests and clean everything up unless --nocleanup was given.
    import optparse
    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
                      help="Leave bitcoinds and test.* datadir on exit or error")
    parser.add_option("--srcdir", dest="srcdir", default="../../src",
                      help="Source directory containing bitcoind/bitcoin-cli (default: %default%)")
    parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
                      help="Root directory for datadirs")
    (options, args) = parser.parse_args()
    # make the locally built binaries win over any installed ones
    os.environ['PATH'] = options.srcdir+":"+os.environ['PATH']
    check_json_precision()
    success = False
    nodes = []
    try:
        print("Initializing test directory "+options.tmpdir)
        if not os.path.isdir(options.tmpdir):
            os.makedirs(options.tmpdir)
        initialize_chain(options.tmpdir)
        run_test(options.tmpdir)
        success = True
    except AssertionError as e:
        # e.message is a Python 2-only attribute (this is a python2 script)
        print("Assertion failed: "+e.message)
    except Exception as e:
        print("Unexpected exception caught during testing: "+str(e))
        traceback.print_tb(sys.exc_info()[2])
    if not options.nocleanup:
        print("Cleaning up")
        # stop any bitcoinds still running before deleting their datadirs
        wait_bitcoinds()
        shutil.rmtree(options.tmpdir)
    if success:
        print("Tests successful")
        sys.exit(0)
    else:
        print("Failed")
        sys.exit(1)
if __name__ == '__main__':
    main()
|
'use strict';
// Transpiled (CommonJS) module prologue: pull in the passive-events helper.
var _passiveEvents = require('./passiveEvents');
var _passiveEvents2 = _interopRequireDefault(_passiveEvents);
// Babel interop shim: wrap non-ES-module exports as { default: ... }.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Reliable `window` and `document` detection
var canUseDOM = !!(typeof window !== 'undefined' && window.document && window.document.createElement);
// Check `document` and `window` in case of server-side rendering
/* eslint-disable */
/**
 * Detect Element Resize.
 * Forked in order to guard against unsafe 'window' and 'document' references.
 *
 * https://github.com/sdecima/javascript-detect-element-resize
 * Sebastian Decima
 *
 * version: 0.5.3
 **/
// Pick a global object that works in browsers, web workers (self) and SSR.
var _window;
if (canUseDOM) {
  _window = window;
} else if (typeof self !== 'undefined') {
  _window = self;
} else {
  _window = undefined;
}
// Legacy-IE code path: element.attachEvent supports a native 'onresize'.
var attachEvent = typeof document !== 'undefined' && document.attachEvent;
// Guard so the helper stylesheet is injected at most once.
var stylesCreated = false;
if (canUseDOM && !attachEvent) {
  // rAF shim: prefer native/vendor requestAnimationFrame, otherwise fall
  // back to a 20ms timeout.
  var requestFrame = function () {
    var raf = _window.requestAnimationFrame || _window.mozRequestAnimationFrame || _window.webkitRequestAnimationFrame || function (fn) {
      return _window.setTimeout(fn, 20);
    };
    return function (fn) {
      return raf(fn);
    };
  }();
  // matching cancel shim for the scheduler chosen above
  var cancelFrame = function () {
    var cancel = _window.cancelAnimationFrame || _window.mozCancelAnimationFrame || _window.webkitCancelAnimationFrame || _window.clearTimeout;
    return function (id) {
      return cancel(id);
    };
  }();
  // Re-arm the expand/contract trigger elements so the *next* size change
  // of the host element produces a scroll event.
  var resetTriggers = function resetTriggers(element) {
    var triggers = element.__resizeTriggers__,
        expand = triggers.firstElementChild,
        contract = triggers.lastElementChild,
        expandChild = expand.firstElementChild;
    contract.scrollLeft = contract.scrollWidth;
    contract.scrollTop = contract.scrollHeight;
    expandChild.style.width = expand.offsetWidth + 1 + 'px';
    expandChild.style.height = expand.offsetHeight + 1 + 'px';
    expand.scrollLeft = expand.scrollWidth;
    expand.scrollTop = expand.scrollHeight;
  };
  // true when the element's size differs from the last recorded size
  var checkTriggers = function checkTriggers(element) {
    return element.offsetWidth != element.__resizeLast__.width || element.offsetHeight != element.__resizeLast__.height;
  };
  // Scroll handler installed on the trigger container: debounce via rAF,
  // and only notify listeners when the size actually changed.
  var scrollListener = function scrollListener(e) {
    var element = this;
    resetTriggers(this);
    if (this.__resizeRAF__) cancelFrame(this.__resizeRAF__);
    this.__resizeRAF__ = requestFrame(function () {
      if (checkTriggers(element)) {
        element.__resizeLast__.width = element.offsetWidth;
        element.__resizeLast__.height = element.offsetHeight;
        element.__resizeListeners__.forEach(function (fn) {
          fn.call(element, e);
        });
      }
    });
  };
  /* Detect CSS Animations support to detect element display/re-attach */
  var animation = false,
      animationstring = 'animation',
      keyframeprefix = '',
      animationstartevent = 'animationstart',
      domPrefixes = 'Webkit Moz O ms'.split(' '),
      startEvents = 'webkitAnimationStart animationstart oAnimationStart MSAnimationStart'.split(' '),
      pfx = '';
  if (canUseDOM) {
    // probe an off-document element for unprefixed, then vendor-prefixed,
    // animation support; remember the matching animationstart event name
    var elm = document.createElement('fakeelement');
    if (elm.style.animationName !== undefined) {
      animation = true;
    }
    if (animation === false) {
      for (var i = 0; i < domPrefixes.length; i++) {
        if (elm.style[domPrefixes[i] + 'AnimationName'] !== undefined) {
          pfx = domPrefixes[i];
          animationstring = pfx + 'Animation';
          keyframeprefix = '-' + pfx.toLowerCase() + '-';
          animationstartevent = startEvents[i];
          animation = true;
          break;
        }
      }
    }
  }
  // no-op keyframes whose only purpose is to fire animationstart when a
  // trigger element is (re)attached to the document
  var animationName = 'resizeanim';
  var animationKeyframes = '@' + keyframeprefix + 'keyframes ' + animationName + ' { from { opacity: 0; } to { opacity: 0; } } ';
  var animationStyle = keyframeprefix + 'animation: 1ms ' + animationName + '; ';
}
// Inject the helper stylesheet for the trigger elements exactly once.
// Uses styleSheet.cssText on legacy IE, a text node everywhere else.
var createStyles = function createStyles() {
  if (!stylesCreated) {
    //opacity:0 works around a chrome bug https://code.google.com/p/chromium/issues/detail?id=286360
    var css = (animationKeyframes ? animationKeyframes : '') + '.resize-triggers { ' + (animationStyle ? animationStyle : '') + 'visibility: hidden; opacity: 0; } ' + '.resize-triggers, .resize-triggers > div, .contract-trigger:before { content: " "; display: block; position: absolute; top: 0; left: 0; height: 100%; width: 100%; overflow: hidden; } .resize-triggers > div { background: #eee; overflow: auto; } .contract-trigger:before { width: 200%; height: 200%; }',
        head = document.head || document.getElementsByTagName('head')[0],
        style = document.createElement('style');
    style.type = 'text/css';
    if (style.styleSheet) {
      style.styleSheet.cssText = css;
    } else {
      style.appendChild(document.createTextNode(css));
    }
    head.appendChild(style);
    stylesCreated = true;
  }
};
// Register fn to be called whenever element's *parent* resizes.
// Legacy IE uses the native 'onresize' event; everyone else gets hidden
// scroll-trigger children plus a scroll listener (see scrollListener above).
var addResizeListener = function addResizeListener(element, fn) {
  // NOTE(review): `parentNode === undefined` is never true for a detached
  // DOM node (its parentNode is null, not undefined) -- this guard only
  // fires for non-DOM objects; confirm whether null should be handled too.
  if (element.parentNode === undefined) {
    var tempParentDiv = document.createElement('div');
    element.parentNode = tempParentDiv;
  }
  // listeners and triggers are installed on the parent, not the element
  element = element.parentNode;
  if (attachEvent) element.attachEvent('onresize', fn);else {
    if (!element.__resizeTriggers__) {
      // absolutely-positioned triggers need a positioned ancestor
      if (getComputedStyle(element).position == 'static') element.style.position = 'relative';
      createStyles();
      element.__resizeLast__ = {};
      element.__resizeListeners__ = [];
      (element.__resizeTriggers__ = document.createElement('div')).className = 'resize-triggers';
      element.__resizeTriggers__.innerHTML = '<div class="expand-trigger"><div></div></div>' + '<div class="contract-trigger"></div>';
      element.appendChild(element.__resizeTriggers__);
      resetTriggers(element);
      (0, _passiveEvents2.default)(element, 'scroll', scrollListener, true);
      /* Listen for a css animation to detect element display/re-attach */
      animationstartevent && element.__resizeTriggers__.addEventListener(animationstartevent, function (e) {
        if (e.animationName == animationName) resetTriggers(element);
      });
    }
    element.__resizeListeners__.push(fn);
  }
};
// Unregister a listener added via addResizeListener; when the last listener
// for an element goes away, its trigger children are removed as well.
var removeResizeListener = function removeResizeListener(element, fn) {
  // mirror addResizeListener: state lives on the parent node
  element = element.parentNode;
  if (attachEvent) element.detachEvent('onresize', fn);else {
    element.__resizeListeners__.splice(element.__resizeListeners__.indexOf(fn), 1);
    if (!element.__resizeListeners__.length) {
      element.removeEventListener('scroll', scrollListener);
      // removeChild returns the removed node (truthy), so this stores false
      element.__resizeTriggers__ = !element.removeChild(element.__resizeTriggers__);
    }
  }
};
module.exports = {
  addResizeListener: addResizeListener,
  removeResizeListener: removeResizeListener
};
|
import Bree from 'bree';
import Cabin from 'cabin';
import Graceful from '@ladjs/graceful';
import { Signale } from 'signale';
// initialize cabin
const cabin = new Cabin({ axe: { logger: new Signale() } });
/**
 * Build and start the Bree job scheduler.
 * Registers the two recurring notification jobs, wires up graceful
 * shutdown handling, and kicks the scheduler off.
 */
const breeScheduler = () => {
  // recurring jobs and their human-readable schedules
  const jobs = [
    {
      name: 'scheduledGlucoseCheckins',
      interval: 'at 10:00 am also at 2:30 pm also at 7:00 pm',
    },
    {
      name: 'scheduledDiabetesTips',
      interval: 'at 8:00 am also at 9:00 pm',
    },
  ];
  const scheduler = new Bree({ logger: cabin, jobs });
  // let @ladjs/graceful shut the scheduler down cleanly on process signals
  new Graceful({ brees: [scheduler] }).listen();
  scheduler.start();
};
export default breeScheduler;
|
__author__ = 'gpatarkatsishvili'
from bs4 import BeautifulSoup
import requests
class CONCRETE_JOBS:
    """Scrape a single job posting from jobs.ge.

    Fetches ``http://jobs.ge/<id>/`` and extracts the headline key/value
    fields plus the free-text description of the posting.
    """

    def __init__(self, url):
        # Per-instance state. These used to be *class*-level mutable
        # attributes, so every instance shared (and kept appending to) the
        # same list/dict -- repeated lookups accumulated stale headline rows.
        self._job_headline = []
        self._job_info = {}
        self.url = "http://jobs.ge/" + url + "/"
        self.jobsGe_request = requests.get(self.url)
        # NOTE: no explicit parser is passed; bs4 picks the best installed
        # one, which can differ between environments
        self.soup = BeautifulSoup(self.jobsGe_request.text)

    def get_concrete(self):
        """Return a dict with the headline fields plus the description."""
        # get headline info of concrete job
        self._job_info = self.get__job_headline()
        # get description of concrete job
        self._job_info['აღწერილობა'] = self.get_job_description()
        return self._job_info

    def get__job_headline(self):
        """Parse the 'adtitle' table cells into a key/value dict."""
        for headline in self.soup.find_all('td', {'class': "adtitle"}):
            self._job_headline.append(headline.get_text())
        # convert list as string, split text as key-value and next convert as dictionary
        _job_headline_as_string = ','.join(self._job_headline).replace("/", ",")
        return dict(x.strip().split(':') for x in _job_headline_as_string.split(','))

    def get_job_description(self):
        """Return the text of the last 'ad' table (the posting body)."""
        return self.soup.find_all('table', {'class': "ad"})[-1].get_text()

    @staticmethod
    def get_job_info(id):
        """Convenience wrapper: fetch and parse the posting with this id."""
        concrete_job = CONCRETE_JOBS(id)
        return concrete_job.get_concrete()
|
// Dojo NLS bundle: Portuguese (Portugal) strings for the dojox
// EnhancedGrid widget. The values are runtime l10n data; ${0}/${1} are
// substitution placeholders filled in by the grid at runtime.
define(
"dojox/grid/enhanced/nls/pt-pt/EnhancedGrid", ({
	singleSort: "Ordenação única",
	nestedSort: "Ordenação imbricada",
	ascending: "Faça clique para ordenar Ascendente",
	descending: "Faça clique para ordenar Descendente",
	sortingState: "${0} - ${1}",
	unsorted: "Não ordenar esta coluna",
	indirectSelectionRadio: "Fila ${0}, selecção única, caixa de opção",
	indirectSelectionCheckBox: "Fila ${0}, selecção múltipla, caixa de verificação",
	selectAll: "Seleccionar tudo"
})
);
|
from decimal import Decimal
_ = lambda x:x
#from i18n import _
from electrum import WalletStorage, Wallet
from electrum.util import format_satoshis, set_verbosity
from electrum.bitcoin import is_valid, COIN, TYPE_ADDRESS
from electrum.network import filter_protocol
import sys, getpass, datetime
# minimal fdisk like gui for console usage
# written by rofl0r, with some bits stolen from the text gui (ncurses)
class ElectrumGui:
    """Minimal interactive stdin/stdout console for an Electrum wallet.

    Reads single-letter commands in a loop (see print_commands) and
    prints results directly; no curses UI is used.
    """
    def __init__(self, config, daemon, plugins):
        self.config = config
        self.network = daemon.network
        # Open (and, if needed, decrypt) the wallet file before setting
        # up the rest of the state.
        storage = WalletStorage(config.get_wallet_path())
        if not storage.file_exists:
            print "Wallet not found. try 'electrum-ion create'"
            exit()
        if storage.is_encrypted():
            password = getpass.getpass('Password:', stream=None)
            storage.decrypt(password)
        self.done = 0  # set to 1 by the 'q' command; main() loops until then
        self.last_balance = ""
        set_verbosity(False)
        # Pending payment order fields, filled by enter_order().
        self.str_recipient = ""
        self.str_description = ""
        self.str_amount = ""
        self.str_fee = ""
        self.wallet = Wallet(storage)
        self.wallet.start_threads(self.network)
        self.contacts = self.wallet.contacts
        # Receive asynchronous wallet/server notifications.
        self.network.register_callback(self.on_network, ['updated', 'banner'])
        self.commands = [_("[h] - displays this help text"), \
                         _("[i] - display transaction history"), \
                         _("[o] - enter payment order"), \
                         _("[p] - print stored payment order"), \
                         _("[s] - send stored payment order"), \
                         _("[r] - show own receipt addresses"), \
                         _("[c] - display contacts"), \
                         _("[b] - print server banner"), \
                         _("[q] - quit") ]
        self.num_commands = len(self.commands)
    def on_network(self, event, *args):
        # Network-thread callback for the events registered above.
        if event == 'updated':
            self.updated()
        elif event == 'banner':
            self.print_banner()
    def main_command(self):
        """Read one command letter and dispatch it."""
        self.print_balance()
        c = raw_input("enter command: ")
        if c == "h" : self.print_commands()
        elif c == "i" : self.print_history()
        elif c == "o" : self.enter_order()
        elif c == "p" : self.print_order()
        elif c == "s" : self.send_order()
        elif c == "r" : self.print_addresses()
        elif c == "c" : self.print_contacts()
        elif c == "b" : self.print_banner()
        # 'n' and 'e' are accepted but not listed in self.commands.
        elif c == "n" : self.network_dialog()
        elif c == "e" : self.settings_dialog()
        elif c == "q" : self.done = 1
        else: self.print_commands()
    def updated(self):
        # Print the balance only when it actually changed.
        s = self.get_balance()
        if s != self.last_balance:
            print(s)
        self.last_balance = s
        return True
    def print_commands(self):
        self.print_list(self.commands, "Available commands")
    def print_history(self):
        """Print the wallet transaction history as an aligned table."""
        width = [20, 40, 14, 14]
        # Python 2 integer division: spread the leftover columns evenly.
        delta = (80 - sum(width) - 4)/3
        format_str = "%"+"%d"%width[0]+"s"+"%"+"%d"%(width[1]+delta)+"s"+"%" \
        + "%d"%(width[2]+delta)+"s"+"%"+"%d"%(width[3]+delta)+"s"
        b = 0
        messages = []
        for item in self.wallet.get_history():
            tx_hash, confirmations, value, timestamp, balance = item
            if confirmations:
                try:
                    # Drop sub-second precision from the ISO timestamp.
                    time_str = datetime.datetime.fromtimestamp(timestamp).isoformat(' ')[:-3]
                except Exception:
                    time_str = "unknown"
            else:
                time_str = 'unconfirmed'
            label = self.wallet.get_label(tx_hash)
            messages.append( format_str%( time_str, label, format_satoshis(value, whitespaces=True), format_satoshis(balance, whitespaces=True) ) )
        # Newest first.
        self.print_list(messages[::-1], format_str%( _("Date"), _("Description"), _("Amount"), _("Balance")))
    def print_balance(self):
        print(self.get_balance())
    def get_balance(self):
        """Return a one-line balance summary string."""
        if self.wallet.network.is_connected():
            if not self.wallet.up_to_date:
                msg = _( "Synchronizing..." )
            else:
                # c/u/x: confirmed, unconfirmed, unmatured amounts.
                c, u, x = self.wallet.get_balance()
                msg = _("Balance")+": %f "%(Decimal(c) / COIN)
                if u:
                    msg += " [%f unconfirmed]"%(Decimal(u) / COIN)
                if x:
                    msg += " [%f unmatured]"%(Decimal(x) / COIN)
        else:
            msg = _( "Not connected" )
        return(msg)
    def print_contacts(self):
        messages = map(lambda x: "%20s %45s "%(x[0], x[1][1]), self.contacts.items())
        self.print_list(messages, "%19s %25s "%("Key", "Value"))
    def print_addresses(self):
        messages = map(lambda addr: "%30s %30s "%(addr, self.wallet.labels.get(addr,"")), self.wallet.get_addresses())
        self.print_list(messages, "%19s %25s "%("Address", "Label"))
    def print_order(self):
        # Show the payment order currently stored in the str_* fields.
        print("send order to " + self.str_recipient + ", amount: " + self.str_amount \
        + "\nfee: " + self.str_fee + ", desc: " + self.str_description)
    def enter_order(self):
        # Fill the stored payment order from interactive input; it is
        # only validated later, in do_send().
        self.str_recipient = raw_input("Pay to: ")
        self.str_description = raw_input("Description : ")
        self.str_amount = raw_input("Amount: ")
        self.str_fee = raw_input("Fee: ")
    def send_order(self):
        self.do_send()
    def print_banner(self):
        for i, x in enumerate( self.wallet.network.banner.split('\n') ):
            print( x )
    def print_list(self, list, firstline):
        # NOTE: the parameter shadows the builtin `list`.
        self.maxpos = len(list)
        if not self.maxpos: return
        print(firstline)
        for i in range(self.maxpos):
            msg = list[i] if i < len(list) else ""
            print(msg)
    def main(self):
        # Command loop; runs until 'q' sets self.done.
        while self.done == 0: self.main_command()
    def do_send(self):
        """Validate the stored order, confirm, sign and broadcast it."""
        if not is_valid(self.str_recipient):
            print(_('Invalid ION address'))
            return
        try:
            # Amount/fee are entered in coin units; convert to satoshis.
            amount = int(Decimal(self.str_amount) * COIN)
        except Exception:
            print(_('Invalid Amount'))
            return
        try:
            fee = int(Decimal(self.str_fee) * COIN)
        except Exception:
            print(_('Invalid Fee'))
            return
        if self.wallet.use_encryption:
            password = self.password_dialog()
            if not password:
                return
        else:
            password = None
        # Require an explicit 'y'; 'n' aborts, anything else re-asks.
        c = ""
        while c != "y":
            c = raw_input("ok to send (y/n)?")
            if c == "n": return
        try:
            tx = self.wallet.mktx([(TYPE_ADDRESS, self.str_recipient, amount)], password, self.config, fee)
        except Exception as e:
            print(str(e))
            return
        if self.str_description:
            self.wallet.labels[tx.hash()] = self.str_description
        print(_("Please wait..."))
        status, msg = self.network.broadcast(tx)
        if status:
            print(_('Payment sent.'))
            #self.do_clear()
            #self.update_contacts_tab()
        else:
            print(_('Error'))
    def network_dialog(self):
        print("use 'electrum-ion setconfig server/proxy' to change your network settings")
        return True
    def settings_dialog(self):
        print("use 'electrum-ion setconfig' to change your settings")
        return True
    def password_dialog(self):
        return getpass.getpass()
    # XXX unused
    def run_receive_tab(self, c):
        #if c == 10:
        # out = self.run_popup('Address', ["Edit label", "Freeze", "Prioritize"])
        return
    def run_contacts_tab(self, c):
        pass
|
"""HTTP server base class.
Note: the class in this module doesn't implement any HTTP request; see
SimpleHTTPServer for simple implementations of GET, HEAD and POST
(including CGI scripts). It does, however, optionally implement HTTP/1.1
persistent connections, as of version 0.3.
Contents:
- BaseHTTPRequestHandler: HTTP request handler base class
- test: test function
XXX To do:
- log requests even later (to capture byte count)
- log user-agent header and other interesting goodies
- send error log to separate file
"""
# See also:
#
# HTTP Working Group T. Berners-Lee
# INTERNET-DRAFT R. T. Fielding
# <draft-ietf-http-v10-spec-00.txt> H. Frystyk Nielsen
# Expires September 8, 1995 March 8, 1995
#
# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt
#
# and
#
# Network Working Group R. Fielding
# Request for Comments: 2616 et al
# Obsoletes: 2068 June 1999
# Category: Standards Track
#
# URL: http://www.faqs.org/rfcs/rfc2616.html
# Log files
# ---------
#
# Here's a quote from the NCSA httpd docs about log file format.
#
# | The logfile format is as follows. Each line consists of:
# |
# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb
# |
# | host: Either the DNS name or the IP number of the remote client
# | rfc931: Any information returned by identd for this person,
# | - otherwise.
# | authuser: If user sent a userid for authentication, the user name,
# | - otherwise.
# | DD: Day
# | Mon: Month (calendar name)
# | YYYY: Year
# | hh: hour (24-hour format, the machine's timezone)
# | mm: minutes
# | ss: seconds
# | request: The first line of the HTTP request as sent by the client.
# | ddd: the status code returned by the server, - if not available.
# | bbbb: the total number of bytes sent,
# | *not including the HTTP/1.0 header*, - if not available
# |
# | You can determine the name of the file accessed through request.
#
# (Actually, the latter is only true if you know the server configuration
# at the time the request was made!)
__version__ = "0.3"
__all__ = ["HTTPServer", "BaseHTTPRequestHandler"]
import sys
import time
import socket # For gethostbyaddr()
from warnings import filterwarnings, catch_warnings
with catch_warnings():
if sys.py3kwarning:
filterwarnings("ignore", ".*mimetools has been removed",
DeprecationWarning)
import mimetools
import SocketServer
# Default error message template
# Filled by %-interpolation with the keys 'code', 'message' and
# 'explain' in BaseHTTPRequestHandler.send_error(); 'message' is
# HTML-escaped there before substitution.
DEFAULT_ERROR_MESSAGE = """\
<head>
<title>Error response</title>
</head>
<body>
<h1>Error response</h1>
<p>Error code %(code)d.
<p>Message: %(message)s.
<p>Error code explanation: %(code)s = %(explain)s.
</body>
"""
# Content-Type header value sent with the error body above.
DEFAULT_ERROR_CONTENT_TYPE = "text/html"
def _quote_html(html):
return html.replace("&", "&").replace("<", "<").replace(">", ">")
class HTTPServer(SocketServer.TCPServer):
    """TCP server that additionally records its fully-qualified host
    name and bound port for use in request handlers."""
    allow_reuse_address = 1    # Seems to make sense in testing environment
    def server_bind(self):
        """Override server_bind to store the server name."""
        SocketServer.TCPServer.server_bind(self)
        # getsockname() may return more than (host, port) for some
        # address families; only the first two fields are needed.
        host, port = self.socket.getsockname()[:2]
        self.server_name = socket.getfqdn(host)
        self.server_port = port
class BaseHTTPRequestHandler(SocketServer.StreamRequestHandler):
    """HTTP request handler base class.
    The following explanation of HTTP serves to guide you through the
    code as well as to expose any misunderstandings I may have about
    HTTP (so you don't need to read the code to figure out I'm wrong
    :-).
    HTTP (HyperText Transfer Protocol) is an extensible protocol on
    top of a reliable stream transport (e.g. TCP/IP). The protocol
    recognizes three parts to a request:
    1. One line identifying the request type and path
    2. An optional set of RFC-822-style headers
    3. An optional data part
    The headers and data are separated by a blank line.
    The first line of the request has the form
    <command> <path> <version>
    where <command> is a (case-sensitive) keyword such as GET or POST,
    <path> is a string containing path information for the request,
    and <version> should be the string "HTTP/1.0" or "HTTP/1.1".
    <path> is encoded using the URL encoding scheme (using %xx to signify
    the ASCII character with hex code xx).
    The specification specifies that lines are separated by CRLF but
    for compatibility with the widest range of clients recommends
    servers also handle LF. Similarly, whitespace in the request line
    is treated sensibly (allowing multiple spaces between components
    and allowing trailing whitespace).
    Similarly, for output, lines ought to be separated by CRLF pairs
    but most clients grok LF characters just fine.
    If the first line of the request has the form
    <command> <path>
    (i.e. <version> is left out) then this is assumed to be an HTTP
    0.9 request; this form has no optional headers and data part and
    the reply consists of just the data.
    The reply form of the HTTP 1.x protocol again has three parts:
    1. One line giving the response code
    2. An optional set of RFC-822-style headers
    3. The data
    Again, the headers and data are separated by a blank line.
    The response code line has the form
    <version> <responsecode> <responsestring>
    where <version> is the protocol version ("HTTP/1.0" or "HTTP/1.1"),
    <responsecode> is a 3-digit response code indicating success or
    failure of the request, and <responsestring> is an optional
    human-readable string explaining what the response code means.
    This server parses the request and the headers, and then calls a
    function specific to the request type (<command>). Specifically,
    a request SPAM will be handled by a method do_SPAM(). If no
    such method exists the server sends an error response to the
    client. If it exists, it is called with no arguments:
    do_SPAM()
    Note that the request name is case sensitive (i.e. SPAM and spam
    are different requests).
    The various request details are stored in instance variables:
    - client_address is the client IP address in the form (host,
    port);
    - command, path and version are the broken-down request line;
    - headers is an instance of mimetools.Message (or a derived
    class) containing the header information;
    - rfile is a file object open for reading positioned at the
    start of the optional input data part;
    - wfile is a file object open for writing.
    IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING!
    The first thing to be written must be the response line. Then
    follow 0 or more header lines, then a blank line, and then the
    actual data (if any). The meaning of the header lines depends on
    the command executed by the server; in most cases, when data is
    returned, there should be at least one header line of the form
    Content-type: <type>/<subtype>
    where <type> and <subtype> should be registered MIME types,
    e.g. "text/html" or "text/plain".
    """
    # The Python system version, truncated to its first component.
    sys_version = "Python/" + sys.version.split()[0]
    # The server software version. You may want to override this.
    # The format is multiple whitespace-separated strings,
    # where each string is of the form name[/version].
    server_version = "BaseHTTP/" + __version__
    # The default request version. This only affects responses up until
    # the point where the request line is parsed, so it mainly decides what
    # the client gets back when sending a malformed request line.
    # Most web servers default to HTTP 0.9, i.e. don't send a status line.
    default_request_version = "HTTP/0.9"
    def parse_request(self):
        """Parse a request (internal).
        The request should be stored in self.raw_requestline; the results
        are in self.command, self.path, self.request_version and
        self.headers.
        Return True for success, False for failure; on failure, an
        error is sent back.
        """
        self.command = None # set in case of error on the first line
        self.request_version = version = self.default_request_version
        # Assume a one-shot connection until the parsed version (and
        # headers) prove that keep-alive is possible.
        self.close_connection = 1
        requestline = self.raw_requestline
        requestline = requestline.rstrip('\r\n')
        self.requestline = requestline
        words = requestline.split()
        if len(words) == 3:
            # Full HTTP/1.x request line: <command> <path> <version>.
            command, path, version = words
            if version[:5] != 'HTTP/':
                self.send_error(400, "Bad request version (%r)" % version)
                return False
            try:
                base_version_number = version.split('/', 1)[1]
                version_number = base_version_number.split(".")
                # RFC 2145 section 3.1 says there can be only one "." and
                # - major and minor numbers MUST be treated as
                # separate integers;
                # - HTTP/2.4 is a lower version than HTTP/2.13, which in
                # turn is lower than HTTP/12.3;
                # - Leading zeros MUST be ignored by recipients.
                if len(version_number) != 2:
                    raise ValueError
                version_number = int(version_number[0]), int(version_number[1])
            except (ValueError, IndexError):
                self.send_error(400, "Bad request version (%r)" % version)
                return False
            # Persistent connections are the HTTP/1.1 default, provided
            # this server is also configured for HTTP/1.1.
            if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1":
                self.close_connection = 0
            if version_number >= (2, 0):
                self.send_error(505,
                    "Invalid HTTP Version (%s)" % base_version_number)
                return False
        elif len(words) == 2:
            # HTTP/0.9 request: no version token, GET only, no headers.
            command, path = words
            self.close_connection = 1
            if command != 'GET':
                self.send_error(400,
                    "Bad HTTP/0.9 request type (%r)" % command)
                return False
        elif not words:
            return False
        else:
            self.send_error(400, "Bad request syntax (%r)" % requestline)
            return False
        self.command, self.path, self.request_version = command, path, version
        # Examine the headers and look for a Connection directive
        self.headers = self.MessageClass(self.rfile, 0)
        conntype = self.headers.get('Connection', "")
        if conntype.lower() == 'close':
            self.close_connection = 1
        elif (conntype.lower() == 'keep-alive' and
              self.protocol_version >= "HTTP/1.1"):
            self.close_connection = 0
        return True
    def handle_one_request(self):
        """Handle a single HTTP request.
        You normally don't need to override this method; see the class
        __doc__ string for information on how to handle specific HTTP
        commands such as GET and POST.
        """
        try:
            # Read at most 64K+1 bytes: reading one byte past the limit
            # lets us detect an over-long request line without reading
            # it all.
            self.raw_requestline = self.rfile.readline(65537)
            if len(self.raw_requestline) > 65536:
                self.requestline = ''
                self.request_version = ''
                self.command = ''
                self.send_error(414)
                return
            if not self.raw_requestline:
                # EOF: client closed the connection.
                self.close_connection = 1
                return
            if not self.parse_request():
                # An error code has been sent, just exit
                return
            # Dispatch to do_<COMMAND>() on self, if it exists.
            mname = 'do_' + self.command
            if not hasattr(self, mname):
                self.send_error(501, "Unsupported method (%r)" % self.command)
                return
            method = getattr(self, mname)
            method()
            self.wfile.flush() #actually send the response if not already done.
        except socket.timeout, e:
            #a read or a write timed out. Discard this connection
            self.log_error("Request timed out: %r", e)
            self.close_connection = 1
            return
    def handle(self):
        """Handle multiple requests if necessary."""
        self.close_connection = 1
        # Serve requests until something (EOF, error, Connection: close)
        # sets close_connection.
        self.handle_one_request()
        while not self.close_connection:
            self.handle_one_request()
    def send_error(self, code, message=None):
        """Send and log an error reply.
        Arguments are the error code, and a detailed message.
        The detailed message defaults to the short entry matching the
        response code.
        This sends an error response (so it must be called before any
        output has been generated), logs the error, and finally sends
        a piece of HTML explaining the error to the user.
        """
        try:
            short, long = self.responses[code]
        except KeyError:
            short, long = '???', '???'
        if message is None:
            message = short
        explain = long
        self.log_error("code %d, message %s", code, message)
        # using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
        content = (self.error_message_format %
                   {'code': code, 'message': _quote_html(message), 'explain': explain})
        self.send_response(code, message)
        self.send_header("Content-Type", self.error_content_type)
        # Errors always terminate the connection.
        self.send_header('Connection', 'close')
        self.end_headers()
        # No body for HEAD requests or for codes that forbid one.
        if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
            self.wfile.write(content)
    # Overridable error-page template and its content type.
    error_message_format = DEFAULT_ERROR_MESSAGE
    error_content_type = DEFAULT_ERROR_CONTENT_TYPE
    def send_response(self, code, message=None):
        """Send the response header and log the response code.
        Also send two standard headers with the server software
        version and the current date.
        """
        self.log_request(code)
        if message is None:
            if code in self.responses:
                message = self.responses[code][0]
            else:
                message = ''
        # HTTP/0.9 replies have no status line or headers at all.
        if self.request_version != 'HTTP/0.9':
            self.wfile.write("%s %d %s\r\n" %
                             (self.protocol_version, code, message))
            # print (self.protocol_version, code, message)
        self.send_header('Server', self.version_string())
        self.send_header('Date', self.date_time_string())
    def send_header(self, keyword, value):
        """Send a MIME header."""
        if self.request_version != 'HTTP/0.9':
            self.wfile.write("%s: %s\r\n" % (keyword, value))
        # Track Connection headers we emit ourselves so handle() knows
        # whether to keep the connection open.
        if keyword.lower() == 'connection':
            if value.lower() == 'close':
                self.close_connection = 1
            elif value.lower() == 'keep-alive':
                self.close_connection = 0
    def end_headers(self):
        """Send the blank line ending the MIME headers."""
        if self.request_version != 'HTTP/0.9':
            self.wfile.write("\r\n")
    def log_request(self, code='-', size='-'):
        """Log an accepted request.
        This is called by send_response().
        """
        self.log_message('"%s" %s %s',
                         self.requestline, str(code), str(size))
    def log_error(self, format, *args):
        """Log an error.
        This is called when a request cannot be fulfilled. By
        default it passes the message on to log_message().
        Arguments are the same as for log_message().
        XXX This should go to the separate error log.
        """
        self.log_message(format, *args)
    def log_message(self, format, *args):
        """Log an arbitrary message.
        This is used by all other logging functions. Override
        it if you have specific logging wishes.
        The first argument, FORMAT, is a format string for the
        message to be logged. If the format string contains
        any % escapes requiring parameters, they should be
        specified as subsequent arguments (it's just like
        printf!).
        The client ip address and current date/time are prefixed to every
        message.
        """
        sys.stderr.write("%s - - [%s] %s\n" %
                         (self.client_address[0],
                          self.log_date_time_string(),
                          format%args))
    def version_string(self):
        """Return the server software version string."""
        return self.server_version + ' ' + self.sys_version
    def date_time_string(self, timestamp=None):
        """Return the current date and time formatted for a message header."""
        if timestamp is None:
            timestamp = time.time()
        # RFC 1123 date format, always in GMT.
        year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp)
        s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
                self.weekdayname[wd],
                day, self.monthname[month], year,
                hh, mm, ss)
        return s
    def log_date_time_string(self):
        """Return the current time formatted for logging."""
        now = time.time()
        year, month, day, hh, mm, ss, x, y, z = time.localtime(now)
        s = "%02d/%3s/%04d %02d:%02d:%02d" % (
                day, self.monthname[month], year, hh, mm, ss)
        return s
    # English day/month names for the date formats above (HTTP dates
    # must not be localized).
    weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
    monthname = [None,
                 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
                 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
    def address_string(self):
        """Return the client address formatted for logging.
        This version looks up the full hostname using gethostbyaddr(),
        and tries to find a name that contains at least one dot.
        """
        host, port = self.client_address[:2]
        return socket.getfqdn(host)
    # Essentially static class variables
    # The version of the HTTP protocol we support.
    # Set this to HTTP/1.1 to enable automatic keepalive
    protocol_version = "HTTP/1.0"
    # The Message-like class used to parse headers
    MessageClass = mimetools.Message
    # Table mapping response codes to messages; entries have the
    # form {code: (shortmessage, longmessage)}.
    # See RFC 2616.
    responses = {
        100: ('Continue', 'Request received, please continue'),
        101: ('Switching Protocols',
              'Switching to new protocol; obey Upgrade header'),
        200: ('OK', 'Request fulfilled, document follows'),
        201: ('Created', 'Document created, URL follows'),
        202: ('Accepted',
              'Request accepted, processing continues off-line'),
        203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
        204: ('No Content', 'Request fulfilled, nothing follows'),
        205: ('Reset Content', 'Clear input form for further input.'),
        206: ('Partial Content', 'Partial content follows.'),
        300: ('Multiple Choices',
              'Object has several resources -- see URI list'),
        301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
        302: ('Found', 'Object moved temporarily -- see URI list'),
        303: ('See Other', 'Object moved -- see Method and URL list'),
        304: ('Not Modified',
              'Document has not changed since given time'),
        305: ('Use Proxy',
              'You must use proxy specified in Location to access this '
              'resource.'),
        307: ('Temporary Redirect',
              'Object moved temporarily -- see URI list'),
        400: ('Bad Request',
              'Bad request syntax or unsupported method'),
        401: ('Unauthorized',
              'No permission -- see authorization schemes'),
        402: ('Payment Required',
              'No payment -- see charging schemes'),
        403: ('Forbidden',
              'Request forbidden -- authorization will not help'),
        404: ('Not Found', 'Nothing matches the given URI'),
        405: ('Method Not Allowed',
              'Specified method is invalid for this resource.'),
        406: ('Not Acceptable', 'URI not available in preferred format.'),
        407: ('Proxy Authentication Required', 'You must authenticate with '
              'this proxy before proceeding.'),
        408: ('Request Timeout', 'Request timed out; try again later.'),
        409: ('Conflict', 'Request conflict.'),
        410: ('Gone',
              'URI no longer exists and has been permanently removed.'),
        411: ('Length Required', 'Client must specify Content-Length.'),
        412: ('Precondition Failed', 'Precondition in headers is false.'),
        413: ('Request Entity Too Large', 'Entity is too large.'),
        414: ('Request-URI Too Long', 'URI is too long.'),
        415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
        416: ('Requested Range Not Satisfiable',
              'Cannot satisfy request range.'),
        417: ('Expectation Failed',
              'Expect condition could not be satisfied.'),
        500: ('Internal Server Error', 'Server got itself in trouble'),
        501: ('Not Implemented',
              'Server does not support this operation'),
        502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
        503: ('Service Unavailable',
              'The server cannot process the request due to a high load'),
        504: ('Gateway Timeout',
              'The gateway server did not receive a timely response'),
        505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
        }
def test(HandlerClass = BaseHTTPRequestHandler,
         ServerClass = HTTPServer, protocol="HTTP/1.0"):
    """Test the HTTP request handler class.
    This runs an HTTP server on port 8000 (or the first command line
    argument).
    """
    if sys.argv[1:]:
        port = int(sys.argv[1])
    else:
        port = 8000
    # '' binds to all interfaces.
    server_address = ('', port)
    # Configure the handler's protocol version before the server is
    # created so every connection uses it.
    HandlerClass.protocol_version = protocol
    httpd = ServerClass(server_address, HandlerClass)
    sa = httpd.socket.getsockname()
    print "Serving HTTP on", sa[0], "port", sa[1], "..."
    httpd.serve_forever()

if __name__ == '__main__':
    test()
|
const mongoose = require('mongoose');

// Connect once at module load time. connect() returns a promise; the
// original left it floating, so a failed connection surfaced as an
// unhandled rejection instead of a readable log line.
mongoose.connect('mongodb://localhost/to-do').catch((err) => {
  console.error('MongoDB connection failed:', err);
});

// Schema for a single to-do item. dueDate is stored as a plain string
// (not Date) to match the existing documents/callers.
const itemSchema = new mongoose.Schema({
  title: String,
  category: String,
  dueDate: String,
  notes: String,
});

// Model bound to the "Items" collection (mongoose pluralizes the name).
const Items = mongoose.model('Items', itemSchema);

module.exports = { Items };
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Namespace for ops used in imperative programming."""
from __future__ import absolute_import
from ..ndarray import numpy as _mx_nd_np
__all__ = ["randint", "uniform", "normal"]
def randint(low, high=None, size=None, dtype=None, **kwargs):
    """Return random integers from `low` (inclusive) to `high` (exclusive).

    Samples come from the "discrete uniform" distribution over the
    half-open interval ``[low, high)``.  When `high` is None (the
    default), samples are drawn from ``[0, low)`` instead.

    Parameters
    ----------
    low : int
        Lowest (signed) integer drawn from the distribution — unless
        ``high=None``, in which case it is one above the *highest*
        such integer.
    high : int, optional
        If given, one above the largest (signed) integer to be drawn.
    size : int or tuple of ints, optional
        Output shape; ``(m, n, k)`` draws ``m * n * k`` samples.
        None (default) returns a single value.
    dtype : dtype, optional
        Desired dtype of the result, named e.g. 'int64' or 'int';
        byteorder is not selectable. Default is 'np.int'.
    ctx : Context, optional
        Device context of output. Default is current context.
    out : ndarray, optional
        The output ndarray (default is `None`).

    Returns
    -------
    out : ndarray of ints
        `size`-shaped array of random integers, or a single random int
        if `size` is not provided.

    Examples
    --------
    >>> np.random.randint(2, size=10)
    array([1, 0, 0, 0, 1, 1, 0, 0, 1, 0])
    >>> np.random.randint(5, size=(2, 4))
    array([[4, 0, 2, 1],
    [3, 2, 2, 0]])
    """
    # Thin delegation: validation and sampling happen in the imperative
    # ndarray backend.
    return _mx_nd_np.random.randint(low, high, size, dtype, **kwargs)
def uniform(low=0.0, high=1.0, size=None, dtype=None, ctx=None, out=None):
    """Draw samples from a uniform distribution over ``[low, high)``.

    Every value in the half-open interval (low included, high excluded)
    is equally likely to be drawn.

    Parameters
    ----------
    low : float, ndarray, optional
        Lower boundary of the output interval; all samples are >= low.
        Default 0.
    high : float, ndarray, optional
        Upper boundary of the output interval; all samples are < high.
        Default 1.0.
    size : int or tuple of ints, optional
        Output shape; ``(m, n, k)`` draws ``m * n * k`` samples.  When
        None (default) and both `low` and `high` are scalars, a scalar
        tensor holding a single value is returned.
    dtype : {'float16', 'float32', 'float64'}, optional
        Data type of output samples. Default is 'float32'.
    ctx : Context, optional
        Device context of output. Default is current context.
    out : ndarray, optional
        Store the result in an existing ndarray.

    Returns
    -------
    out : ndarray
        Drawn samples from the parameterized uniform distribution.
    """
    # Thin delegation to the imperative ndarray backend.
    return _mx_nd_np.random.uniform(
        low, high, size=size, ctx=ctx, dtype=dtype, out=out
    )
def normal(loc=0.0, scale=1.0, size=None, **kwargs):
    """Draw random samples from a normal (Gaussian) distribution.

    Samples follow a normal distribution with mean `loc` and standard
    deviation `scale`.

    Parameters
    ----------
    loc : float, optional
        Mean (centre) of the distribution.
    scale : float, optional
        Standard deviation (spread or "width") of the distribution.
    size : int or tuple of ints, optional
        Output shape; ``(m, n, k)`` draws ``m * n * k`` samples.  When
        None (default) and both `loc` and `scale` are scalars, a scalar
        tensor holding a single value is returned.
    dtype : {'float16', 'float32', 'float64'}, optional
        Data type of output samples. Default is 'float32'.
    ctx : Context, optional
        Device context of output. Default is current context.
    out : ``ndarray``, optional
        Store output to an existing ``ndarray``.

    Returns
    -------
    out : ndarray
        Drawn samples from the parameterized normal distribution.

    Notes
    -----
    This function currently does not support ``loc`` and ``scale`` as
    ndarrays.
    """
    # Thin delegation; dtype/ctx/out travel through **kwargs.
    return _mx_nd_np.random.normal(loc, scale, size, **kwargs)
def multinomial(n, pvals, size=None, **kwargs):
    """multinomial(n, pvals, size=None)

    Draw samples from a multinomial distribution — the multivariate
    generalisation of the binomial.  Think of an experiment with ``p``
    possible outcomes (e.g. a dice throw with outcomes 1..6): each
    sample represents `n` such experiments, and ``X_i = [X_0, X_1, ...,
    X_p]`` counts how many times outcome ``i`` occurred.

    Parameters
    ----------
    n : int
        Number of experiments.
    pvals : sequence of floats, length p
        Probabilities of each of the p different outcomes; these
        should sum to 1.
    size : int or tuple of ints, optional
        Output shape; ``(m, n, k)`` draws ``m * n * k`` samples.
        Default is None, in which case a single value is returned.

    Returns
    -------
    out : ndarray
        The drawn samples, of shape `size` if provided, else ``(N,)``;
        each entry ``out[i,j,...,:]`` is an N-dimensional draw from
        the distribution.

    Examples
    --------
    >>> np.random.multinomial(1000, [1/6.]*6, size=2)
    array([[164, 161, 179, 158, 150, 188],
    [178, 162, 177, 143, 163, 177]])
    >>> np.random.multinomial(100, [1.0 / 3, 2.0 / 3])
    array([32, 68])
    """
    # Thin delegation to the imperative ndarray backend.
    return _mx_nd_np.random.multinomial(n, pvals, size, **kwargs)
|
module.exports = {
name: "respect",
description: "Return A respect GIF!",
execute(message, args, Discord) {
const user = message.mentions.members.first() || message.guild.members.cache.get(args[0]);
if(!user) return message.channel.send('Provide a user to give repect!')
const gifs = [
'https://media.giphy.com/media/U6kTpJ6GIEZ0jdBhNb/giphy.gif',
'https://media.giphy.com/media/l4pT2ASyWWGw4NbDG/giphy.gif',
'https://media.giphy.com/media/WO5Q7FsxJN2pjYc424/giphy.gif',
'https://media.giphy.com/media/JlpjgShzDsrMIyFu5U/giphy.gif',
'https://media.giphy.com/media/1WbNcJYD0ruf8nl3OJ/giphy.gif',
'https://media.giphy.com/media/Qw7r3973Ni5152f0jW/giphy.gif',
];
const result = Math.floor(Math.random() * gifs.length);
return message.channel.send(`${gifs[result]}`).then(() =>
message.channel.send(`${message.author.username} gave respect to ${user.user.username}`))
}
};
|
"""
Copy docs from the napari repo into the napari.github.io repo
and update the table of contents.
By default, will assume that there is a folder named napari.github.io
in the same directory as the napari folder, if not a different copy
destination can be provided.
Read ORGANIZATION.md to learn more about how the documentation sources
are organized, and how everything comes together.
python -m copy-docs [dstdir]
"""
import copy
import os
import os.path as osp
import shutil
import sys
from fnmatch import fnmatch
import yaml
# path to copy and locations to copy to if different
# Each entry is either a plain path (copied to the same relative location)
# or a (src, dst) tuple when the destination path differs.
TO_COPY = [
    'ORGANIZATION.md',
    'glossary.md',
    'developers',
    'community',
    'howtos',
    'release',
    'roadmaps',
    'images',
    osp.join('_templates', 'autosummary'),
    # Versioned docs land under a 'stable' subdirectory at the destination.
    *[
        (dire, osp.join(dire, 'stable'))
        for dire in ('api', 'guides', 'plugins')
    ],
]
# paths to ignore
IGNORE = [
    osp.join('images', 'logo.png'),
]
# Source root: the directory containing this script.
SRC = osp.dirname(__file__)
# File extensions treated as documentation when rebuilding the ToC.
DOC_EXTS = ['.md', '.rst', '.ipynb']
# Paths excluded from the table of contents (they are still copied).
TOC_IGNORE = [
    'api/stable',
    'images',
    '_templates',
    'ORGANIZATION.md',
    'glossary.md',  # this document will still be at the destination ToC
    'guides/stable/_layer_events.md',
    'guides/stable/_viewer_events.md',
    'plugins/stable/_npe2_contributions.md',
    'plugins/stable/_npe2_manifest.md',
    'plugins/stable/_npe2_readers_guide.md',
    'plugins/stable/_npe2_widgets_guide.md',
    'plugins/stable/_npe2_writers_guide.md',
    'plugins/stable/_npe2_sample_data_guide.md',
    'plugins/stable/_layer_data_guide.md',
]
def exclude_filter(path):
    """Return True if *path* should be skipped when copying.

    A path is excluded when it matches an entry of the IGNORE list, or when
    it is a markdown file whose first line is a ```{include}``` directive
    (i.e. it merely duplicates another document).
    """
    if any(fnmatch(path, osp.join(SRC, pattern)) for pattern in IGNORE):
        return True
    if osp.isdir(path) or osp.splitext(path)[1] != '.md':
        return False
    with open(path) as f:
        first_line = f.readline()
    return first_line.startswith('```{include}')  # duplicate file
def copy_path(srcdir, dstdir, path, newpath=None, *, exclude=None):
    """Copy a path from the source directory to the destination directory,
    with the given path relative to the directory roots.

    Parameters
    ----------
    srcdir : path-like
        Source directory root to copy from.
    dstdir : path-like
        Destination directory root to copy to.
    path : path-like
        Path relative to the `srcdir` of the path to copy from.
    newpath : path-like, optional
        Path relative to the `dstdir` of the path to copy to.
        If not provided, will default to the value of `path`.
    exclude : function(path-like) -> bool, keyword-only, optional
        Conditional function on whether to exclude the given path.
        If omitted, nothing is excluded.

    Returns
    -------
    files : list of path-like
        Paths of the copied files.

    Raises
    ------
    RuntimeError
        If `path` is neither a regular file nor a directory.
    """
    if newpath is None:
        newpath = path
    src = osp.join(srcdir, path)
    dst = osp.join(dstdir, newpath)
    # Bug fix: the original called exclude(src) unconditionally, raising
    # TypeError whenever the default exclude=None was left in place.
    if exclude is not None and exclude(src):  # skip this path
        return []
    print(f'copying {src} to {dst}')
    if osp.isfile(src):
        shutil.copyfile(src, dst)
        return [newpath]
    if osp.isdir(src):
        if osp.exists(dst):  # clear out any stale destination directory
            shutil.rmtree(dst)
        os.mkdir(dst)
        files = []
        for child in os.listdir(src):  # recursively copy each child path
            files += copy_path(
                srcdir,
                dstdir,
                osp.join(path, child),
                osp.join(newpath, child),
                exclude=exclude,
            )
        return files
    raise RuntimeError(f'unknown path type {src}')
def copy_paths(src, dst, paths, *, exclude=None):
    """Copy files/directories given a list of their paths from
    the source directory to the destination directory.

    Parameters
    ----------
    src : path-like
        Source directory to copy from.
    dst : path-like
        Destination directory to copy to.
    paths : list of (path-like or 2-tuple of path-like)
        Paths of the files/directories to copy relative to the source directory.
        Pairs of paths in the list signify that the path to copy to is different
        than the path copied from.
    exclude : function(path-like) -> bool, keyword-only, optional
        Conditional function on whether to exclude the given path.

    Returns
    -------
    files : list of path-like
        Paths of the copied files.
    """
    copied = []
    for entry in paths:
        if isinstance(entry, tuple):
            # (source, destination) pair: destination differs from source.
            source_path, dest_path = entry
            copied += copy_path(src, dst, source_path, dest_path, exclude=exclude)
        else:
            copied += copy_path(src, dst, entry, exclude=exclude)
    return copied
def update_toc(toc, paths, ignore=(), doc_exts=None):
    """Update the table of contents according to the paths of all files copied over.

    Parameters
    ----------
    toc : JSON
        Table of contents according to the JupyterBook specification.
    paths : list of path-like
        Paths of the files copied over.
    ignore : sequence of path-like, optional
        Path prefixes to ignore when updating the table of contents.
        (Changed from a mutable default list to an immutable default tuple.)
    doc_exts : sequence of str, optional
        File extensions considered documentation. Defaults to the
        module-level ``DOC_EXTS``.

    Returns
    -------
    new_toc : JSON
        Updated table of contents. The input `toc` is not modified.
    """
    if doc_exts is None:
        doc_exts = DOC_EXTS
    new_toc = copy.deepcopy(toc)
    remaining_paths = []
    # Drop paths on the ignore list and those with a non-documentation extension.
    for path in paths:
        base, ext = osp.splitext(path)
        if any(path.startswith(prefix) for prefix in ignore):
            continue
        if ext in doc_exts:
            remaining_paths.append(base)  # the toc does not include extensions
    chapters = new_toc[1]['chapters']
    for chapter in chapters:
        if (
            'file' not in chapter
            or (index := chapter['file']) not in remaining_paths
        ):
            continue  # skip irrelevant chapters
        parent_dir = osp.dirname(index)
        remaining_paths.remove(index)
        sections = chapter['sections']
        files = [section['file'] for section in sections]
        # Find and remove deleted files from the toc. `j` tracks the position
        # in `sections`, advancing only when an entry is kept.
        j = 0
        for path in files:
            if path in remaining_paths:
                remaining_paths.remove(path)
                j += 1
            else:
                print(f'deleting {path} from toc')
                del sections[j]  # delete from toc
        # Bug fix: the original used a lazy filter() over remaining_paths and
        # then removed items from that same list while iterating, which made
        # the iterator skip entries. Materialize the matches first.
        new_files = [p for p in remaining_paths if p.startswith(parent_dir)]
        for path in new_files:
            print(f'adding {path} to toc')
            sections.append({'file': path})
            remaining_paths.remove(path)
    return new_toc
def main(args):
    """Entry point: copy docs to the destination repo and refresh its ToC.

    ``args[1]``, when present, overrides the default destination (a sibling
    checkout named ``napari.github.io``).
    """
    default_dst = osp.join(
        osp.dirname(osp.dirname(osp.dirname(__file__))), 'napari.github.io'
    )
    dst = args[1] if len(args) > 1 else default_dst
    files = copy_paths(SRC, dst, TO_COPY, exclude=exclude_filter)
    toc_file = osp.join(dst, '_toc.yml')
    with open(toc_file) as f:
        toc = yaml.safe_load(f)
    if toc is None:
        print(f'toc file {toc_file} empty')
        return
    new_toc = update_toc(toc, files, TOC_IGNORE)
    with open(toc_file, 'w') as f:
        yaml.dump(new_toc, f)
|
"""
Django settings for mysite project - run locally with this settings
Generated by 'django-admin startproject' using Django 2.1.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
import os
PROJECT_ROOT = os.path.realpath(os.path.dirname(__file__))
SITE_ROOT = os.path.dirname(PROJECT_ROOT)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '=_u)#r(9b*t6p(v29al6p2d=pg2+hf41_)=r+j^@eh!(nuciqk'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Allow all host hosts/domain names for this site
ALLOWED_HOSTS = ['*']
# Parse database configuration from $DATABASE_URL
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Application definition
INSTALLED_APPS = [
'adapt.apps.NLPConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'mathfilters'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgilocal.application'
# Database
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'PORT' : '5432',
'HOST' : 'localhost',
'NAME' : 'django_development',
'USER' : 'newuser',
'PASSWORD' : 'newuser'
}
}
# Password validation
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
},
'stream_to_console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler'
},
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': 'adapt.log',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins', 'stream_to_console'],
'level': 'ERROR',
'propagate': True,
},
'nlp_logger': {
'handlers': ['file'],
'level': 'INFO',
'propagate': True,
},
}
}
# Internationalization
# https://docs.djangoproject.com/en/dev/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/dev/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
SLACKTOKEN= ""
GOOGLE_SERVICE_JSON="service.json"
|
const express = require('express');
const router = express.Router();
const User = require('../../models/user');
const { param, query, oneOf } = require('express-validator');
const { errorMessages, errorFormatter, hasErrorIn} = require('../../misc/utils');
// Project a user document onto the public fields exposed by the API,
// dropping anything sensitive (e.g. password hashes).
const mapUser = (user) => ({
  id: user.id,
  firstName: user.firstName,
  lastName: user.lastName,
  email: user.email
});
// GET /all — return every user (no pagination), mapped to the public shape.
router.get('/all', (req, res, next) => {
  User.find()
    .then((users) => {
      res.status(200).json({
        length: users.length,
        data: users.map((user) => mapUser(user))
      });
    })
    .catch(next); // delegate DB errors to the express error handler
});
// GET / — paginated user listing with an optional case-insensitive name search.
// Validation: at least one of `search` (non-empty) or `page` (int) must be
// present; `count` (page size) is coerced to an int when given.
router.get('', [
  oneOf([
    query('search')
      .exists()
      //.withMessage(errorMessages.get('shouldExist'))
      .notEmpty()
      //.withMessage(errorMessages.get('shouldNotEmpty'))
      .trim()
      .escape(),
    query('page')
      .exists()
      // .withMessage(errorMessages.get('shouldExist'))
      .toInt(),
  ],
  'Search or page is required.'
  ),
  query('count').toInt()],
  async (req, res, next) => {
    // hasErrorIn wraps express-validator's result collection.
    const [errorResult, errors] = hasErrorIn(req);
    if (errorResult === true) {
      return res
        .status(400)
        .json({
          data: {
            message: errors
          }
        });
    }
    let query = {};
    if (req.query.search) {
      // && req.query.search !== ''
      // handle search
      // Case-insensitive substring match on either first or last name.
      const searchTerm = req.query.search;
      query = {$or:[{firstName:{$regex: searchTerm, $options: 'i'}},{lastName:{$regex: searchTerm, $options: 'i'}}]};
      // query = {email:{$regex: searchTerm, $options: 'i'}};
    }
    const page = req.query.page || 1;
    const perPage = req.query.count || 10;
    // 0-based index of the first item on this page (for the meta block).
    const from = (page - 1) * perPage;
    User.paginate(query, {page, limit: perPage})
      .then(result => {
        const users = result.docs.map(doc => mapUser(doc));
        res
          .status(200)
          .json({
            data: users,
            meta: {
              // NOTE(review): `total`/`pages` match mongoose-paginate v1;
              // v2 renamed these to totalDocs/totalPages — confirm the
              // plugin version in use.
              total: result.total,
              numItems: users.length, // can be smaller than perPage, if we are on the lastPage
              from: from + 1,
              to: from + users.length,
              perPage,
              currentPage: page,
              lastPage: result.pages
            }
          });
      })
      .catch(error => {
        // NOTE(review): database errors surface as 404 here; 500 may be more
        // accurate — confirm the intended API contract.
        res
          .status(404)
          .json({
            data: {
              message: error
            }
          });
      });
});
// PUT /:id — update a user document with the payload in req.body.data.
router.put('/:id', (req, res, next) => {
  const id = req.params.id.toString();
  const body = req.body.data;
  User.findByIdAndUpdate(id, body)
    .then(user => {
      // Bug fix: findByIdAndUpdate resolves with null when no document
      // matched, so report "not found" instead of claiming success.
      if (!user) {
        return res.status(404).json({
          data: {
            id,
            message: 'User was not found.'
          }
        });
      }
      res.status(200).json({
        data: {
          id,
          message: 'User successfully modified!'
        }
      });
    })
    .catch(error => {
      // Malformed ids (CastError) and other DB failures end up here.
      res.status(404).json({
        data: {
          id,
          message: 'User was not found.'
        }
      });
    });
});
// GET /:id — fetch a single user by id, mapped to the public shape.
router.get('/:id', async (req, res, next) => {
  const id = req.params.id.toString();
  try {
    const user = await User.findById(id);
    if (user) {
      return res.status(200).json({
        data: mapUser(user)
      });
    }
  } catch (error) {
    // Bug fix: a malformed id makes findById reject (CastError), which
    // previously escaped as an unhandled promise rejection. Treat it as
    // "not found", consistent with the other routes.
  }
  return res.status(404).json({
    data: {
      id: id,
      message: 'User was not found.',
    }
  });
});
// DELETE /:id — remove a user by id; the id param is sanitized first.
router.delete('/:id', [ param('id').exists().notEmpty().trim().escape() ], async (req, res, next) => {
  const id = req.params.id;
  try {
    const user = await User.findByIdAndDelete(id);
    if (user) {
      return res.status(200).json({
        data: {
          message: `User (${user.email}) is deleted.`
        }
      });
    }
  } catch (error) {
    // Malformed ids reject (CastError); fall through to the 404 below.
  }
  return res.status(404).json({
    // Bug fix: `user` is null on this path, so the original
    // `user.email` dereference threw a TypeError; report the id instead.
    data: {
      message: `User with id ${id} was not found.`,
    }
  });
});
module.exports = router;
|
'use strict';
// Code-generator fragment for encoding a boolean value. `gen` appends one
// line of generated source per call; the emitted code runs with `obj` (the
// value) and `encoder` in scope.
module.exports = gen => {
  gen('if (obj && obj.$class) { obj = obj.$; }'); // unwrap typed wrapper objects
  gen('if (obj == null) { obj = false; }');       // null/undefined encode as false
  gen('encoder.writeBool(obj);');
};
|
// ==========================================================================
// SC Unit Test
// ==========================================================================
/*globals SC */
// Shared fixtures for the test module below.
var statechart = null;
var monitor, root, stateA, stateB, stateC, stateD, stateE, stateF;
module("SC.Statechart: With Concurrent States - Goto State Basic Tests", {
  setup: function() {
    // Root has two concurrent substates (a, b); each has two sequential
    // substates with c and e as the initial ones.
    statechart = SC.Statechart.create({
      monitorIsActive: YES,
      rootState: SC.State.design({
        substatesAreConcurrent: YES,
        a: SC.State.design({
          initialSubstate: 'c',
          c: SC.State.design(),
          d: SC.State.design()
        }),
        b: SC.State.design({
          initialSubstate: 'e',
          e: SC.State.design(),
          f: SC.State.design()
        })
      })
    });
    statechart.initStatechart();
    // Cache the monitor and state handles used by every test.
    monitor = statechart.get('monitor');
    root = statechart.get('rootState');
    stateA = statechart.getState('a');
    stateB = statechart.getState('b');
    stateC = statechart.getState('c');
    stateD = statechart.getState('d');
    stateE = statechart.getState('e');
    stateF = statechart.getState('f');
  },
  teardown: function() {
    statechart.destroy();
    statechart = monitor = root = stateA = stateB = stateC = stateD = stateE = stateF = null;
  }
});
test("check statechart initialization", function() {
equals(monitor.get('length'), 5, 'initial state sequence should be of length 5');
equals(monitor.matchSequence().begin()
.entered(root)
.beginConcurrent()
.beginSequence()
.entered('a', 'c')
.endSequence()
.beginSequence()
.entered('b', 'e')
.endSequence()
.endConcurrent()
.end(),
true, 'initial sequence should be entered[ROOT, a, c, b, e]');
equals(monitor.matchSequence().begin()
.entered(root)
.beginConcurrent()
.entered('a', 'b')
.endConcurrent()
.beginConcurrent()
.entered('c', 'e')
.endConcurrent()
.end(),
false, 'initial sequence should not be entered[ROOT, a, b, c, e]');
equals(statechart.get('currentStateCount'), 2, 'current state count should be 2');
equals(statechart.stateIsCurrentState('c'), true, 'current state should be c');
equals(statechart.stateIsCurrentState('e'), true, 'current state should be e');
equals(statechart.stateIsCurrentState('d'), false, 'current state should not be d');
equals(statechart.stateIsCurrentState('f'), false, 'current state should not be f');
equals(stateA.stateIsCurrentSubstate('c'), true, 'state a\'s current substate should be state c');
equals(stateA.stateIsCurrentSubstate('d'), false, 'state a\'s current substate should not be state d');
equals(stateB.stateIsCurrentSubstate('e'), true, 'state a\'s current substate should be state e');
equals(stateB.stateIsCurrentSubstate('f'), false, 'state a\'s current substate should not be state f');
equals(stateA.get('isCurrentState'), false, 'state a should not be current state');
equals(stateB.get('isCurrentState'), false, 'state b should not be current state');
equals(stateC.get('isCurrentState'), true, 'state c should be current state');
equals(stateD.get('isCurrentState'), false, 'state d should not be current state');
equals(stateE.get('isCurrentState'), true, 'state e should be current state');
equals(stateF.get('isCurrentState'), false, 'state f should not be current state');
ok(monitor.matchEnteredStates(root, 'a', 'c', 'b', 'e'), 'states root, A, C, B and E should all be entered');
});
test("from state c, go to state d, and from state e, go to state f", function() {
monitor.reset();
stateC.gotoState('d');
equals(monitor.get('length'), 2, 'state sequence should be of length 2');
equals(monitor.matchSequence().begin().exited('c').entered('d').end(), true, 'sequence should be exited[c], enterd[d]');
monitor.reset();
stateE.gotoState('f');
equals(monitor.get('length'), 2, 'state sequence should be of length 2');
equals(monitor.matchSequence().begin().exited('e').entered('f').end(), true, 'sequence should be exited[e], enterd[f]');
equals(statechart.get('currentStateCount'), 2, 'current state count should be 2');
equals(statechart.stateIsCurrentState('d'), true, 'current state should be d');
equals(statechart.stateIsCurrentState('f'), true, 'current state should be f');
equals(stateA.stateIsCurrentSubstate('c'), false, 'state a\'s current substate should not be state c');
equals(stateA.stateIsCurrentSubstate('d'), true, 'state a\'s current substate should be state d');
equals(stateB.stateIsCurrentSubstate('e'), false, 'state b\'s current substate should not be state e');
equals(stateB.stateIsCurrentSubstate('f'), true, 'state b\'s current substate should be state f');
equals(stateA.get('isCurrentState'), false, 'state a should not be current state');
equals(stateB.get('isCurrentState'), false, 'state b should not be current state');
equals(stateC.get('isCurrentState'), false, 'state c should not be current state');
equals(stateD.get('isCurrentState'), true, 'state d should be current state');
equals(stateE.get('isCurrentState'), false, 'state e should not be current state');
equals(stateF.get('isCurrentState'), true, 'state f should be current state');
ok(monitor.matchEnteredStates(root, 'a', 'd', 'b', 'f'), 'states root, A, D, B and F should all be entered');
});
test("from state a, go to sibling concurrent state b", function() {
monitor.reset();
// Expect to get an error to be outputted in the JS console, which is what we want since
// the pivot state is the root state and it's substates are concurrent
console.log('expecting to get an error...');
stateA.gotoState('b');
equals(monitor.get('length'), 0, 'state sequence should be of length 0');
equals(statechart.get('currentStateCount'), 2, 'current state count should be 2');
equals(statechart.stateIsCurrentState('c'), true, 'current state should be c');
equals(statechart.stateIsCurrentState('e'), true, 'current state should be e');
equals(stateA.stateIsCurrentSubstate('c'), true, 'state a\'s current substate should be state c');
equals(stateA.stateIsCurrentSubstate('d'), false, 'state a\'s current substate should not be state d');
equals(stateB.stateIsCurrentSubstate('e'), true, 'state a\'s current substate should be state e');
equals(stateB.stateIsCurrentSubstate('f'), false, 'state a\'s current substate should not be state f');
ok(monitor.matchEnteredStates(root, 'a', 'c', 'b', 'e'), 'states root, A, C, B and E should all be entered');
});
|
// Horizontally pannable calendar carousel: a Hammer.js pan gesture drives a
// rebound spring that slides `pannableElement`; once the spring settles past
// half a page, the owner is asked to switch to the previous/next month page.
//
// NOTE(review): `this.props` ({year, month, switchPage}) is read by the spring
// listener but never assigned here — presumably the caller attaches it after
// construction. The month/year helpers (previousMonthNumber etc.) are also
// assumed to be in scope from another module. TODO confirm.
function CalendarCarousel(pannableElement)
{
    this.pannable = pannableElement;

    // Bug fix: the original called the helpers below as bare functions and
    // referenced `this.getDeviceWidth` / `this.slideCalendar` / `this.pan` /
    // `this.panSpring` without ever attaching them to the instance, so every
    // such lookup failed. Attach bound versions up front (function
    // declarations hoist, so this is safe), then invoke the setup helpers
    // with the instance as `this`.
    this.getDeviceWidth = getDeviceWidth;
    this.slideCalendar = slideCalendar.bind(this);
    this.removeSpring = removeSpring.bind(this);
    this.removePan = removePan.bind(this);
    this.pan = pan.bind(this);
    this.panSpring = panSpring.bind(this);
    setupSpring.call(this);
    setupPan.call(this);

    // Widest available measurement of the page, in CSS pixels.
    function getDeviceWidth()
    {
        return Math.max(document.documentElement["clientWidth"],
            document.body["scrollWidth"],
            document.documentElement["scrollWidth"],
            document.body["offsetWidth"],
            document.documentElement["offsetWidth"]);
    }

    // Create the rebound spring that animates the slide and pages the
    // calendar when it comes to rest.
    function setupSpring()
    {
        this.viewportWidth = this.getDeviceWidth();
        this.changePage = null;
        var springSystem = new rebound.SpringSystem();
        this.spring = springSystem.createSpring(100, 15);
        this.spring.setEndValue(0);
        this.spring.addListener(
        {
            onSpringUpdate: function(spring)
            {
                // Map the spring's 0..1 range onto pixels and move the pane.
                var val = spring.getCurrentValue();
                val = rebound.MathUtil.mapValueInRange(val, 0, 1, 0, this.viewportWidth);
                this.slideCalendar(val);
            }.bind(this),
            onSpringAtRest: function()
            {
                switch (this.changePage)
                {
                    case "previous":
                        var previousMonthYear = previousMonthYearNumber(this.props.year, this.props.month);
                        var previousMonth = previousMonthNumber(this.props.month);
                        this.props.switchPage(previousMonthYear, previousMonth);
                        break;
                    case "next":
                        var nextMonthYear = nextMonthYearNumber(this.props.year, this.props.month);
                        var nextMonth = nextMonthNumber(this.props.month);
                        this.props.switchPage(nextMonthYear, nextMonth);
                        break;
                    default:
                        break;
                }
            }.bind(this)
        });
    }

    function slideCalendar(val)
    {
        this.pannable.style.left = val + "px";
    }

    function removeSpring()
    {
        this.spring.destroy();
    }

    // Wire up a horizontal pan recognizer on the pannable element.
    function setupPan()
    {
        this.touchControl = new Hammer.Manager(this.pannable);
        var panOptions = {
            event: 'pan',
            pointers: 0,
            threshold: 0,
            direction: Hammer.DIRECTION_HORIZONTAL
        };
        this.touchControl.add(new Hammer.Pan(panOptions));
        this.touchControl.get('pan').set({ enable: true });
        this.touchControl.on("panleft panright", this.pan);
        this.touchControl.on("panend pancancel", this.panSpring);
    }

    function removePan()
    {
        this.touchControl.off('pan', this.pan);
    }

    // While the finger is down, drive the spring's value directly.
    function pan(event)
    {
        var panDistance = event.deltaX / this.viewportWidth;
        this.spring.setCurrentValue(panDistance).setAtRest();
        this.panDistance = this.pannable.style.left.slice(0, -2);
        event.srcEvent.preventDefault();
    }

    // On release: snap back if under half a page; otherwise spring toward
    // the neighbouring page and remember which direction to switch to.
    function panSpring(event)
    {
        if (Math.abs(this.panDistance) / this.viewportWidth < 0.5)
        {
            this.spring.setEndValue(0);
            return false;
        }
        if (this.panDistance > 0)
        {
            this.spring.setEndValue(1);
            this.changePage = "previous";
            return false;
        }
        this.spring.setEndValue(-1);
        this.changePage = "next";
        return false;
    }
}
export default CalendarCarousel;
|
module.exports = require('../images/2/1f1fa.png');
|
'''
Created on Jun 16, 2019
@author: Winterberger
'''
from datetime import datetime
from hashlib import sha256
class Block:
    """A single block in a toy blockchain.

    Holds a payload of transactions, the hash of the preceding block, a
    nonce, and its own SHA-256 hash computed over all of the above plus a
    creation timestamp.
    """

    def __init__(self, transactions, previous_hash, nonce=0):
        # Timestamp is fixed at construction time and participates in the hash.
        self.timestamp = datetime.now()
        self.transactions = transactions
        self.previous_hash = previous_hash
        self.nonce = nonce
        self.hash = self.generate_hash()

    def print_block(self):
        """Print the block's contents (hash is recomputed on the fly)."""
        print("timestamp:", self.timestamp)
        print("transactions:", self.transactions)
        print("current hash:", self.generate_hash())

    def generate_hash(self):
        """Return the SHA-256 hex digest of the block's combined contents."""
        # Concatenation order matters: timestamp, transactions, nonce, previous hash.
        contents = "".join(
            (
                str(self.timestamp),
                str(self.transactions),
                str(self.nonce),
                str(self.previous_hash),
            )
        )
        return sha256(contents.encode()).hexdigest()
|
# Copyright (c) 2018, Xilinx, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__author__ = "Peter Ogden"
__copyright__ = "Copyright 2018, Xilinx"
__email__ = "pynq_support@xilinx.com"
import cffi
import os
import pynq
import warnings
from wurlitzer import sys_pipes
_THIS_DIR = os.path.dirname(__file__)
with open(os.path.join(_THIS_DIR, 'xrfdc_functions.c'), 'r') as f:
header_text = f.read()
_ffi = cffi.FFI()
_ffi.cdef(header_text)
_lib = _ffi.dlopen(os.path.join(_THIS_DIR, 'libxrfdc.so'))
# Next stage is a simple wrapper function which checks the existance of the
# function in the library and the return code and throws an exception if either
# fails.
def _safe_wrapper(name, *args, **kwargs):
    """Call C driver function *name*, raising RuntimeError on failure.

    A non-zero return code from the C function is treated as failure.
    C-side stdout/stderr is forwarded to Python via wurlitzer's sys_pipes.
    """
    with sys_pipes():
        if not hasattr(_lib, name):
            raise RuntimeError(f"Function {name} not in library")
        status = getattr(_lib, name)(*args, **kwargs)
        if status:
            raise RuntimeError(f"Function {name} call failed")
# To reduce the amount of typing we define the properties we want for each
# class in the hierarchy. Each element of the array is a tuple consisting of
# the property name, the type of the property and whether or not it is
# read-only. These should match the specification of the C API but without the
# `XRFdc_` prefix in the case of the function name.
_block_props = [("BlockStatus", "XRFdc_BlockStatus", True),
("MixerSettings", "XRFdc_Mixer_Settings", False),
("QMCSettings", "XRFdc_QMC_Settings", False),
("CoarseDelaySettings", "XRFdc_CoarseDelay_Settings", False),
("NyquistZone", "u32", False),
("FabRdVldWords", "u32", True),
("FabWrVldWords", "u32", True)]
_adc_props = [("DecimationFactor", "u32", False),
("ThresholdClearMode", "u32", False),
("ThresholdSettings", "XRFdc_Threshold_Settings", False),
("CalibrationMode", "u8", False)]
_dac_props = [("InterpolationFactor", "u32", False),
("DecoderMode", "u32", False),
("OutputCurr", "int", True),
("InvSincFIR", "u16", False)]
_tile_props = [("FabClkOutDiv", "u16", False),
("FIFOStatus", "u8", True),
("ClockSource", "u32", True),
("PLLLockStatus", "u32", True)]
_rfdc_props = [("IPStatus", "XRFdc_IPStatus", True)]
# Next we define some helper functions for creating properties and
# packing/unpacking Python types into C structures
class PropertyDict(dict):
    """dict that notifies a callback whenever an item is assigned.

    Used so that mutating a settings dict returned by a property getter
    pushes the whole structure back down to the C driver.
    """

    def __init__(self, *args, **kwargs):
        # Default callback is a no-op until set_callback() installs a real one.
        self.callback = lambda _: 0
        self.update(*args, **kwargs)

    def set_callback(self, callback):
        """Install *callback*, invoked with self after every item assignment."""
        self.callback = callback

    def __setitem__(self, key, val):
        super().__setitem__(key, val)
        self.callback(self)
def _pack_value(typename, value):
if isinstance(value, dict):
c_value = _ffi.new(f"{typename}*")
for k, v in value.items():
setattr(c_value, k, v)
value = c_value
return value
def _unpack_value(typename, value):
    # For cffi struct pointers, dir() lists the struct's field names, so a
    # non-empty dir() is used here to mean "this is a struct": expose it as a
    # PropertyDict of field name -> field value.
    # NOTE(review): this assumes dir() of a scalar cdata is empty — confirm
    # against the cffi version in use.
    if dir(value):
        return PropertyDict({k: getattr(value, k) for k in dir(value)}) # Struct
    else:
        return value[0] # Scalar
# The underlying C functions for generic behaviour (applies to both DAC
# and ADC blocks) expect an argument for the type of block used.
# Other functions leave the type of block implicit. We handle this distinction
# by bubbling up through either `_call_function` or `_call_function_implicit`
# calls.
def _create_c_property(name, typename, readonly, implicit_type=False):
    """Build a property backed by the C driver's Get<name>/Set<name> calls.

    When *implicit_type* is True the property routes through
    ``_call_function_implicit`` (for C functions that do not take an
    explicit ADC/DAC type argument); otherwise ``_call_function`` is used.
    Struct-valued getters return a PropertyDict whose mutations are pushed
    straight back to the driver.
    """
    def _get(self):
        holder = _ffi.new(f"{typename}*")
        if implicit_type:
            c_func = self._call_function_implicit
        else:
            c_func = self._call_function
        c_func(f"Get{name}", holder)
        result = _unpack_value(typename, holder)
        if isinstance(result, PropertyDict):
            # Writing into the returned dict re-sends the whole struct to C.
            result.set_callback(
                lambda updated: c_func(f"Set{name}", _pack_value(typename, updated))
            )
        return result

    def _set(self, value):
        if implicit_type:
            self._call_function_implicit(f"Set{name}", _pack_value(typename, value))
        else:
            self._call_function(f"Set{name}", _pack_value(typename, value))

    return property(_get) if readonly else property(_get, _set)
# Finally we can define the object hierarchy. Each element of the object
# hierarchy has a `_call_function` method which handles adding the
# block/tile/toplevel arguments to the list of function parameters.
class RFdcBlock:
    """A single converter block.

    Delegates every C call to its parent tile, prepending this block's
    index to the argument list.
    """

    def __init__(self, parent, index):
        self._parent = parent
        self._index = index

    def _call_function(self, name, *args):
        return self._parent._call_function(name, self._index, *args)

    def ResetNCOPhase(self):
        """Reset the NCO phase of this block."""
        self._call_function("ResetNCOPhase")

    def UpdateEvent(self, Event):
        """Trigger an update event for the given event source."""
        self._call_function("UpdateEvent", Event)
class RFdcDacBlock(RFdcBlock):
    """DAC-specific converter block."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
    def _call_function_implicit(self, name, *args):
        # Implicit-type C functions omit the ADC/DAC type argument.
        return self._parent._call_function_implicit(name, self._index, *args)
class RFdcAdcBlock(RFdcBlock):
    """ADC-specific converter block."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
    def _call_function_implicit(self, name, *args):
        # Implicit-type C functions omit the ADC/DAC type argument.
        return self._parent._call_function_implicit(name, self._index, *args)
    def ThresholdStickyClear(self, ThresholdToUpdate):
        # Clear the sticky flag of the selected threshold.
        self._call_function_implicit("ThresholdStickyClear", ThresholdToUpdate)
class RFdcTile:
    """A tile (a group of converter blocks).

    Delegates C calls to the top-level driver. The explicit form prepends
    the tile type (``self._type``, set by the DAC/ADC subclass) and the tile
    index; the implicit form prepends only the index.
    """

    def __init__(self, parent, index):
        self._index = index
        self._parent = parent

    def _call_function(self, name, *args):
        return self._parent._call_function(name, self._type, self._index, *args)

    def _call_function_implicit(self, name, *args):
        # The C function infers the tile type here, so none is passed.
        return self._parent._call_function(name, self._index, *args)

    def StartUp(self):
        """Start this tile."""
        self._call_function("StartUp")

    def ShutDown(self):
        """Shut this tile down (C API spells it `XRFdc_Shutdown`)."""
        self._call_function("Shutdown")

    def Reset(self):
        """Reset this tile."""
        self._call_function("Reset")

    def SetupFIFO(self, Enable):
        """Enable or disable the tile's FIFO."""
        self._call_function("SetupFIFO", Enable)

    def DumpRegs(self):
        """Dump the tile's registers (for debugging)."""
        self._call_function("DumpRegs")

    def DynamicPLLConfig(self, source, ref_clk_freq, samp_rate):
        """Reconfigure the tile's PLL at runtime."""
        self._call_function("DynamicPLLConfig", source, ref_clk_freq, samp_rate)
class RFdcDacTile(RFdcTile):
    """DAC tile: sets the DAC tile type and creates its four DAC blocks."""
    def __init__(self, *args):
        super().__init__(*args)
        self._type = _lib.XRFDC_DAC_TILE
        self.blocks = [RFdcDacBlock(self, i) for i in range(4)]
class RFdcAdcTile(RFdcTile):
    """ADC tile: sets the ADC tile type and creates its four ADC blocks."""
    def __init__(self, *args):
        super().__init__(*args)
        self._type = _lib.XRFDC_ADC_TILE
        self.blocks = [RFdcAdcBlock(self, i) for i in range(4)]
class RFdc(pynq.DefaultIP):
    """PYNQ driver for the Xilinx RF Data Converter IP.

    Builds the C driver configuration (preferably from the overlay's hwh
    parameters), initializes the XRFdc instance, and exposes four ADC and
    four DAC tile objects.
    """

    bindto = ["xilinx.com:ip:usp_rf_data_converter:2.1"]

    def __init__(self, description):
        super().__init__(description)
        if 'parameters' in description:
            from .config import populate_config
            self._config = _ffi.new('XRFdc_Config*')
            populate_config(self._config, description['parameters'])
            # (removed a redundant `pass` statement here)
        else:
            warnings.warn("Please use an hwh file with the RFSoC driver"
                          " - the default configuration is being used")
            self._config = _lib.XRFdc_LookupConfig(0)
        self._instance = _ffi.new("XRFdc*")
        # Point the driver at the mapped register space of this IP.
        self._config.BaseAddr = self.mmio.array.ctypes.data
        _lib.XRFdc_CfgInitialize(self._instance, self._config)
        self.adc_tiles = [RFdcAdcTile(self, i) for i in range(4)]
        self.dac_tiles = [RFdcDacTile(self, i) for i in range(4)]

    def _call_function(self, name, *args):
        # Top of the delegation chain: prefix the C symbol and pass the
        # driver instance as the first argument.
        _safe_wrapper(f"XRFdc_{name}", self._instance, *args)
# Finally we can add our data-driven properties to each class in the hierarchy
def _attach_props(cls, props, **kwargs):
    # Bind one generated, C-backed property per (name, typename, readonly)
    # row of the property table.
    for name, typename, readonly in props:
        setattr(cls, name,
                _create_c_property(name, typename, readonly, **kwargs))

_attach_props(RFdcBlock, _block_props)
_attach_props(RFdcAdcBlock, _adc_props, implicit_type=True)
_attach_props(RFdcDacBlock, _dac_props, implicit_type=True)
_attach_props(RFdcTile, _tile_props)
_attach_props(RFdc, _rfdc_props)
# Some of our more important #define constants
# Clock source selection (internal PLL vs. external clock).
CLK_SRC_PLL = 0x1
CLK_SRC_EXT = 0x2
# Event-update trigger sources.
EVNT_SRC_IMMEDIATE = 0x00000000
EVNT_SRC_SLICE = 0x00000001
EVNT_SRC_TILE = 0x00000002
EVNT_SRC_SYSREF = 0x00000003
EVNT_SRC_MARKER = 0x00000004
EVNT_SRC_PL = 0x00000005
# Bit flags selecting which settings an event update applies to.
EVENT_MIXER = 0x00000001
EVENT_CRSE_DLY = 0x00000002
EVENT_QMC = 0x00000004
# Mixer modes (C = complex, R = real; input-to-output).
MIXER_MODE_OFF = 0x0
MIXER_MODE_C2C = 0x1
MIXER_MODE_C2R = 0x2
MIXER_MODE_R2C = 0x3
MIXER_MODE_R2R = 0x4
# Mixer type selection.
MIXER_TYPE_COARSE = 0x1
MIXER_TYPE_FINE = 0x2
MIXER_TYPE_OFF = 0x3
# Coarse-mixer frequency presets.
COARSE_MIX_OFF = 0x0
COARSE_MIX_SAMPLE_FREQ_BY_TWO = 0x2
COARSE_MIX_SAMPLE_FREQ_BY_FOUR = 0x4
COARSE_MIX_MIN_SAMPLE_FREQ_BY_FOUR = 0x8
COARSE_MIX_BYPASS = 0x10
# Fine-mixer amplitude scaling.
MIXER_SCALE_AUTO = 0x0
MIXER_SCALE_1P0 = 0x1
MIXER_SCALE_0P7 = 0x2
# Fabric clock divider settings (value n selects divide-by-2**(n-1),
# per the DIV1..DIV16 naming).
FAB_CLK_DIV1 = 0x1
FAB_CLK_DIV2 = 0x2
FAB_CLK_DIV4 = 0x3
FAB_CLK_DIV8 = 0x4
FAB_CLK_DIV16 = 0x5
|
# Data Parallel Control (dpctl)
#
# Copyright 2020-2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import timeit
import numpy as np
import syclbuffer as sb
import dpctl
# Benchmark syclbuffer's columnwise_total against NumPy on CPU and GPU
# OpenCL devices.
X = np.full((10**4, 4098), 1e-4, dtype="d")

# warm-up
print("=" * 10 + " Executing warm-up " + "=" * 10)
print("NumPy result: ", X.sum(axis=0))
for _device_filter in ("opencl:cpu", "opencl:gpu"):
    q = dpctl.SyclQueue(_device_filter)
    print(
        "SYCL({}) result: {}".format(
            q.sycl_device.name,
            sb.columnwise_total(X, queue=q),
        )
    )

# Timed runs: the setup re-creates the queue and performs one call first so
# that JIT compilation is not counted in the measurement.
for _device_filter in ("opencl:cpu", "opencl:gpu"):
    print("Times for '{}'".format(_device_filter))
    print(
        timeit.repeat(
            stmt="sb.columnwise_total(X, queue=q)",
            setup=(
                'q = dpctl.SyclQueue("{}"); '
                "sb.columnwise_total(X, queue=q)"
            ).format(_device_filter),
            number=100,
            globals=globals(),
        )
    )

print("Times for NumPy")
print(timeit.repeat(stmt="X.sum(axis=0)", number=100, globals=globals()))
|
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from newsfeed.models import RssUrl
class SignUpForm(UserCreationForm):
    """Registration form: username, e-mail address and password pair.

    ``UserCreationForm`` supplies the two password fields and their matching
    logic; this form adds a required e-mail address.
    """

    # Use EmailField so the address is validated server-side too; the
    # original CharField + EmailInput only gave browser-side checking.
    # EmailField's default widget is already EmailInput, so rendering is
    # unchanged.
    email = forms.EmailField(
        max_length=254,
        required=True,
    )

    class Meta:
        model = User
        fields = (
            'username',
            'email',
            'password1',
            'password2'
        )
class FeedSubscriptionsForm(forms.Form):
    # Lets the user pick any subset of the known RSS feeds, rendered as one
    # checkbox per RssUrl row.
    subscription = forms.ModelMultipleChoiceField(
        queryset=RssUrl.objects.all(),
        widget=forms.CheckboxSelectMultiple
    )
class BookmarkForm(forms.Form):
    # NOTE(review): BooleanField is required by default, so submitting the
    # form with the box unchecked fails validation. If this form is meant to
    # toggle bookmarks off as well, it presumably needs required=False --
    # confirm against the consuming view.
    bookmark = forms.BooleanField()
    # Identifier of the object being bookmarked, carried in a hidden input.
    bookmarked_obj = forms.CharField(widget=forms.HiddenInput)
|
import RPi.GPIO as GPIO


def action(channel=None):
    """Edge-event callback.

    RPi.GPIO invokes event callbacks with the channel number, so the
    parameter is required (the original zero-argument signature raised
    TypeError when the event fired). Defaulted for backward compatibility
    with direct calls.
    """
    print('Callback from pi')
    # The original line `socket.emit(-action1)` raised NameError: neither
    # `socket` nor `action1` is defined in this module. Presumably a
    # SocketIO client should emit an 'action1' event here -- restore once
    # that client exists:
    # socket.emit('action1')


def init():
    """Configure BCM pin 4 as a pulled-down input with edge detection."""
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(4, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
    GPIO.add_event_detect(4, GPIO.BOTH)


init()
GPIO.add_event_callback(4, action)
# Removed `GPIO.add_event_callback(5, action2)`: `action2` was undefined
# (NameError) and channel 5 had no edge detection configured, which raises
# RuntimeError in RPi.GPIO.
|
/**
* Created by marija on 22.02.18.
*/
var p1 = Promise.resolve(18);
var p2 = Promise.reject(17);
// p2 is never used again; attach a no-op handler so the unhandled rejection
// does not terminate the process (modern Node exits on unhandled rejections)
// before the delayed result file is written.
p2.catch(function () {});
var callbackArguments = [];

// Exercise .catch() with no handler on a resolved promise; record either the
// resulting promise or the message of an error thrown by the call itself.
var base_0 = p1
var r_0= undefined
try {
    r_0 = base_0.catch()
}
catch(e) {
    r_0= e.message
}
/**
 * Map an array of arbitrary values to a JSON-friendly summary:
 * null/undefined pass through unchanged, plain objects, arrays and
 * primitive-wrapper values are JSON-stringified, and anything else is
 * replaced by its constructor's name.
 */
function serialize(array) {
    var stringifiable = ['Object', 'Boolean', 'Array', 'Number', 'String'];
    return array.map(function (value) {
        if (value === null || value === undefined) return value;
        var ctor = value.constructor.name;
        if (stringifiable.indexOf(ctor) !== -1) return JSON.stringify(value);
        return ctor;
    });
}
// After 300 ms (enough time for the promises above to settle), dump the
// serialized experiment results to the fixed output file.
setTimeout(function () {
    var payload = JSON.stringify({
        "baseObjects": serialize([base_0]),
        "returnObjects": serialize([r_0]),
        "callbackArgs": callbackArguments
    });
    require("fs").writeFileSync("./experiments/promises/promiseRandom/test623.json", payload);
}, 300)
|
import urllib
import logging
from decorator import decorator
import jsonlib
from pylons.decorators import jsonify
import herder.model
from herder.lib.base import *
log = logging.getLogger(__name__)
class LanguageController(BaseController):
    def view(self, domain, language, id):
        """Render the view page for one translatable message."""
        # Look up the message addressed by (domain, language, id) and stash
        # it, plus its domain, on the template context.
        c.message = herder.model.Message.by_domain_language_id(
            domain, language, id)
        c.domain = herder.model.Domain.by_name(domain)
        # NOTE(review): get_language() is passed the message ``id`` even
        # though a ``language`` argument is available -- confirm this is
        # intentional and not meant to be get_language(language).
        c.language = c.domain.get_language(id)
        return render('/message/view.html')
|
import React, { useState } from 'react'
// NOTE(review): this hook appears unfinished -- the `note` and `update`
// parameters are never read, the state setters are never called, and the
// function returns nothing (callers receive undefined).
function useCreateNote(note, update = false) {
    // Declared state: the accumulated notes and a counter, currently unused.
    const [noteList, setNoteList] = useState([])
    const [count, setCount] = useState(1)
}
export default useCreateNote
|
// Custom Next.js server: routes /a and /b through app.render, everything
// else through the default Next.js request handler.
const { createServer } = require('http')
const { parse } = require('url')
const next = require('next')

const dev = process.env.NODE_ENV !== 'production'
const app = next({ dev })
const handle = app.getRequestHandler()

app.prepare().then(() => {
  createServer((req, res) => {
    // Be sure to pass `true` as the second argument to `url.parse`.
    // This tells it to parse the query portion of the URL.
    const parsedUrl = parse(req.url, true)
    const { pathname, query } = parsedUrl

    if (pathname === '/a') {
      app.render(req, res, '/a', query)
    } else if (pathname === '/b') {
      app.render(req, res, '/b', query)
    } else {
      handle(req, res, parsedUrl)
    }
  }).listen(3000, (err) => {
    if (err) throw err
    console.log('> Ready on http://localhost:3000')
  })
}).catch((err) => {
  // Surface startup failures instead of leaving an unhandled rejection.
  console.error(err)
  process.exit(1)
})
|
// Turkish
'use strict';
// Locale container object: `code` identifies the language; the translation
// strings are attached to `tr.data` below.
var tr = function() {}
tr.code = 'tr';
tr.data = {
GEN_Help_0 : 'Already have a wallet somewhere?',
GEN_Help_MetaMask : 'So easy! Keys stay in MetaMask, not on a phishing site! Try it today.',
GEN_Warning_1 : '**Do not lose it!** It cannot be recovered if you lose it.',
GEN_Warning_2 : '**Do not share it!** Your funds will be stolen if you use this file on a malicious/phishing site.',
GEN_Warning_3 : '**Make a backup!** Secure it like the millions of dollars it may one day be worth.',
GAS_Price_1 : 'Not So Fast',
GAS_Price_2 : 'Fast',
GAS_Price_3 : 'Fast AF',
CONTRACT_Helper_1 : 'Please change the address to your own Multisig Contract Address.',
CONTRACT_Warning_1 : 'You are about to **deploy a contract**.',
CONTRACT_Warning_2 : 'It will be deployed on the following network:',
CONTRACT_Warning_3 : 'You are about to **execute a function on contract**.',
SEND_Helper_Contract : 'In most cases you should leave this as 0.',
SEND_ViewOnly : 'You cannot send with only your address. You must use one of the other options to unlock your wallet in order to send.',
SEND_LoadTokens : 'Load Tokens',
SEND_CustomAddrMsg : 'A message regarding',
SWAP_Warning_1 : 'Warning! You do not have enough funds to complete this swap.',
SWAP_Warning_2 : 'Please add more funds to your wallet or access a different wallet.',
X_Advanced : 'Advanced Users Only.',
X_HelpfulLinks : 'Helpful Links & FAQs',
X_HelpfulLinks_1 : 'How to Access your Wallet',
X_HelpfulLinks_2 : 'I lost my private key',
X_HelpfulLinks_3 : 'My private key opens a different address',
X_HelpfulLinks_4 : 'Migrating to/from MyEtherWallet',
X_Network : 'Network', // aka "node" or "chain" - used in the dropdown in header
X_Network_Custom : 'Add Custom Network / Node',
DOMAIN_Buy : 'Buy the domain',
DOMAIN_BuyItNow : 'Price to buy the domain immediately:',
DOMAIN_bid : 'Bid for the domain',
DOMAIN_bid_0 : 'You are currently winning this auction with the highest bid. You can bid higher if you want, but it will delay the close of the auction for 24 hours.',
DOMAIN_bid_1 : 'Bid at least',
DOMAIN_bid_2 : 'on the domain.',
DOMAIN_bid_3 : 'You will win the domain if no higher bids are placed within the next 24 hours.',
DOMAIN_bid_4 : 'Note that the domain has a locked value of',
DOMAIN_bid_5 : 'As part of the sale you will receive the deed with this value but cannot claim it unless you release the name.',
DOMAIN_Finish_1 : 'Not related to that auction',
DOMAIN_Finish_2 : 'This address is neither the winner nor the seller of the auction.',
DOMAIN_Finish_3 : 'Finish the auction',
DOMAIN_Finish_4 : 'Finish the auction to allocate the domain to the winner and the funds to the seller.',
DOMAIN_Finish_5 : 'Click your TX hash to see if you successfully transferred the domain to DomainSale.',
DOMAIN_offer_4 : 'Offer For Sale:',
DOMAIN_offer_5 : 'Set either of both of the prices below to offer your domain for sale. Remember that any funds you have locked in the domain\'s deed will go to the buyer and 10% of the funds when sold goes to referrers.',
DOMAIN_offer_7 : 'Alter Your Offer for:',
DOMAIN_offer_8 : 'Change either of both of the prices below to alter your domain sale offer. Remember that any funds you have locked in the domain\'s deed will go to the buyer and 10% of the funds when sold goes to referrers.',
DOMAIN_offer_9 : 'Buy price',
DOMAIN_offer_10 : 'This is the price at which someone can buy the domain immediately. 0 means that the domain cannot be purchased immediately.',
DOMAIN_offer_11 : 'This is the price at which someone can start an auction for the domain. 0 means that the domain will not be available for auction.',
DOMAIN_offer_12 : 'Offer your domain',
DOMAIN_offer_13 : 'Alter your sale',
DOMAIN_offer_14 : 'Cancel your sale',
DOMAIN_offer_15 : 'You can cancel your domain sale, which will return the domain to you with no charge. This is only available before any bids have been received for the domain.',
ENS_WrongAddress_1 : 'The wallet you unlocked does not own the name ',
ENS_WrongAddress_2 : 'Please unlock the wallet with address ',
ENS_Finalize : 'Finalize',
ENS_Finalize_content : 'Finalizing this name assigns the ENS name to the winning bidder. The winner will be refunded the difference between their bid and the next-highest bid. If you are the only bidder, you will be refunded all but 0.01 ETH. Any non-winners will also be refunded.',
ENS_Finalize_content_1 : 'You are about to finalize the auction & claim the name:',
ENS_Helper_1 : 'What is the process like?',
ENS_Helper_2 : '1) Preparation',
ENS_Helper_3 : 'Decide which account you wish to own the name & ensure that you have multiple backups of that account.',
ENS_Helper_4 : 'Decide the maximum amount of ETH you are willing to pay for the name (your <u>Bid Amount</u>). Ensure that the account has enough to cover your bid + 0.01 ETH for gas.',
ENS_Helper_5 : '2) Start an Auction / Place a Bid',
ENS_Helper_6 : 'Bidding period lasts 3 days (72 hours).',
ENS_Helper_7 : 'You will enter the <u>name</u>, <u>Actual Bid Amount</u>, <u>Bid Mask</u>, which is protected by a <u>Secret Phrase</u>.',
ENS_Helper_8 : 'This places your bid, but this information is kept secret until you reveal it.',
ENS_Helper_9 : '3) Reveal your Bid',
ENS_Helper_10 : '**If you do not reveal your bid, you will not be refunded.**',
ENS_Helper_11 : 'Reveal Period lasts 2 days (48 hours).',
ENS_Helper_12 : 'You will unlock your account, enter the <u>Bid Amount</u>, and the <u>Secret Phrase</u>.',
ENS_Helper_13 : 'In the event that two parties bid exactly the same amount, the first bid revealed will win.',
ENS_Helper_14 : '4) Finalize the Auction',
ENS_Helper_15 : 'Once the auction has ended (after 5 days / 120 hours), the winner needs to finalize the auction in order to claim their new name.',
ENS_Helper_16 : 'The winner will be refunded the difference between their bid and the next-highest bid. If you are the only bidder, you will be refunded all but 0.01 ETH.',
ENS_Helper_17 : 'More Information',
ENS_Helper_18 : 'The auction for this registrar is a blind auction, and is described in',
ENS_Helper_19 : 'Basically, no one can see *anything* during the auction.',
ENS_Helper_20 : 'ENS: Read the Docs',
ENS_Helper_21 : 'Announcing the Ethereum Name Service Relaunch Date!',
ENS_Helper_22 : 'Knowledge Base: ENS',
ENS_Helper_23 : 'Debugging a [BAD INSTRUCTION] Reveal',
ENS_Helper_24 : 'Please try the above before relying on support for reveal issues as we are severely backlogged on support tickets. We\'re so sorry. :(',
EOS_01 : '**Generate EOS Key-pair**',
EOS_02 : '**Register / Map your EOS Key**',
EOS_03 : 'Select `register`',
EOS_04 : 'Enter your **EOS Public Key** <--- CAREFUL! EOS PUBLIC KEY!',
EOS_05 : 'Fund EOS Contract on Send Page',
EOS_06 : 'Go to Send Ether & Tokens Page',
EOS_07 : 'Unlock same wallet you are unlocking here.',
EOS_08 : 'Send Amount you want to Contribute to `0xd0a6E6C54DbC68Db5db3A091B171A77407Ff7ccf`',
EOS_09 : 'Claim EOS Tokens',
EOS_10 : 'Select `claimAll`.',
/* Onboarding */
ONBOARD_welcome_title : 'Welcome to MyEtherWallet.com',
ONBOARD_welcome_content__1 : 'Please take some time to understand this for your own safety. 🙏',
ONBOARD_welcome_content__2 : 'Your funds will be stolen if you do not heed these warnings.',
ONBOARD_welcome_content__3 : 'We know this click-through stuff is annoying. We are sorry.',
ONBOARD_welcome_content__4 : 'What is MEW? ',
ONBOARD_welcome_content__5 : 'MyEtherWallet is a free, open-source, client-side interface.',
ONBOARD_welcome_content__6 : 'We allow you to interact directly with the blockchain while remaining in full control of your keys & your funds.',
ONBOARD_welcome_content__7 : '**You** and **only you** are responsible for your security.',
ONBOARD_welcome_content__8 : 'We cannot recover your funds or freeze your account if you visit a phishing site or lose your private key.',
ONBOARD_bank_title : 'MyEtherWallet is not a Bank',
ONBOARD_bank_content__1 : 'When you open an account with a bank or exchange, they create an account for you in their system.',
ONBOARD_bank_content__2 : 'The bank keeps track of your personal information, account passwords, balances, transactions and ultimately your money.',
ONBOARD_bank_content__3 : 'The bank charge fees to manage your account and provide services, like refunding transactions when your card gets stolen.',
ONBOARD_bank_content__4 : 'The bank allows you to write a check or charge your debit card to send money, go online to check your balance, reset your password, and get a new debit card if you lose it.',
ONBOARD_bank_content__5 : 'You have an account *with the bank or exchange* and they decide how much money you can send, where you can send it, and how long to hold on a suspicious deposit. All for a fee.',
ONBOARD_welcome_title__alt : 'Introduction',
ONBOARD_interface_title : 'MyEtherWallet is an Interface',
ONBOARD_interface_content__1 : 'When you create an account on MyEtherWallet you are generating a cryptographic set of numbers: your private key and your public key (address).',
ONBOARD_interface_content__2 : 'The handling of your keys happens entirely on your computer, inside your browser.',
ONBOARD_interface_content__3 : 'We never transmit, receive or store your private key, password, or other account information.',
ONBOARD_interface_content__4 : 'We do not charge a transaction fee.',
ONBOARD_interface_content__5 : 'You are simply using our **interface** to interact **directly with the blockchain**.',
ONBOARD_interface_content__6 : 'If you send your *public key (address)* to someone, they can send you ETH or tokens. 👍',
ONBOARD_interface_content__7 : 'If you send your *private key* to someone, they now have full control of your account. 👎',
ONBOARD_bank_title__alt : 'MEW isn\'t a Bank',
ONBOARD_blockchain_title__alt : 'WTF is a Blockchain?',
ONBOARD_blockchain_skip : 'I already know what a blockchain is...',
ONBOARD_blockchain_title : 'Wait, WTF is a Blockchain?',
ONBOARD_blockchain_content__1 : 'The blockchain is like a huge, global, decentralized spreadsheet.',
ONBOARD_blockchain_content__2 : 'It keeps track of who sent how many coins to whom, and what the balance of every account is.',
ONBOARD_blockchain_content__3 : 'It is stored and maintained by thousands of people (miners) across the globe who have special computers.',
ONBOARD_blockchain_content__4 : 'The blocks in the blockchain are made up of all the individual transactions sent from MyEtherWallet, MetaMask, Exodus, Mist, Geth, Parity, and everywhere else.',
ONBOARD_blockchain_content__5 : 'When you see your balance on MyEtherWallet.com or view your transactions on [etherscan.io](https://etherscan.io), you are seeing data on the blockchain, not in our personal systems.',
ONBOARD_blockchain_content__6 : 'Again: **we are not a bank**.',
ONBOARD_interface_title__alt : 'MEW is an Interface',
ONBOARD_why_title__alt : 'But...why does this matter?',
ONBOARD_why_title : 'Why are you making me read all this?',
ONBOARD_why_content__1 : 'Because we need you to understand that we **cannot**...',
ONBOARD_why_content__2 : 'Access your account or send your funds for you X.',
ONBOARD_why_content__3 : 'Recover or change your private key.',
ONBOARD_why_content__4 : 'Recover or reset your password.',
ONBOARD_why_content__5 : 'Reverse, cancel, or refund transactions.',
ONBOARD_why_content__6 : 'Freeze accounts.',
ONBOARD_why_content__7 : '**You** and **only you** are responsible for your security.',
ONBOARD_why_content__8 : 'Be diligent to keep your private key and password safe. Your private key is sometimes called your mnemonic phrase, keystore file, UTC file, JSON file, wallet file.',
ONBOARD_why_content__9 : 'If you lose your private key or password, no one can recover it.',
ONBOARD_why_content__10 : 'If you enter your private key on a phishing website, you will have **all your funds taken**.',
ONBOARD_blockchain_title__alt : 'WTF is a Blockchain?',
ONBOARD_point_title__alt : 'What\'s the Point of MEW then?',
ONBOARD_whymew_title : 'If MyEtherWallet can\'t do those things, what\'s the point?',
ONBOARD_whymew_content__1 : 'Because that is the point of decentralization and the blockchain.',
ONBOARD_whymew_content__2 : 'You don\'t have to rely on your bank, government, or anyone else when you want to move your funds.',
ONBOARD_whymew_content__3 : 'You don\'t have to rely on the security of an exchange or bank to keep your funds safe.',
ONBOARD_whymew_content__4 : 'If you don\'t find these things valuable, ask yourself why you think the blockchain and cryptocurrencies are valuable. 😉',
ONBOARD_whymew_content__5 : 'If you don\'t like the sound of this, consider using [Coinbase](https://www.coinbase.com/) or [Blockchain.info](https://blockchain.info/wallet/#/signup). They have more familiar accounts with usernames & passwords.',
ONBOARD_whymew_content__6 : 'If you are scared but want to use MEW, [get a hardware wallet](https://kb.myetherwallet.com/hardware-wallets/hardware-wallet-recommendations.html)! These keep your keys secure.',
ONBOARD_why_title__alt : 'But...why?',
ONBOARD_secure_title : 'How To Protect Yourself & Your Funds',
ONBOARD_secure_1_title : 'How To Protect Yourself from Phishers',
ONBOARD_secure_1_content__1 : 'Phishers send you a message with a link to a website that looks just like MyEtherWallet, EtherDelta, Paypal, or your bank, but is not the real website. They steal your information and then steal your money.',
ONBOARD_secure_1_content__2 : 'Install [EAL](https://chrome.google.com/webstore/detail/etheraddresslookup/pdknmigbbbhmllnmgdfalmedcmcefdfn) or [MetaMask](https://chrome.google.com/webstore/detail/metamask/nkbihfbeogaeaoehlefnkodbefgpgknn) or [Cryptonite by Metacert](https://chrome.google.com/webstore/detail/cryptonite-by-metacert/keghdcpemohlojlglbiegihkljkgnige) or the [MyEtherWallet Chrome Extension](https://chrome.google.com/webstore/detail/myetherwallet-cx/nlbmnnijcnlegkjjpcfjclmcfggfefdm) to block malicious websites.',
ONBOARD_secure_1_content__3 : 'Always check the URL: `https://www.myetherwallet.com`.',
ONBOARD_secure_1_content__4 : 'Always make sure the URL bar has `MYETHERWALLET INC` in green.',
ONBOARD_secure_1_content__5 : 'Do not trust messages or links sent to you randomly via email, Slack, Reddit, Twitter, etc.',
ONBOARD_secure_1_content__6 : 'Always navigate directly to a site before you enter information. Do not enter information after clicking a link from a message or email.',
ONBOARD_secure_1_content__7 : '[Install an AdBlocker](https://chrome.google.com/webstore/detail/ublock-origin/cjpalhdlnbpafiamejdnhcphjbkeiagm?hl=en) and do not click ads on your search engine (e.g. Google).',
ONBOARD_point_title__alt_2 : 'What\'s the point?',
ONBOARD_secure_2_title : 'How To Protect Yourself from Scams',
ONBOARD_secure_2_content__1 : 'People will try to get you to give them money in return for nothing.',
ONBOARD_secure_2_content__2 : 'If it is too good to be true, it probably is.',
ONBOARD_secure_2_content__3 : 'Research before sending money to someone or some project. Look for information on a variety of websites and forums. Be wary.',
ONBOARD_secure_2_content__4 : 'Ask questions when you don\'t understand something or it doesn\'t seem right.',
ONBOARD_secure_2_content__5 : 'Don\'t let fear, FUD, or FOMO win over common sense. If something is very urgent, ask yourself "why?". It may be to create FOMO or prevent you from doing research.',
ONBOARD_secure_3_title__alt : 'Phuck Phishers',
ONBOARD_secure_3_title : 'How To Protect Yourself from Loss',
ONBOARD_secure_3_content__1 : 'If you lose your private key or password, it is gone forever. Don\'t lose it.',
ONBOARD_secure_3_content__2 : 'Make a backup of your private key and password. Do NOT just store it on your computer. Print it out on a piece of paper or save it to a USB drive.',
ONBOARD_secure_3_content__3 : 'Store this paper or USB drive in a different physical location. A backup is not useful if it is destroyed by a fire or flood along with your laptop.',
ONBOARD_secure_3_content__4 : 'Do not store your private key in Dropbox, Google Drive, or other cloud storage. If that account is compromised, your funds will be stolen.',
ONBOARD_secure_3_content__5 : 'If you have more than 1-week\'s worth of pay worth of cryptocurrency, get a hardware wallet. No excuses. It\'s worth it. I promise.',
ONBOARD_secure_3_content__6 : '[Even more Security Tips!](https://kb.myetherwallet.com/getting-started/protecting-yourself-and-your-funds.html)',
ONBOARD_secure_2_title__alt_2 : 'Screw Scams',
ONBOARD_final_title__alt : 'One more click & you\'re done! 🤘',
ONBOARD_final_title : 'Alright, I\'m done lecturing you!',
ONBOARD_final_subtitle : 'Sorry for being like this. Onwards!',
ONBOARD_final_content__1 : 'Create a wallet',
ONBOARD_final_content__2 : 'Get a hardware wallet',
ONBOARD_final_content__3 : 'How to Set up MEW + MetaMask',
ONBOARD_final_content__4 : 'How to Run MEW Offline / Locally',
ONBOARD_final_content__5 : 'How to Send via Ledger hardware wallet',
ONBOARD_final_content__6 : 'How to Send via TREZOR hardware wallet',
ONBOARD_final_content__7 : 'How to Send via MetaMask',
ONBOARD_final_content__8 : 'Learn More or Contact Us',
ONBOARD_final_content__9 : 'OMG, please just let me send FFS.',
ONBOARD_resume : 'It looks like you didn\'t finish reading through these slides last time. ProTip: Finish reading through the slides 😉',
HELP_2a_Title : 'How do I save/backup my wallet? ',
/* New Generics */
x_CancelReplaceTx : 'Cancel or Replace Transaction',
x_CancelTx : 'Cancel Transaction',
x_PasswordDesc : 'This password * encrypts * your private key. This does not act as a seed to generate your keys. **You will need this password + your private key to unlock your wallet.**',
x_ReadMore : 'Read More',
x_ReplaceTx : 'Replace Transaction',
x_TransHash : 'Transaction Hash',
x_TXFee : 'TX Fee',
x_TxHash : 'TX Hash',
/* Check TX Status */
NAV_CheckTxStatus : 'Check TX Status',
NAV_TxStatus : 'TX Status',
tx_Details : 'Transaction Details',
tx_Summary : 'During times of high volume (like during ICOs) transactions can be pending for hours, if not days. This tool aims to give you the ability to find and "cancel" / replace these TXs. ** This is not typically something you can do. It should not be relied upon & will only work when the TX Pools are full. [Please, read about this tool here.](https://kb.myetherwallet.com/transactions/check-status-of-ethereum-transaction.html)**',
tx_notFound : 'Transaction Not Found',
tx_notFound_1 : 'This TX cannot be found in the TX Pool of the node you are connected to.',
tx_notFound_2 : 'If you just sent the transaction, please wait 15 seconds and press the "Check TX Status" button again. ',
tx_notFound_3 : 'It could still be in the TX Pool of a different node, waiting to be mined.',
tx_notFound_4 : 'Please use the dropdown in the top-right & select a different ETH node (e.g. `ETH (Etherscan.io)` or `ETH (Infura.io)` or `ETH (MyEtherWallet)`) and check again.',
tx_foundInPending : 'Pending Transaction Found',
tx_foundInPending_1 : 'Your transaction was located in the TX Pool of the node you are connected to. ',
tx_foundInPending_2 : 'It is currently pending (waiting to be mined). ',
tx_foundInPending_3 : 'There is a chance you can "cancel" or replace this transaction. Unlock your wallet below.',
tx_FoundOnChain : 'Transaction Found',
tx_FoundOnChain_1 : 'Your transaction was successfully mined and is on the blockchain.',
tx_FoundOnChain_2 : '**If you see a red `( ! )`, a `BAD INSTRUCTION` or `OUT OF GAS` error message**, it means that the transaction was not successfully *sent*. You cannot cancel or replace this transaction. Instead, send a new transaction. If you received an "Out of Gas" error, you should double the gas limit you specified originally.',
tx_FoundOnChain_3 : '**If you do not see any errors, your transaction was successfully sent.** Your ETH or Tokens are where you sent them. If you cannot see this ETH or Tokens credited in your other wallet / exchange account, and it has been 24+ hours since you sent, please [contact that service](https://kb.myetherwallet.com/diving-deeper/ethereum-list-of-support-and-communities.html). Send them the *link* to your transaction and ask them, nicely, to look into your situation.',
/* Gen Wallet Updates */
GEN_Help_1 : 'Use your',
GEN_Help_2 : 'to access your account.',
GEN_Help_3 : 'Your device * is * your wallet.',
GEN_Help_4 : 'Guides & FAQ',
GEN_Help_5 : 'How to Create a Wallet',
GEN_Help_6 : 'Getting Started',
GEN_Help_7 : 'Keep it safe · Make a backup · Don\'t share it with anyone · Don\'t lose it · It cannot be recovered if you lose it.',
GEN_Help_8 : 'Not Downloading a File? ',
GEN_Help_9 : 'Try using Google Chrome ',
GEN_Help_10 : 'Right click & save file as. Filename: ',
GEN_Help_11 : 'Don\'t open this file on your computer ',
GEN_Help_12 : 'Use it to unlock your wallet via MyEtherWallet (or Mist, Geth, Parity and other wallet clients.) ',
GEN_Help_13 : 'How to Back Up Your Keystore File ',
GEN_Help_14 : 'What are these Different Formats? ',
GEN_Help_15 : 'Preventing loss & theft of your funds.',
GEN_Help_16 : 'What are these Different Formats?',
GEN_Help_17 : 'Why Should I?',
GEN_Help_18 : 'To have a secondary backup.',
GEN_Help_19 : 'In case you ever forget your password.',
GEN_Help_20 : 'Cold Storage',
GET_ConfButton : 'I understand. Continue.',
GEN_Label_5 : 'Save Your `Private Key`. ',
GEN_Unlock : 'Unlock your wallet to see your address',
GAS_PRICE_Desc : 'Gas Price is the amount you pay per unit of gas. `TX fee = gas price * gas limit` & is paid to miners for including your TX in a block. Higher the gas price = faster transaction, but more expensive. Default is `41 GWEI`.',
GAS_LIMIT_Desc : 'Gas limit is the amount of gas to send with your TX. `TX fee` = gas price * gas limit & is paid to miners for including your TX in a block. Increasing this number will not get your TX mined faster. Sending ETH = `21000`. Sending Tokens = ~`200000`.',
NONCE_Desc : 'The nonce is the number of transactions sent from a given address. It ensures transactions are sent in order & not more than once.',
TXFEE_Desc : 'The TX Fee is paid to miners for including your TX in a block. Is is the `gas limit` * `gas price`. [You can convert GWEI -> ETH here](https://www.myetherwallet.com/helpers.html)',
/* Navigation*/
NAV_AddWallet : 'Cüzdan ekle ',
NAV_BulkGenerate : 'Birkaç Cüzdan oluştur ',
NAV_Contact : 'Iletişime geç ',
NAV_Contracts : 'Sözleşmeler ',
NAV_DeployContract : 'Sözleşme kur ',
NAV_ENS : 'ENS',
NAV_GenerateWallet_alt : 'New Wallet ',
NAV_GenerateWallet : 'Cüzdan oluştur ',
NAV_Help : 'Yardim et ',
NAV_InteractContract : 'Interact with Contract ',
NAV_Multisig : 'Multisig ',
NAV_MyWallets : 'Cüzdanim ',
NAV_Offline : 'Offline gönder ',
NAV_SendEther : 'Ether ve Tokens gönder ',
NAV_SendTokens : 'Tokens gönder ',
NAV_SignMsg : 'Mesajı Doğrula ',
NAV_Swap : 'Çevir ',
NAV_ViewWallet : 'Cüzdan bilgilerni göster ',
NAV_YourWallets : 'Cüzdanin ',
/* General */
x_Access : 'Erişim ',
x_AddessDesc : 'Your Address can also be known as you `Account #` or your `Public Key`. It is what you share with people so they can send you Ether or Tokens. Find the colorful address icon. Make sure it matches your paper wallet & whenever you enter your address somewhere. Bu "hesap numarası" veya "genel anahtar" dir. Birisi ether göndermek istiyorsa bu adresi kullanmasi gerekir. Ikon adresini kontrol etmek kolay bir yoldur ',
x_Address : 'Adresin ',
x_Cancel : 'Iptal et ',
x_CSV : 'CSV dosya (şifrelenmemis) ',
x_Download : 'Indir ',
x_Json : 'JSON dosya (şifrelenmemis) ',
x_JsonDesc : 'Bu özel anahtarinin sifresiz, JSON formatidir. Demekki parolasiz cüzdanini acabilirsin. Özel anahatarina sahip olan herkez sifresiz cüzdani aca bilir. ',
x_Keystore : 'Keystore dosya (UTC / JSON · Tavsiye edilen · şifrelenmiş) ',
x_Keystore2 : 'Keystore dosya (UTC / JSON) ',
x_KeystoreDesc : 'This Keystore file matches the format used by Mist so you can easily import it in the future. It is the recommended file to download and back up. ',
x_MetaMask : 'MetaMask / Mist ',
x_Mnemonic : 'Mnemonic Phrase ',
x_ParityPhrase : 'Parity Phrase ',
x_Password : 'Parola ',
x_Print : 'Cüzdanin kağıt versiyonunu yazdir ',
x_PrintDesc : 'ProTavsiye: Eğer yazıcınız olmasa bile, "Yazdır" seçeneğini tıklayın ve PDF dosyası olarak kaydedin! ',
x_PrintShort : 'Yazdir ',
x_PrivKey : 'Özel anahtar (şifrelenmemis) ',
x_PrivKey2 : 'Özel anahtar ',
x_PrivKeyDesc : 'Bu özel anahtarinin şifrelenmemiş metin sürümüdür. Birisi şifrelenmemiş özel anahtarı bulduysa, şifre olmadan cüzdani acabilir. Bu nedenle, şifrelenmiş sürümlerini genellikle tavsiye edilir. ',
x_Save : 'Indir ',
x_TXT : 'TXT dosya (şifrelenmemis) ',
x_Wallet : 'Cüzdan ',
/* Header */
MEW_Warning_1 : 'Cüzdani acmadan veya yeni bir cüzdan olusturmadan önce her zaman URL\'yi gözden geçirin. Phishing sitelerinden dikkat! ',
CX_Warning_1 : 'Burda kaydetdigin tüm cüzdanlari **yedeklediginden** emin ol. Make sure you have **external backups** of any wallets you store here. Many things could happen that would cause you to lose the data in this Chrome Extension, including uninstalling and reinstalling the extension. This extension is a way to easily access your wallets, **not** a way to back them up. ',
MEW_Tagline : 'Acik kaynak JavaScript Client-Side Ether cüzdani ',
CX_Tagline : 'Acik kaynak JavaScript Client-Side Ether cüzdani Chrome Extension ',
/* Footer */
FOOTER_1 : 'Acik kaynak, javascript Client-Side Ethererum cüzdani ve Ether islemleri ',
FOOTER_1b : 'tarafından yaratıldı ',
FOOTER_2 : 'Bağışlar büyük takdirle kabuledilir: ',
FOOTER_3 : 'Client-side cüzdan olusturma ',
FOOTER_4 : 'Disclaimer ',
/* Sidebar */
sidebar_AccountInfo : 'Hesap bilgiler ',
sidebar_AccountAddr : 'Hesap adres ',
sidebar_AccountBal : 'Bakiye ',
sidebar_TokenBal : 'Token bakiye ',
sidebar_Equiv : 'Eşdeğer Değerler ', /* Belki sadece Esdeger yeter */
sidebar_TransHistory : 'Işlem geçmişi ',
sidebar_donation : 'MyEtherWallet gizlilik ve güvenlike adanmış ücretsiz ve açık kaynak kodlu bir hizmettir. Ne kadar cok bagis edilirse o kadar cok yeni özellik programlamaya, görüşlerinizi işlemeye yatitim yapabiliriz. Biz sadece dünyayı değiştirmek isteyen iki kişiyiz. Bize yardım edermisin? ',
sidebar_donate : 'Bağışta bulun ',
sidebar_thanks : 'TEŞEKKÜRLER!!! ',
sidebar_DisplayOnTrezor : 'Display address on TREZOR',
sidebar_DisplayOnLedger : 'Display address on Ledger',
/* Decrypt Panel */
decrypt_Access : 'Cüzdanını nasıl acmak istersin? ',
decrypt_Title : 'Özel anahtarinin formatini sec: ',
decrypt_Select : 'Bir cüzdan sec: ',
/* Add Wallet */
ADD_Label_1 : 'Ne yapmak istiyorsunuz? ',
ADD_Radio_1 : 'Yeni cüzdan olusturun ',
ADD_Radio_2 : 'Cüzdan dosyayi sec (Keystore / JSON) ',
ADD_Radio_2_alt : 'Cüzdan dosyayi sec ',
ADD_Radio_2_short : 'CÜZDAN DOSYAYI SEC... ',
ADD_Radio_3 : 'Özel anahatarini Yaspistir/Yaz ',
ADD_Radio_4 : 'Izlenecek hesap adresi ekle ', /* maybe another word for watch/izlencek --> Takip edilecek? */
ADD_Radio_5 : 'Paste Your Mnemonic ',
ADD_Radio_5_Path : 'Select HD derivation path ',
ADD_Radio_5_woTrezor : '(Jaxx, Metamask, Exodus, imToken) ',
ADD_Radio_5_withTrezor : '(Jaxx, Metamask, Exodus, imToken, TREZOR) ',
ADD_Radio_5_PathAlternative : '(Ledger) ',
ADD_Radio_5_PathTrezor : '(TREZOR) ',
ADD_Radio_5_PathCustom : '(Custom) ',
ADD_Label_2 : 'Bir takma isim oluştur ',
ADD_Label_3 : 'Cüzdan şifrelidir. Parolayi yaz: ',
ADD_Label_4 : 'Izlenecek hesap adresi ekle ',
ADD_Warning_1 : 'Özel anahtar yazmadan cüzdan sekmesinde "izlemek" için herhangi bir hesap ekleyebilirsin. Bu demek ** degilki ** cüzdani acabilirsin, veya ondan Ether aktarabilirsin. ',
ADD_Label_5 : 'Adresi ekle ',
ADD_Label_6 : 'Cüzdani ac ',
ADD_Label_6_short : 'Ac ',
ADD_Label_7 : 'Hesap ekle ',
ADD_Label_8 : 'Password (optional): ',
/* Generate Wallets */
GEN_desc : 'Birden birkac cüzdan olusturmak istiryorsan burdan yapabilirsin ',
GEN_Label_1 : 'Güçlü bir şifre sec (en az 9 sembol) ',
GEN_Placeholder_1 : 'Bunu kaydetmeyi unutma! ',
GEN_SuccessMsg : 'Başarı! Cüzdan oluşturuldu. ',
GEN_Label_2 : 'Keystore dosyayi veya özel anahtranini kaydet. Yukarıdaki şifreni unutma. ',
GEN_Label_3 : 'Adresini kaydet. ',
GEN_Label_4 : 'Isteye bagli: Cüzdanin kağıt versiyonunu yazdir veya QR code versiyonunu sakla.',
/* Bulk Generate Wallets */
BULK_Label_1 : 'Oluşturulacak cüzdan sayısı ',
BULK_Label_2 : 'Cüzdanlari olustur ',
BULK_SuccessMsg : 'Başarı! Cüzdanlarin olusturuldu. ',
/* Sending Ether and Tokens */
SEND_addr : 'Bu Adrese gönder ',
SEND_amount : 'Gönderilecek miktar ',
SEND_amount_short : 'Miktar ',
/* Duplicate key removed: `SEND_custom` is defined again further down in this
   same object ('Özel Token Ekle', matching the English source's "Add Custom
   Token"). In a JavaScript object literal the later property wins, so this
   earlier value ('Kullaniciya özel') was dead and never displayed. */
SEND_gas : 'Gas ',
SEND_TransferTotal : 'Tüm dengeyi gönder ',
SEND_generate : 'Generate Transaction ',
SEND_raw : 'İşlem oluştur ikili sistem ', /* add Binär */
SEND_signed : 'Imzali İşlem ',
SEND_trans : 'Islemi gönder ',
SENDModal_Title : 'Uyarı! ',
SEND_custom : 'Özel Token Ekle ',
/* full sentence reads "You are about to send "10 ETH" to address "0x1234". Are you sure you want to do this? " */
SENDModal_Content_1 : 'şu an ',
SENDModal_Content_2 : 'bu adresse ',
SENDModal_Content_3 : 'Göndermek üzeresin. Emin misin? ',
SENDModal_Content_4 : 'NOTE: If you encounter an error, you most likely need to add ether to your account to cover the gas cost of sending tokens. Gas is paid in ether. ',
SENDModal_No : 'Hayir, cikar beni burdan! ',
SENDModal_Yes : 'Evet, eminim! Islemi tamamla. ',
/* Tokens */
TOKEN_Addr : 'Token Adres ',
TOKEN_Symbol : 'Token symbolu ',
TOKEN_Dec : 'Ondalık ',
TOKEN_show : 'Tüm tokenleri göster ',
TOKEN_hide : 'Tokenleri gizle ',
/* Send Transaction */
TRANS_desc : 'Eğer Token göndermek istiyorsan, bunun yerine "Token Gönder" sayfasını kullan. ',
TRANS_warning : '"Yalnızca ETH" ya da "Yalnızca ETC" fonksiyonu kullanıyorsan, bir sözleşme yoluyla gönderiyorsun. Bazı servisler bu işlemleri kabul etmekde sorunları var. Daha fazla oku. ',
TRANS_advanced : '+Gelişmiş: Veri ekle ',
TRANS_data : 'Veri ',
TRANS_gas : 'Gas Limiti ',
TRANS_sendInfo : '21000 gaz kullanan standart bir islem 0.000441 ETH ile mal olur. Biz hizli bir sekilde mayinlanmasi için, 0.000000021 ETH\'dan biraz daha yüksek gaz fiyatini kullaniyoruz. Biz herhangi bir islem ücreti almiyoruz. ',
/* Send Transaction Modals */
TRANSModal_Title : '"Yalnızca ETH" and "Yalnızca ETC" islemler ',
TRANSModal_Content_0 : 'Farklı işlemler ve farklı hizmetleri hakkında bir not: ',
TRANSModal_Content_1 : '**ETH (Standart islem): ** Bu standart bir islem baslatir bir hesapten direkt baska bir hesapa. Standart gas 21000\'dir. Bu islemlen gönderileren islem ETC zincirinde\'de yer bulabilir. ',
TRANSModal_Content_2 : '**Only ETH: ** This sends via [Timon Rapp\'s replay protection contract (as recommended by VB)](https://blog.ethereum.org/2016/07/26/onward_from_the_hard_fork/) so that you only send on the **ETH** chain. ',
TRANSModal_Content_3 : '**Only ETC: ** This sends via [Timon Rapp\'s replay protection contract (as recommended by VB)](https://blog.ethereum.org/2016/07/26/onward_from_the_hard_fork/) so that you only send on the **ETC** chain. ',
TRANSModal_Content_4 : '**Coinbase & ShapeShift: ** Only send via Standard Transaction. If you send via the "Only" contracts, you will need to reach out to their support staff to manually add your balance or refund you. [You can try Shapeshift\'s "split" tool as well.](https://split.shapeshift.io/) ',
TRANSModal_Content_5 : '**Kraken & Poloniex:** Bilinen sorun yok, ikisinden birini sec ', /* */
TRANSModal_Yes : 'Güzel, şimdi anladım. ',
TRANSModal_No : 'Aman, kafam daha cok karıştı. Yardım et. ',
/* Offline Transaction */
OFFLINE_Title : 'Olustur & offline islem ',
OFFLINE_Desc : 'Offline islemler 3. adimda gerçeklestirir. 1. ve 3. adimi internete bagli olan bir bilgisayarda, 2. adimi offline olan bir bilgisayarda tamamliyorsun. Böyle özel anahtarinin internete bagli olan bir bilgisayara ulasmasini engelnenir. ',
OFFLLINE_Step1_Title : 'Adim 1: Bilgileri olustur (Online Bilgisayar) ',
OFFLINE_Step1_Button : 'Bilgileri olustur ',
OFFLINE_Step1_Label_1 : 'Adres\'den ',
OFFLINE_Step1_Label_2 : 'Nota: This is the FROM address, not the TO address. Nonce is generated from the originating account. If using an airgapped computer, it would be the address of the cold-storage account. ',
OFFLINE_Step2_Title : 'Adim 2: Generate Transaction (Offline Computer) ',
OFFLINE_Step2_Label_1 : 'Adrese ',
OFFLINE_Step2_Label_2 : 'Gönderilen miktar ',
OFFLINE_Step2_Label_3 : 'Gas fiyati ',
OFFLINE_Step2_Label_3b : 'Bu online bilgisayarınızda adım 1\'de gösterilirdi. ',
OFFLINE_Step2_Label_4 : 'Gas limiti ',
OFFLINE_Step2_Label_4b : '21000 is the default gas limit. When you send contracts or add\'l data, this may need to be different. Any unused gas will be returned to you. ',
OFFLINE_Step2_Label_5 : 'Veri ',
OFFLINE_Step2_Label_5b : 'Bu online bilgisayarınızda adım 1\'de gösterilir. ',
OFFLINE_Step2_Label_6 : 'Veri ',
OFFLINE_Step2_Label_6b : 'Bu istege bagli. Data is often used when you send transactions to contracts. ',
OFFLINE_Step2_Label_7 : 'Enter / Select your Private Key / JSON. ',
OFFLINE_Step3_Title : 'Adim 3: Gönder / Islemi duyur (Online Computer) ',
OFFLINE_Step3_Label_1 : 'Paste the signed transaction from Step 2 here and press the "SEND TRANSACTION" button. ',
/* Deploy Contracts */
DEP_generate : 'Bytecode oluştur ',
DEP_generated : 'Oluşturulan Bytecode ',
DEP_signtx : 'Islmemi Imzala ',
DEP_interface : 'Interface oluştur ',
/* My Wallet */
MYWAL_Nick : 'Cüzdan nickname ',
MYWAL_Address : 'Cüzdan adres ',
MYWAL_Bal : 'Bakiye ',
MYWAL_Edit : 'Düzenle ',
MYWAL_View : 'Göster ',
MYWAL_Remove : 'Kaldir ',
MYWAL_RemoveWal : 'Cüzdani kaldir: ',
MYWAL_WatchOnly : 'Sadece izlenen hesaplar ',
MYWAL_Viewing : 'Izlenen Cüzdan ',
MYWAL_Hide : 'Cüzdan bilgilerini gizle ',
MYWAL_Edit_2 : 'Cüzdani düzenle ',
MYWAL_Name : 'Cüzdan Ismi ',
MYWAL_Content_1 : 'Uyarı! Cüzdanını silmek üzeresin ',
MYWAL_Content_2 : 'Bunu kaldırmadan önce ** özel anahtarıni ve / veya Keystore Dosya ve şifreni ** kaydettiğinden emin ol. ',
MYWAL_Content_3 : 'If you want to use this wallet with your MyEtherWallet CX in the future, you will need to manually re-add it using the private key/JSON and password. ',
/* Node Switcher */
NODE_Title : 'Set Up Your Custom Node',
NODE_Subtitle : 'To connect to a local node...',
NODE_Warning : 'Your node must be HTTPS in order to connect to it via MyEtherWallet.com. You can [download the MyEtherWallet repo & run it locally](https://github.com/kvhnuke/etherwallet/releases/latest) to connect to any node. Or, get free SSL certificate via [LetsEncrypt](https://letsencrypt.org/)',
NODE_Name : 'Node Name',
NODE_Port : 'Node Port',
NODE_CTA : 'Save & Use Custom Node',
/* Contracts */
CONTRACT_Title : 'Sözleşme adresi ',
CONTRACT_Title_2 : 'Mevcut sözleşmeyi seç ',
CONTRACT_Json : 'ABI / JSON arabirimi ',
CONTRACT_Interact_Title : 'Sözleşmesi oku / yaz ',
CONTRACT_Interact_CTA : 'Select a function ',
CONTRACT_ByteCode : 'Byte Code ',
CONTRACT_Read : 'OKU ',
CONTRACT_Write : 'YAZ ',
/* Swap / Exchange */
SWAP_rates : "Güncel Fiyatlar ",
SWAP_init_1 : "I want to swap my ",
SWAP_init_2 : " for ", // "I want to swap my X ETH for X BTC"
SWAP_init_CTA : "Let's do this! ", // or "Continue"
SWAP_information : "Bilgilerin ",
SWAP_send_amt : "Gönderilecek miktar ",
SWAP_rec_amt : "Alınacak miktar ",
SWAP_your_rate : "Oran ",
SWAP_rec_add : "Alınan Adres ",
SWAP_start_CTA : "Start Swap ",
SWAP_ref_num : "Your reference number ",
SWAP_time : "Time remaining to send ",
SWAP_elapsed : "Time elapsed since sent ",
SWAP_progress_1 : "Order Initiated ",
SWAP_progress_2 : "Waiting for your ", // Waiting for your BTC...
SWAP_progress_3 : "Received! ", // ETH Received!
SWAP_progress_4 : "Sending your {{orderResult.output.currency}} ",
SWAP_progress_5 : "Order Complete ",
SWAP_order_CTA : "Please send ", // Please send 1 ETH...
SWAP_unlock : "Unlock your wallet to send ETH or Tokens directly from this page. ",
/* Sign Message */
MSG_message : 'Mesaj ',
MSG_date : 'Tarih ',
MSG_signature : 'İmza ',
MSG_verify : 'Mesajı Doğrula ',
MSG_info1 : 'Include the current date so the signature cannot be reused on a different date. ',
MSG_info2 : 'Include your nickname and where you use the nickname so someone else cannot use it. ',
MSG_info3 : 'Include a specific reason for the message so it cannot be reused for a different purpose. ',
/* Mnemonic */
MNEM_1 : 'Please select the address you would like to interact with. ',
MNEM_2 : 'Your single HD mnemonic phrase can access a number of wallets / addresses. Please select the address you would like to interact with at this time. ',
MNEM_more : 'Daha Fazla Adres ',
MNEM_prev : 'Önceki Adresler ',
/* Hardware wallets */
x_Ledger : 'Ledger Wallet ',
ADD_Ledger_1 : 'Connect your Ledger Wallet ',
ADD_Ledger_2 : 'Open the Ethereum application (or a contract application) ',
ADD_Ledger_2_Exp : 'Open the Expanse application (or a contract application) ',
ADD_Ledger_2_Ubq : 'Open the Ubiq application (or a contract application) ',
ADD_Ledger_3 : 'Verify that Browser Support is enabled in Settings ',
ADD_Ledger_4 : 'If no Browser Support is found in settings, verify that you have [Firmware >1.2](https://www.ledgerwallet.com/apps/manager) ',
ADD_Ledger_0a : 'Please use MyEtherWallet on a secure (SSL / HTTPS) connection to connect. ',
ADD_Ledger_0b : 'Re-open MyEtherWallet using [Chrome](https://www.google.com/chrome/browser/desktop/) or [Opera](https://www.opera.com/) ',
ADD_Ledger_scan : 'Ledger Wallet\'e bağlan ',
ADD_MetaMask : 'Connect to MetaMask ',
x_Trezor : 'TREZOR ',
ADD_Trezor_scan : 'TREZOR\'a bağlan ',
ADD_Trezor_select : 'This is a TREZOR seed ',
x_DigitalBitbox : 'Digital Bitbox ',
ADD_DigitalBitbox_0a : 'Re-open MyEtherWallet on a secure (SSL) connection ',
ADD_DigitalBitbox_0b : 'Re-open MyEtherWallet using [Chrome](https://www.google.com/chrome/browser/desktop/) or [Opera](https://www.opera.com/) ',
ADD_DigitalBitbox_scan : 'Connect your Digital Bitbox ',
x_Secalot : 'Secalot ',
ADD_Secalot_0a : 'Re-open MyEtherWallet on a secure (SSL) connection ',
ADD_Secalot_0b : 'Re-open MyEtherWallet using [Chrome](https://www.google.com/chrome/browser/desktop/) or [Opera](https://www.opera.com/) ',
ADD_Secalot_scan : 'Connect your Secalot ',
/* View Wallet Details */
VIEWWALLET_Subtitle : 'This allows you to download different versions of private keys and re-print your paper wallet. You may want to do this in order to [import your account into Geth/Mist](http://ethereum.stackexchange.com/questions/465/how-to-import-a-plain-private-key-into-geth/). If you want to check your balance, we recommend using a blockchain explorer like [etherscan.io](https://etherscan.io/). ',
VIEWWALLET_Subtitle_Short : 'This allows you to download different versions of private keys and re-print your paper wallet. ',
VIEWWALLET_SuccessMsg : 'Success! Here are your wallet details. ',
VIEWWALLET_ShowPrivKey : '(show)',
VIEWWALLET_HidePrivKey : '(hide)',
/* Chrome Extension */
CX_error_1 : 'Su an cüzdan eklenmemis. Cüzdan ekle tikla ["Add Wallet"](/cx-wallet.html#add-wallet) ve bir cüzdan ekle! ',
CX_quicksend : 'Gönder ', // if no appropriate translation, just use "Send" maybe add hizli
/* Error Messages */
ERROR_0 : 'Geçerli tutar yaz. ',
ERROR_1 : 'Sifren en az 9 sembol\'dan olusmasi lazim. Güçlü bir parola sectiginden emin ol. ',
ERROR_2 : 'Sorry! We don\'t recognize this type of wallet file. ',
ERROR_3 : 'Geçerli bir cüzdan dosyası değil. ',
ERROR_4 : 'Var olmayan bir birim, bu birimlerden birini kullan lütfen ',
ERROR_5 : 'Geçersiz adres. ',
ERROR_6 : 'Geçersiz parola. ',
ERROR_7 : 'Yetersiz bakiye. (Must be integer. Try 0-18.) ', // 7
ERROR_8 : 'Geçersiz gas limit. (Must be integer. Try 21000-4000000.) ', // 8
ERROR_9 : 'Geçersiz data value. (Must be hex.) ', // 9
ERROR_10 : 'Yetersiz gas. (Must be integer. Try 20 GWEI / 20000000000 WEI.) ',
ERROR_11 : 'Geçersiz veri. (Must be integer.) ', // 11
ERROR_12 : 'Geçersiz imzali isleme. ',
ERROR_13 : 'Secdigin Nickname baska bir cüzdanda kullaniliyor. ',
ERROR_14 : 'Cüzdan bulunmadi. ',
ERROR_15 : 'Whoops. It doesn\'t look like a proposal with this ID exists yet or there is an error reading this proposal. ', // 15 - NOT USED
ERROR_16 : 'Bu adrese sahip bir cüzdan zaten depolama alanında mevcut. Lütfen cüzdan sayfanızı kontrol edin. ',
ERROR_17 : 'Account you are sending from does not have enough funds. If sending tokens, you must have 0.01 ETH in your account to cover the cost of gas. ', // 17
ERROR_18 : 'All gas would be used on this transaction. This means you have already voted on this proposal or the debate period has ended.', // 18
ERROR_19 : 'Geçersiz sembol ',
ERROR_20 : 'Geçersiz ERC-20 token\'i ',
ERROR_21 : 'Could not estimate gas. There are not enough funds in the account, or the receiving contract address would throw an error. Feel free to manually set the gas and proceed. The error message upon sending may be more informative. ',
ERROR_22 : 'Please enter valid node name ',
ERROR_23 : 'Lütfen geçerli URL yaz. If you are connecting via HTTPS, your node must be over HTTPS ',
ERROR_24 : 'Lütfen geçerli port numarası yaz ',
ERROR_25 : 'Lütfen geçerli zincir kimliği (ID) yaz ',
ERROR_26 : 'Lütfen geçerli bir ABI yaz ',
ERROR_27 : 'Minimum miktar: 0.01. Maximum miktar: ',
ERROR_28 : 'Ilerde cüzdanini acmak icin **Keystore dosyan ve parolan veya özel anahtarin** lazim olacak. Lütfen kaydet ve dista yedekle! Kaydedilmemiş cüzdanini kurtarmanin hiçbir yolu yoktur. Talimatlar icin yardim [help page](https://www.myetherwallet.com/#help) sayfasini oku ',
ERROR_29 : 'Lütfen geçerli kullanıcı ve şifreyi yaz ',
ERROR_30 : 'Lütfen geçerli ENS isim yaz ',
ERROR_31 : 'Geçersiz gizli cümle (phrase) ',
ERROR_32 : 'Düğüme bağlanılamadı. Refresh your page, try a different node (upper right corner), check your firewall settings. If custom node, check your configs.', // 32
ERROR_33 : 'The wallet you have unlocked does not match the owner\'s address. ', // 33
ERROR_34 : 'The name you are attempting to reveal does not match the name you have entered. ', // 34
ERROR_35 : 'Input address is not checksummed. <a href="https://kb.myetherwallet.com/addresses/what-does-checksummed-mean.html" target="_blank" rel="noopener noreferrer"> More info</a>', // 35
ERROR_36 : 'Enter valid TX hash', // 36
ERROR_37 : 'Enter valid hex string (0-9, a-f)', // 37
SUCCESS_1 : 'Geçerli adres ',
SUCCESS_2 : 'Cüzdan basariyla desifre edildi ',
SUCCESS_3 : 'TX blockchain\'e yayınlandı. İşleminizi görmek ve mayınlandığını doğrulamak veya herhangi bir gaz veya sözleşme yürütme hatası bulunmadığını görmek için tıklayın. TX Kimliği: ', //'İşlem teslim edildi TX Hash ',
SUCCESS_4 : 'Cüzdanın başarıyla eklendi ',
SUCCESS_5 : 'Dosya secildi ',
SUCCESS_6 : 'Başarıyla bağlandı ',
SUCCESS_7 : 'Mesaj imzası doğrulandi ',
WARN_Send_Link : 'Işlem türü (gönderme modu), adres, miktar, gaz ve veri alanları sizin için doldurulmus olan bir bağlantı\'ile geldiniz. Göndermeden önce herhangi bir bilgiyi değiştirebilirsiniz. Başlamak için cüzdanınızı açın. ',
/* Geth Error Messages */
GETH_InvalidSender : 'Geçersiz gönderici ',
GETH_Nonce : 'Nonce too low ',
GETH_Cheap : 'Gas price too low for acceptance ',
GETH_Balance : 'Insufficient balance ',
GETH_NonExistentAccount : 'Account does not exist or account balance too low ',
GETH_InsufficientFunds : 'Insufficient funds for gas * price + value ',
GETH_IntrinsicGas : 'Intrinsic gas too low ',
GETH_GasLimit : 'Gaz block sınırını aşıyor ',
GETH_NegativeValue : 'Negatif değer ',
/* Parity Error Messages */
PARITY_AlreadyImported : "Transaction with the same hash was already imported.",
PARITY_Old : "Transaction nonce is too low. Try incrementing the nonce.",
PARITY_TooCheapToReplace : "Transaction fee is too low. There is another transaction with same nonce in the queue. Try increasing the fee or incrementing the nonce.",
PARITY_LimitReached : "There are too many transactions in the queue. Your transaction was dropped due to limit. Try increasing the fee.",
PARITY_InsufficientGasPrice : "Transaction fee is too low. It does not satisfy your node's minimal fee (minimal: {}, got: {}). Try increasing the fee.",
PARITY_InsufficientBalance : "Insufficient funds. Account you try to send transaction from does not have enough funds. Required {} wei and got: {} wei.",
PARITY_GasLimitExceeded : "Transaction cost exceeds current gas limit. Limit: {}, got: {}. Try decreasing supplied gas.",
PARITY_InvalidGasLimit : "Supplied gas is beyond limit.",
/* Translation Info */
translate_version : '0.3 ',
Translator_Desc : 'Thank you to our translators ',
TranslatorName_1 : '[ffidan61](https://www.myetherwallet.com/?gaslimit=21000&to=0xF1Fdf8635cc35a084B97905F62a021cAd71fbC21&value=1.0#send-transaction) ',
TranslatorAddr_1 : '0xF1Fdf8635cc35a084B97905F62a021cAd71fbC21 ',
/* Translator 1 : Insert Comments Here */
TranslatorName_2 : '',
TranslatorAddr_2 : '',
/* Translator 2 : Insert Comments Here */
TranslatorName_3 : '',
TranslatorAddr_3 : '',
/* Translator 3 : Insert Comments Here */
TranslatorName_4 : '',
TranslatorAddr_4 : '',
/* Translator 4 : Insert Comments Here */
TranslatorName_5 : '',
TranslatorAddr_5 : '',
/* Translator 5 : Insert Comments Here */
/* Help - Nothing after this point has to be translated. If you feel like being extra helpful, go for it. */
HELP_Warning : 'If you created a wallet -or- downloaded the repo before **Dec. 31st, 2015**, please check your wallets & download a new version of the repo. Click for details. ',
HELP_Desc : 'Do you see something missing? Have another question? [Get in touch with us](mailto:support@myetherwallet.com), and we will not only answer your question, we will update this page to be more useful to people in the future! ',
HELP_Remind_Title : 'Some reminders ',
HELP_Remind_Desc_1 : '**Ethereum, MyEtherWallet.com & MyEtherWallet CX, and some of the underlying Javascript libraries we use are under active development.** While we have thoroughly tested & tens of thousands of wallets have been successfully created by people all over the globe, there is always the remote possibility that something unexpected happens that causes your ETH to be lost. Please do not invest more than you are willing to lose, and please be careful. If something were to happen, we are sorry, but **we are not responsible for the lost Ether**. ',
HELP_Remind_Desc_2 : 'MyEtherWallet.com & MyEtherWallet CX are not "web wallets". You do not create an account or give us your Ether to hold onto. All data never leaves your computer/your browser. We make it easy for you to create, save, and access your information and interact with the blockchain. ',
HELP_Remind_Desc_3 : 'If you do not save your private key & password, there is no way to recover access to your wallet or the funds it holds. Back them up in multiple physical locations – not just on your computer! ',
HELP_0_Title : '0) I\'m new. What do I do? ',
HELP_0_Desc_1 : 'MyEtherWallet gives you the ability to generate new wallets so you can store your Ether yourself, not on an exchange. This process happens entirely on your computer, not our servers. Therefore, when you generate a new wallet, **you are responsible for safely backing it up**. ',
HELP_0_Desc_2 : 'Create a new wallet. ',
HELP_0_Desc_3 : 'Back the wallet up. ',
HELP_0_Desc_4 : 'Verify you have access to this new wallet and have correctly saved all necessary information. ',
HELP_0_Desc_5 : 'Transfer Ether to this new wallet. ',
HELP_1_Title : '1) How do I create a new wallet? ',
HELP_1_Desc_1 : 'Go to the "Generate Wallet" page. ',
HELP_1_Desc_2 : 'Go to the "Add Wallet" page & select "Generate New Wallet" ',
HELP_1_Desc_3 : 'Enter a strong password. If you think you may forget it, save it somewhere safe. You will need this password to send transactions. ',
HELP_1_Desc_4 : 'Click "GENERATE". ',
HELP_1_Desc_5 : 'Your wallet has now been generated. ',
HELP_2a_Desc_1 : 'You should always back up your wallet externally and in multiple physical locations - like on a USB drive and/or a piece of paper. ',
HELP_2a_Desc_2 : 'Save the address. You can keep it to yourself or share it with others. That way, others can transfer ether to you. ',
HELP_2a_Desc_3 : 'Save versions of the private key. Do not share it with anyone else. Your private key is necessary when you want to access your Ether to send it! There are 3 types of private keys: ',
HELP_2a_Desc_4 : 'Place your address, versions of the private key, and the PDF version of your paper wallet in a folder. Save this on your computer and a USB drive. ',
HELP_2a_Desc_5 : 'Print the wallet if you have a printer. Otherwise, write down your private key and address on a piece of paper. Store this as a secure location, separate from your computer and the USB drive. ',
HELP_2a_Desc_6 : 'Keep in mind, you must prevent loss of the keys and password due to loss or failure of you hard drive failure, or USB drive, or piece of paper. You also must keep in mind physical loss / damage of an entire area (think fire or flood). ',
HELP_2b_Title : '2b) How do I safely / offline / cold storage with MyEtherWallet? ',
HELP_2b_Desc_1 : 'Go to [https://github.com/kvhnuke/etherwallet/releases/latest](https://github.com/kvhnuke/etherwallet/releases/latest). ',
HELP_2b_Desc_2 : 'Click on `etherwallet-vX.X.X.X.zip`. ',
HELP_2b_Desc_3 : 'Move zip to an airgapped computer. ',
HELP_2b_Desc_4 : 'Unzip it and double-click `index.html`. ',
HELP_2b_Desc_5 : 'Generate a wallet with a strong password. ',
HELP_2b_Desc_6 : 'Save the address. Save versions of the private key. Save the password if you might not remember it forever. ',
HELP_2b_Desc_7 : 'Store these papers / USBs in multiple physically separate locations. ',
HELP_2b_Desc_8 : 'Go to the "View Wallet Info" page and type in your private key / password to ensure they are correct and access your wallet. Check that the address you wrote down is the same. ',
HELP_3_Title : '3) How do I verify I have access to my new wallet? ',
HELP_3_Desc_1 : '**Before you send any Ether to your new wallet**, you should ensure you have access to it. ',
HELP_3_Desc_2 : 'Navigate to the "View Wallet Info" page. ',
HELP_3_Desc_3 : 'Navigate to the MyEtherWallet.com "View Wallet Info" page. ',
HELP_3_Desc_4 : 'Select your wallet file -or- your private key and unlock your wallet. ',
HELP_3_Desc_5 : 'If the wallet is encrypted, a text box will automatically appear. Enter the password. ',
HELP_3_Desc_6 : 'Click the "Unlock Wallet" button. ',
HELP_3_Desc_7 : 'Your wallet information should show up. Find your account address, next to a colorful, circular icon. This icon visually represents your address. Be certain that the address is the address you have saved to your text document and is on your paper wallet. ',
HELP_3_Desc_8 : 'If you are planning on holding a large amount of ether, we recommend that send a small amount of ether from new wallet before depositting a large amount. Send 0.001 ether to your new wallet, access that wallet, send that 0.001 ether to another address, and ensure everything works smoothly. ',
HELP_4_Title : '4) How do I send Ether from one wallet to another? ',
HELP_4_Desc_1 : 'If you plan to move a large amount of ether, you should test sending a small amount to your wallet first to ensure everything goes as planned. ',
HELP_4_Desc_2 : 'Navigate to the "Ether ve Tokens göndermek" page. ',
HELP_4_Desc_3 : 'Select your wallet file -or- your private key and unlock your wallet. ',
HELP_4_Desc_4 : 'If the wallet is encrypted, a text box will automatically appear. Enter the password. ',
HELP_4_Desc_5 : 'Click the "Unlock Wallet" button. ',
HELP_4_Desc_6 : 'Enter the address you would like to send to in the "To Address:" field. ',
HELP_4_Desc_7 : 'Enter the amount you would like to send. You can also click the "Send Entire Balance" link if you would like the transfer the entire balance. ',
HELP_4_Desc_9 : 'Click "Generate Transaction". ',
HELP_4_Desc_10 : 'A couple more fields will appear. This is your browser generating the transaction. ',
HELP_4_Desc_11 : 'Click the blue "Send Transaction" button below that. ',
HELP_4_Desc_12 : 'A pop-up will appear. Verify that the amount and the address you are sending to are correct. Then click "Yes, I am sure! Make transaction." button. ',
HELP_4_Desc_13 : 'The transaction will be submitted. The TX Hash will display. You can click that TX Hash to see it on the blockchain. ',
HELP_4CX_Title : '4) How do I send Ether using MyEtherWallet CX? ',
HELP_4CX_Desc_1 : 'First, you need to add a wallet. Once you have done that, you have 2 options: the "QuickSend" functionality from the Chrome Extension icon or the "Send Ether" page. ',
HELP_4CX_Desc_2 : 'QuickSend: ',
HELP_4CX_Desc_3 : 'Click the Chrome Extension Icon. ',
HELP_4CX_Desc_4 : 'Click the "QuickSend" button. ',
HELP_4CX_Desc_5 : 'Select the wallet you wish to send from. ',
HELP_4CX_Desc_6 : 'Enter the address you would like to send to in the "To Address:" field. ',
HELP_4CX_Desc_7 : 'Enter the amount you would like to send. You can also click the "Send Entire Balance" link if you would like the transfer the entire balance. ',
HELP_4CX_Desc_8 : 'Click "Send Transaction". ',
HELP_4CX_Desc_9 : 'Verify the address and the amount you are sending is correct. ',
HELP_4CX_Desc_10 : 'Enter the password for that wallet. ',
HELP_4CX_Desc_11 : 'Click "Send Transaction." ',
HELP_4CX_Desc_12 : 'Using "Send Ether" Page ',
HELP_5_Title : '5) How do I run MyEtherWallet.com offline/locally? ',
HELP_5_Desc_1 : 'You can run MyEtherWallet.com on your computer instead of from the GitHub servers. You can generatea a wallet completely offline and send transactions from the "Offline Transaction" page. ',
HELP_5_Desc_7 : 'MyEtherWallet.com is now running entirely on your computer. ',
HELP_5_Desc_8 : 'In case you are not familiar, you need to keep the entire folder in order to run the website, not just `index.html`. Don\'t touch or move anything around in the folder. If you are storing a backup of the MyEtherWallet repo for the future, we recommend just storing the ZIP so you can be sure the folder contents stay intact. ',
HELP_5_Desc_9 : 'As we are constantly updating MyEtherWallet.com, we recommend you periodically update your saved version of the repo. ',
HELP_5CX_Title : '5) How can I install this extension from the repo instead of the Chrome Store? ',
HELP_5CX_Desc_2 : 'Click on `chrome-extension-vX.X.X.X.zip` and unzip it. ',
HELP_5CX_Desc_3 : 'Go to Google Chrome and find you settings (in the menu in the upper right). ',
HELP_5CX_Desc_4 : 'Click "Extensions" on the left. ',
HELP_5CX_Desc_5 : 'Check the "Developer Mode" button at the top of that page. ',
HELP_5CX_Desc_6 : 'Click the "Load unpacked extension..." button. ',
HELP_5CX_Desc_7 : 'Navigate to the now-unzipped folder that you downloaded earlier. Click "select". ',
HELP_5CX_Desc_8 : 'The extension should now show up in your extensions and in your Chrome Extension bar. ',
HELP_7_Title : '7) How do I send Tokens & add custom tokens? ',
HELP_7_Desc_0 : '[Ethplorer.io](https://ethplorer.io/) is a great way to explore tokens and find the decimals of a token. ',
HELP_7_Desc_1 : 'Navigate to the "Ether ve Tokens göndermeks" page. ',
HELP_7_Desc_2 : 'Unlock your wallet. ',
HELP_7_Desc_3 : 'Enter the address you would like to send to in the "To Address:" field. ',
HELP_7_Desc_4 : 'Enter the amount you would like to send. ',
HELP_7_Desc_5 : 'Select which token you would like to send. ',
HELP_7_Desc_6 : 'If you do not see the token listed: ',
HELP_7_Desc_7 : 'Click "Custom". ',
HELP_7_Desc_8 : 'Enter the address, name, and decimals of the token. These are provided by the developers of the token and are also needed when you "Add a Watch Token" to Mist. ',
HELP_7_Desc_9 : 'Click "Save". ',
HELP_7_Desc_10 : 'You can now send that token as well as see it\'s balance in the sidebar. ',
HELP_7_Desc_11 : 'Click "Generate Transaction". ',
HELP_7_Desc_12 : 'A couple more fields will appear. This is your browser generating the transaction. ',
HELP_7_Desc_13 : 'Click the blue "Send Transaction" button below that. ',
HELP_7_Desc_14 : 'A pop-up will appear. Verify that the amount and the address you are sending to are correct. Then click "Yes, I am sure! Make transaction." button. ',
HELP_7_Desc_15 : 'The transaction will be submitted. The TX Hash will display. You can click that TX Hash to see it on the blockchain. ',
HELP_8_Title : '8) What happens if your site goes down? ',
HELP_8_Desc_1 : 'MyEtherWallet is not a web wallet. You don\'t have a login and nothing ever gets saved to our servers. It is simply an interface that allows you interact with the blockchain. ',
HELP_8_Desc_2 : 'If MyEtherWallet.com goes down, you would have to find another way (like geth or Ethereum Wallet / Mist) to do what we are doing. But you wouldn\'t have to "get" your Ether out of MyEtherWallet because it\'s not in MyEtherWallet. It\'s in whatever wallet your generated via our site. ',
HELP_8_Desc_3 : 'You can import your unencrypted private key and your Geth/Mist Format (encrypted) files directly into geth / Ethereum Wallet / Mist very easily now. See question #12 below. ',
HELP_8_Desc_4 : 'In addition, the likelihood of us taking MyEtherWallet down is slim to none. It costs us almost nothing to maintain as we aren\'t storing any information. If we do take the domain down, it still is, and always will be, publicly available at [https://github.com/kvhnuke/etherwallet](https://github.com/kvhnuke/etherwallet/tree/gh-pages). You can download the ZIP there and run it locally. ',
HELP_8CX_Title : '8) What happens if MyEtherWallet CX disappears? ',
HELP_8CX_Desc_1 : 'First, all data is saved on your computer, not our servers. I know it can be confusing, but when you look at the Chrome Extension, you are NOT looking at stuff saved on our servers somewhere - it\'s all saved on your own computer. ',
HELP_8CX_Desc_2 : 'That said, it is **very important** that you back up all your information for any new wallets generated with MyEtherWallet CX. That way if anything happens to MyEtherWallet CX or your computer, you still have all the information necessary to access your Ether. See the #2a for how to back up your wallets. ',
HELP_8CX_Desc_3 : 'If for some reason MyEtherWallet CX disappears from the Chrome Store, you can find the source on Github and load it manually. See #5 above. ',
HELP_9_Title : '9) Is the "Send Ether" page offline? ',
HELP_9_Desc_1 : 'No. It needs the internet in order to get the current gas price, nonce of your account, and broadcast the transaction (aka "send"). However, it only sends the signed transaction. Your private key safely stays with you. We also now provide an "Offline Transaction" page so that you can ensure your private keys are on an offline/airgapped computer at all times. ',
HELP_10_Title : '10) How do I make an offline transaction? ',
HELP_10_Desc_1 : 'Navigate to the "Offline Transaction" page via your online computer. ',
HELP_10_Desc_2 : 'Enter the "From Address". Please note, this is the address you are sending FROM, not TO. This generates the nonce and gas price. ',
HELP_10_Desc_3 : 'Move to your offline computer. Enter the "TO ADDRESS" and the "AMOUNT" you wish to send. ',
HELP_10_Desc_4 : 'Enter the "GAS PRICE" as it was displayed to you on your online computer in step #1. ',
HELP_10_Desc_5 : 'Enter the "NONCE" as it was displayed to you on your online computer in step #1. ',
HELP_10_Desc_6 : 'The "GAS LIMIT" has a default value of 21000. This will cover a standard transaction. If you are sending to a contract or are including additional data with your transaction, you will need to increase the gas limit. Any excess gas will be returned to you. ',
HELP_10_Desc_7 : 'If you wish, enter some data. If you enter data, you will need to include more than the 21000 default gas limit. All data is in HEX format. ',
HELP_10_Desc_8 : 'Select your wallet file -or- your private key and unlock your wallet. ',
HELP_10_Desc_9 : 'Press the "GENERATE SIGNED TRANSACTION" button. ',
HELP_10_Desc_10 : 'The data field below this button will populate with your signed transaction. Copy this and move it back to your online computer. ',
HELP_10_Desc_11 : 'On your online computer, paste the signed transaction into the text field in step #3 and click "SEND Ether". This will broadcast your transaction. ',
HELP_12_Title : '12) How do I import a wallet created with MyEtherWallet into geth / Ethereum Wallet / Mist? ',
HELP_12_Desc_1 : 'Using an Geth/Mist JSON file from MyEtherWallet v2+.... ',
HELP_12_Desc_2 : 'Go to the "View Wallet Info" page. ',
HELP_12_Desc_3 : 'Unlock your wallet using your **encrypted** private key or JSON file. ',
HELP_12_Desc_4 : 'Go to the "My Wallets" page. ',
HELP_12_Desc_5 : 'Select the wallet you want to import into Mist, click the "View" icon, enter your password, and access your wallet. ',
HELP_12_Desc_6 : 'Find the "Download JSON file - Geth/Mist Format (encrypted)" section. Press the "Download" button below that. You now have your keystore file. ',
HELP_12_Desc_7 : 'Open the Ethereum Wallet application. ',
HELP_12_Desc_8 : 'In the menu bar, go "Accounts" -> "Backup" -> "Accounts" ',
HELP_12_Desc_9 : 'This will open your keystore folder. Copy the file you just downloaded (`UTC--2016-04-14......../`) into that keystore folder. ',
HELP_12_Desc_10 : 'Your account should show up immediately under "Accounts." ',
HELP_12_Desc_11 : 'Using your unencrypted private key... ',
HELP_12_Desc_12 : 'If you do not already have your unencrypted private key, navigate to the "View Wallet Details" page. ',
HELP_12_Desc_13 : 'Select your wallet file -or- enter/paste your private key to unlock your wallet. ',
HELP_12_Desc_14 : 'Copy Your Private Key (unencrypted). ',
HELP_12_Desc_15 : 'If you are on a Mac: ',
HELP_12_Desc_15b : 'If you are on a PC: ',
HELP_12_Desc_16 : 'Open Text Edit and paste this private key. ',
HELP_12_Desc_17 : 'Go to the menu bar and click "Format" -> "Make Plain Text". ',
HELP_12_Desc_18 : 'Save this file to your `desktop/` as `nothing_special_delete_me.txt`. Make sure it says "UTF-8" and "If no extension is provided use .txt" in the save dialog. ',
HELP_12_Desc_19 : 'Open terminal and run the following command: `geth account import ~/Desktop/nothing_special_delete_me.txt` ',
HELP_12_Desc_20 : 'This will prompt you to make a new password. This is the password you will use in geth / Ethereum Wallet / Mist whenever you send a transaction, so don\'t forget it. ',
HELP_12_Desc_21 : 'After successful import, delete `nothing_special_delete_me.txt` ',
HELP_12_Desc_22 : 'The next time you open the Ethereum Wallet application, your account will be listed under "Accounts". ',
HELP_12_Desc_23 : 'Open Notepad & paste the private key ',
HELP_12_Desc_24 : 'Save the file as `nothing_special_delete_me.txt` at `C:` ',
HELP_12_Desc_25 : 'Run the command, `geth account import C:\\nothing_special_delete_me.txt` ',
HELP_12_Desc_26 : 'This will prompt you to make a new password. This is the password you will use in geth / Ethereum Wallet / Mist whenever you send a transaction, so don\'t forget it. ',
HELP_12_Desc_27 : 'After successful import, delete `nothing_special_delete_me.txt` ',
HELP_12_Desc_28 : 'The next time you open the Ethereum Wallet application, your account will be listed under "Accounts". ',
HELP_13_Title : '13) What does "Insufficient funds. Account you try to send transaction from does not have enough funds. Required XXXXXXXXXXXXXXXXXXX and got: XXXXXXXXXXXXXXXX." Mean? ',
HELP_13_Desc_1 : 'This means you do not have enough Ether in your account to cover the cost of gas. Each transaction (including token and contract transactions) require gas and that gas is paid in Ether. The number displayed is the amount required to cover the cost of the transaction in Wei. Take that number, divide by `1000000000000000000`, and subtract the amount of Ether you were trying to send (if you were attempting to send Ether). This will give you the amount of Ether you need to send to that account to make the transaction. ',
HELP_14_Title : '14) Some sites randomize (seed) the private key generation via mouse movements. MyEtherWallet.com doesn\'t do this. Is the random number generation for MyEtherWallet safe? ',
HELP_14_Desc_1 : 'While the mouse moving thing is clever and we understand why people like it, the reality is window.crypto ensures more entropy than your mouse movements. The mouse movements aren\'t unsafe, it\'s just that we (and tons of other crypto experments) believe in window.crypto. In addition, MyEtherWallet.com can be used on touch devices. Here\'s a [conversation between an angry redditor and Vitalik Buterin regarding mouse movements v. window.crypto](https://www.reddit.com/r/ethereum/comments/2bilqg/note_there_is_a_paranoid_highsecurity_way_to/cj5sgrm) and here is the [the window.crypto w3 spec](https://dvcs.w3.org/hg/webcrypto-api/raw-file/tip/spec/Overview.html#dfn-GlobalCrypto). ',
HELP_15_Title : '15) Why hasn\'t the account I just created show up in the blockchain explorer? (ie: etherchain, etherscan) ',
HELP_15_Desc_1 : 'Accounts will only show up in a blockchain explorer once the account has activity on it—for example, once you have transferred some Ether to it. ',
HELP_16_Title : '16) How do I check the balance of my account? ',
HELP_16_Desc_1 : 'You can use a blockchain explorer like [etherscan.io](https://etherscan.io/). Paste your address into the search bar and it will pull up your account and transaction history. For example, here\'s what our [donation account](https://etherscan.io/address/0xDECAF9CD2367cdbb726E904cD6397eDFcAe6068D) looks like on etherscan.io ',
HELP_17_Title : '17) Why isn\'t my balance showing up when I unlock my wallet? ',
HELP_17_Desc_1 : 'This is most likely due to the fact that you are behind a firewall. The API that we use to get the balance and convert said balance is often blocked by firewalls for whatever reason. You will still be able to send transactions, you just need to use a different method to see said balance, like etherscan.io ',
HELP_18_Title : '18) Where is my geth wallet file? ',
HELP_19_Title : '19) Where is my Mist wallet file? ',
HELP_19_Desc_1 : 'Mist files are typically found in the file locations above, but it\'s much easier to open Mist, select "Accounts" in the top bar, select "Backup", and select "Accounts". This will open the folder where your files are stored. ',
HELP_20_Title : '20) Where is my pre-sale wallet file? ',
HELP_20_Desc_1 : 'Wherever you saved it. ;) It also was emailed to you, so check there. Look for the file called `"ethereum_wallet_backup.json"` and select that file. This wallet file will be encrypted with a password that you created during the purchase of the pre-sale. ',
HELP_21_Title : '21) Couldn\'t everybody put in random private keys, look for a balance, and send to their own account? ',
HELP_21_Desc_1 : 'Short version: yes, but finding an account with a balance would take longer than the universe...so...no. ',
HELP_21_Desc_2 : 'Long ELI5 Version: So Ethereum is based on [Public Key Cryptography](https://en.wikipedia.org/wiki/Public-key_cryptography), specifically [Elliptic curve cryptography](https://eprint.iacr.org/2013/734.pdf) which is very widely used, not just in Ethereum. Most servers are protected via ECC. Bitcoin uses the same, as well as SSH and TLS and a lot of other stuff. The Ethereum keys specifically are 256-bit keys, which are stronger than 128-bit and 192-bit, which are also widely used and still considered secure by experts. ',
HELP_21_Desc_3 : 'In this you have a private key and a public key. The private key can derive the public key, but the public key cannot be turned back into the private key. The fact that the internet and the world’s secrets are using this cryptography means that if there is a way to go from public key to private key, your lost ether is the least of everyone’s problems. ',
HELP_21_Desc_4 : 'Now, that said, YES if someone else has your private key then they can indeed send ether from your account. Just like if someone has your password to your email, they can read and send your email, or the password to your bank account, they could make transfers. You could download the Keystore version of your private key which is the private key that is encrypted with a password. This is like having a password that is also protected by another password. ',
HELP_21_Desc_5 : 'And YES, in theory you could just type in a string of 64 hexadecimal characters until you got one that matched. In fact, smart people could write a program to very quickly check random private keys. This is known as "brute-forcing" or "mining" private keys. People have thought about this long and hard. With a few very high end servers, they may be able to check 1M+ keys / second. However, even checking that many per second would not yield access to make the cost of running those servers even close to worthwhile - it is more likely you, and your great-grandchildren, will die before getting a match. ',
HELP_21_Desc_6 : 'If you know anything about Bitcoin, [this will put it in perspective:](http://bitcoin.stackexchange.com/questions/32331/two-people-with-same-public-address-how-will-people-network-know-how-to-deliver) *To illustrate how unlikely this is: suppose every satoshi of every bitcoin ever to be generated was sent to its own unique private keys. The probability that among those keys there could be two that would correspond to the same address is roughly one in 100 quintillion. ',
HELP_21_Desc_7 : '[If you want something a bit more technical:](http://security.stackexchange.com/questions/25375/why-not-use-larger-cipher-keys/25392#25392) *These numbers have nothing to do with the technology of the devices; they are the maximums that thermodynamics will allow. And they strongly imply that brute-force attacks against 256-bit keys will be infeasible until computers are built from something other than matter and occupy something other than space. ',
HELP_21_Desc_8 : 'Of course, this all assumes that keys are generated in a truly random way & with sufficient entropy. The keys generated here meet that criteria, as do Jaxx and Mist/geth. The Ethereum wallets are all pretty good. Keys generated by brainwallets do not, as a person\'s brain is not capable of creating a truly random seed. There have been a number of other issues regarding lack of entropy or seeds not being generated in a truly random way in Bitcoin-land, but that\'s a separate issue that can wait for another day. ',
HELP_SecCX_Title : 'Güvenlik - MyEtherWallet CX ',
HELP_SecCX_Desc_1 : 'Where is this extension saving my information? ',
HELP_SecCX_Desc_2 : 'The information you store in this Chrome Extension is saved via [chrome.storage](http://chrome.storage/). - this is the same place your passwords are saved when you save your password in Chrome. ',
HELP_SecCX_Desc_3 : 'What information is saved? ',
HELP_SecCX_Desc_4 : 'The address, nickname, private key is stored in chrome.storage. The private key is encrypted using the password you set when you added the wallet. The nickname and wallet address is not encrypted. ',
HELP_SecCX_Desc_5 : 'Why aren\'t the nickname and wallet address encrypted? ',
HELP_SecCX_Desc_6 : 'If we were to encrypt these items, you would need to enter a password each time you wanted to view your account balance or view the nicknames. If this concerns you, we recommend you use MyEtherWallet.com instead of this Chrome Extension. ',
HELP_Sec_Title : 'Güvenlik ',
HELP_Sec_Desc_1 : 'If one of your first questions is "Why should I trust these people?", that is a good thing. Hopefully the following will help ease your fears. ',
HELP_Sec_Desc_2 : 'We\'ve been up and running since August 2015. If you search for ["myetherwallet" on reddit](https://www.reddit.com/search?q=myetherwallet), you can see numerous people who use us with great success. ',
HELP_Sec_Desc_3 : 'We aren\'t going to take your money or steal your private key(s). There is no malicious code on this site. In fact the "GENERATE WALLET" pages are completely client-side. That means that all the code is executed on ** your computer** and it is never saved and transmitted anywhere. ',
HELP_Sec_Desc_4 : 'Check the URL -- This site is being served through GitHub and you can see the source code here: [https://github.com/kvhnuke/etherwallet/tree/gh-pages](https://github.com/kvhnuke/etherwallet/tree/gh-pages) to [https://www.myetherwallet.com](https://www.myetherwallet.com). ',
HELP_Sec_Desc_5 : 'For generating wallets, you can download the [source code and run it locally](https://github.com/kvhnuke/etherwallet/releases/latest). See #5 above. ',
HELP_Sec_Desc_6 : 'Generate a test wallet and check and see what network activity is happening. The easiest way for you to do this is to right click on the page and click "inspect element". Go to the "Network" tab. Generate a test wallet. You will see there is no network activity. You may see something happening that looks like data:image/gif and data:image/png. Those are the QR codes being generated...on your computer...by your computer. No bytes were transferred. ',
HELP_Sec_Desc_8 : 'If you do not feel comfortable using this tool, then by all means, do not use it. We created this tool as a helpful way for people to generate wallets and make transactions without needing to dive into command line or run a full node. Again, feel free to reach out if you have concerns and we will respond as quickly as possible. Thanks! ',
HELP_FAQ_Title : 'Sık Sorulan Sorulara Daha Yararlı Yanıtlar ',
HELP_Contact_Title : 'İletişim Kurma Yolları'
};
module.exports = tr;
|
#!/usr/bin/env python
#
#===- clang-format-diff.py - ClangFormat Diff Reformatter ----*- python -*--===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
r"""
ClangFormat Diff Reformatter
============================
This script reads input from a unified diff and reformats all the changed
lines. This is useful to reformat all the lines touched by a specific patch.
Example usage for git/svn users:
git diff -U0 HEAD^ | clang-format-diff.py -p1 -i
svn diff --diff-cmd=diff -x-U0 | clang-format-diff.py -i
"""
import argparse
import difflib
import re
import string
import subprocess
import sys
import io
def main():
    """Reformat only the lines touched by a unified diff read from stdin.

    Parses ``+++`` and ``@@`` diff headers to collect the changed (post-image)
    line ranges per file, then invokes clang-format once per file with
    ``-lines start:end`` arguments. With ``-i`` the files are edited in place;
    on a clang-format failure the script exits with its return code.
    """
    parser = argparse.ArgumentParser(description=
                                     'Reformat changed lines in diff. Without -i '
                                     'option just output the diff that would be '
                                     'introduced.')
    parser.add_argument('-i', action='store_true', default=False,
                        help='apply edits to files instead of displaying a diff')
    parser.add_argument('-p', metavar='NUM', default=0,
                        help='strip the smallest prefix containing P slashes')
    parser.add_argument('-regex', metavar='PATTERN', default=None,
                        help='custom pattern selecting file paths to reformat '
                             '(case sensitive, overrides -iregex)')
    parser.add_argument('-iregex', metavar='PATTERN', default=
                        r'.*\.(cpp|cc|c\+\+|cxx|c|cl|h|hpp|m|mm|inc|js|ts|proto'
                        r'|protodevel|java)',
                        help='custom pattern selecting file paths to reformat '
                             '(case insensitive, overridden by -regex)')
    parser.add_argument('-sort-includes', action='store_true', default=False,
                        help='let clang-format sort include blocks')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='be more verbose, ineffective without -i')
    parser.add_argument('-style',
                        help='formatting style to apply (LLVM, Google, Chromium, '
                             'Mozilla, WebKit)')
    parser.add_argument('-binary', default='clang-format',
                        help='location of binary to use for clang-format')
    args = parser.parse_args()

    # Extract changed lines for each file.
    filename = None
    lines_by_file = {}
    for line in sys.stdin:
        # "+++ b/path/to/file" header; strip args.p leading path components.
        # Raw string: '\+' and '\S' are invalid escapes in a plain literal.
        match = re.search(r'^\+\+\+\ (.*?/){%s}(\S*)' % args.p, line)
        if match:
            filename = match.group(2)
        if filename is None:  # was "== None"; identity check is the idiom
            continue

        if args.regex is not None:
            if not re.match('^%s$' % args.regex, filename):
                continue
        else:
            if not re.match('^%s$' % args.iregex, filename, re.IGNORECASE):
                continue

        # "@@ -l,c +start,count @@" hunk header: record the post-image range.
        match = re.search(r'^@@.*\+(\d+)(,(\d+))?', line)
        if match:
            start_line = int(match.group(1))
            line_count = 1
            if match.group(3):
                line_count = int(match.group(3))
            if line_count == 0:
                continue  # pure-deletion hunk: nothing to reformat
            end_line = start_line + line_count - 1
            lines_by_file.setdefault(filename, []).extend(
                ['-lines', str(start_line) + ':' + str(end_line)])

    # Reformat files containing changes in place.
    for filename, lines in lines_by_file.items():
        if args.i and args.verbose:
            print('Formatting', filename)
        command = [args.binary, filename]
        # Echo the path relative to the stripped prefix; fall back to the full
        # name so bare filenames (e.g. with -p0) no longer raise IndexError.
        print(filename.split('/', 1)[1] if '/' in filename else filename)
        if args.i:
            command.append('-i')
        if args.sort_includes:
            command.append('-sort-includes')
        command.extend(lines)
        if args.style:
            command.extend(['-style', args.style])
        p = subprocess.Popen(command, stdout=subprocess.PIPE,
                             stderr=None, stdin=subprocess.PIPE)
        stdout, stderr = p.communicate()
        if p.returncode != 0:
            sys.exit(p.returncode)

# NOTE: upstream's diff-preview code (difflib-based, for runs without -i) was
# removed in this copy; only the in-place/echo behavior above remains.
# Entry point when executed as a script (a unified diff is expected on stdin).
if __name__ == '__main__':
    main()
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyFenicsDolfinx(PythonPackage):
    """Python interface library to Next generation FEniCS problem solving
    environment"""

    homepage = "https://github.com/FEniCS/dolfinx"
    url = "https://github.com/FEniCS/dolfinx/archive/v0.1.0.tar.gz"
    git = "https://github.com/FEniCS/dolfinx.git"
    maintainers = ["js947", "chrisrichardson", "garth-wells"]

    version("main", branch="main")
    version("0.3.0", sha256="4857d0fcb44a4e9bf9eb298ba5377abdee17a7ad0327448bdd06cce73d109bed")
    version("0.2.0", sha256="4c9b5a5c7ef33882c99299c9b4d98469fb7aa470a37a91bc5be3bb2fc5b863a4")
    version("0.1.0", sha256="0269379769b5b6d4d1864ded64402ecaea08054c2a5793c8685ea15a59af5e33")

    # Build tools and interpreter.
    # Quote style normalized to double quotes for consistency with the rest
    # of the package (previously mixed single/double quotes).
    depends_on("cmake@3.18:", type="build")
    depends_on("hdf5", type="build")
    depends_on("pkgconfig", type=("build", "run"))
    depends_on("python@3.7:", type=("build", "run"))
    depends_on("py-setuptools", type="build")

    # C++ core (dolfinx) and basix, version-matched to each release.
    depends_on("fenics-dolfinx@main", when="@main")
    depends_on("fenics-dolfinx@0.3.0", when="@0.3.0")
    depends_on("fenics-dolfinx@0.2.0", when="@0.2.0")
    depends_on("fenics-dolfinx@0.1.0", when="@0.1.0")
    depends_on("fenics-basix@main", type=("build", "link"), when="@main")
    depends_on("fenics-basix@0.3.0", type=("build", "link"), when="@0.3.0")
    depends_on("fenics-basix@0.2.0", type=("build", "link"), when="@0.2.0")
    depends_on("fenics-basix@0.1.0", type=("build", "link"), when="@0.1.0")

    # Python runtime dependencies.
    depends_on("py-numpy@:1.20.3", type=("build", "run"))
    depends_on("py-mpi4py", type=("build", "run"))
    depends_on("py-petsc4py", type=("build", "run"))
    depends_on("py-pybind11@2.6.2:2.7.99", type=("build", "run"))
    depends_on("xtensor@0.23.10:", type="build")
    depends_on("py-fenics-ffcx@main", type="run", when="@main")
    depends_on("py-fenics-ffcx@0.3.0", type="run", when="@0.3.0")
    depends_on("py-fenics-ffcx@0.2.0", type="run", when="@0.2.0")
    depends_on("py-fenics-ffcx@0.1.0", type="run", when="@0.1.0")
    depends_on("py-fenics-ufl@main", type="run", when="@main")
    depends_on("py-fenics-ufl@2021.1.0", type="run", when="@0.1:")
    depends_on("py-cffi", type="run")
    depends_on("py-numpy", type="run")

    # The Python bindings live in the "python" subdirectory of the dolfinx tree.
    phases = ["build_ext", "build", "install"]
    build_directory = "python"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
DATA PREPARATION FOR THE PRACTICAL WORK OF UNSUPERVISED CLASSIFICATION WITH METEONET DATA.
@creationdate : 08/07/2021
@author : Thomas Rieutord (thomas.rieutord@meteo.fr)
"""
import os
import sys
import pandas as pd
import numpy as np
import datetime as dt
# Weather parameters as named in the MeteoNet CSV columns:
# t = temperature, ff = wind speed, precip = precipitation.
mtoparams = ["t","ff","precip"]
# Per-station predictors derived from the raw series: daily-min temperature
# (Tn), daily-max temperature (Tx), daily-max wind (FFmax), rainfall (RR).
predictors = ["Tn","Tx","FFmax","RR"]
input_file = os.path.join("no_save", "SE2018.csv")
output_file = os.path.join("..", "data", "obs_SE_2018.csv")

# OBSERVATIONS
#==============
print("Reading observation data...")

# Read data
#-----------
# Column 4 holds the observation date.
df = pd.read_csv(input_file,parse_dates=[4],infer_datetime_format=True)
d0 = df['date'][0]
ndaysbatch = 1  # aggregation window, in days
# Bin edges spanning the year, every `ndaysbatch` days from the first date.
datebins = [(d0 + k*dt.timedelta(days=ndaysbatch)) for k in range(int(365/ndaysbatch) + 1)]
stations = np.unique(df['number_sta'])
n_sta = np.unique(df['number_sta']).size

# Filter missing data
#---------------------
# Keep only the stations whose daily-averaged parameters contain no NaN.
toolbar_width = int(n_sta/50) + 1
sys.stdout.write(
    "\nFilter missing data : [%s]" % ("." * toolbar_width)
)
sys.stdout.flush()
sys.stdout.write("\b" * (toolbar_width + 1))
valid_stations = []
valid_lat = []
valid_lon = []
i=0
for sta in stations:
    if np.mod(i, 50) == 0:
        # Advance the textual progress bar every 50 stations.
        sys.stdout.write("*")
        sys.stdout.flush()
    idx = df["number_sta"]==sta
    lat, lon = df.loc[idx,['lat','lon']].values[0]
    df_sta = df.loc[idx,['date']+mtoparams]
    # Assign each observation to its day-of-year bin.
    date1j = np.digitize(
        [d.timestamp() for d in df_sta['date']],
        [d.timestamp() for d in datebins]
    )
    data = df_sta.loc[:,mtoparams].groupby(date1j).mean().values.ravel()
    if np.isnan(data).sum()==0:
        valid_stations.append(sta)
        valid_lat.append(lat)
        valid_lon.append(lon)
    i+=1
sys.stdout.write("]\n")
N = len(valid_stations)
p = len(predictors)
print(N, "valid stations (",N/n_sta*100,"%)")

# Extract relevant data
#-----------------------
toolbar_width = int(N/50) + 1
sys.stdout.write(
    "\nExtract relevant data : [%s]" % ("." * toolbar_width)
)
sys.stdout.flush()
sys.stdout.write("\b" * (toolbar_width + 1))
X = np.zeros((N,p))
i=0
for sta in valid_stations:
    if np.mod(i, 50) == 0:
        sys.stdout.write("*")
        sys.stdout.flush()
    idx = df["number_sta"]==sta
    df_sta = df.loc[idx,['date']+mtoparams]
    date1j = np.digitize(
        [d.timestamp() for d in df_sta['date']],
        [d.timestamp() for d in datebins]
    )
    for j in range(len(predictors)):
        pred = predictors[j]
        if pred=="Tn":
            # Mean over the year of the daily temperature minima.
            X[i,j] = df_sta.loc[:,"t"].groupby(date1j).min().mean()
        elif pred=="Tx":
            X[i,j] = df_sta.loc[:,"t"].groupby(date1j).max().mean()
        elif pred=="FFmax":
            X[i,j] = df_sta.loc[:,"ff"].groupby(date1j).max().mean()
        elif pred=="RR":
            # NOTE(review): unlike Tn/Tx/FFmax this is the raw sum of every
            # precip record over the whole year, not a daily aggregate
            # averaged over days -- confirm this asymmetry is intended.
            X[i,j] = df_sta.loc[:,"precip"].sum()
        else:
            raise ValueError("Invalid predictor alias")
    i+=1
sys.stdout.write("]\n")

# Save data in a csv file
#-------------------------
df_obs = pd.DataFrame(index=valid_stations, columns=predictors, data=X)
df_obs['lat'] = valid_lat
df_obs['lon'] = valid_lon
if df_obs.isna().sum().sum() != 0:
    raise ValueError("Remaining NaN in the dataset")  # typo fixed ("Remaing")
df_obs.to_csv(output_file)
print("Data from observations saved in ", output_file)
|
import json
import os
import re
import textwrap
import unittest
from conans.model.ref import ConanFileReference
from conans.paths import CONANFILE
from conans.test.utils.cpp_test_files import cpp_hello_conan_files
from conans.test.utils.tools import TestClient, GenConanfile
from conans.util.files import load, save
class InfoTest(unittest.TestCase):
def not_found_package_dirty_cache_test(self):
    # Conan does a lock on the cache, and even if the package doesn't exist
    # left a trailing folder with the filelocks. This test checks
    # it will be cleared: after the failed "info" the cache store is empty.
    client = TestClient()
    client.run("info nothing/0.1@user/testing", assert_error=True)
    self.assertEqual(os.listdir(client.cache.store), [])
    # This used to fail in Windows, because of the different case
    # ("Nothing" vs "nothing" colliding on a case-insensitive filesystem).
    client.save({"conanfile.py": GenConanfile().with_name("Nothing").with_version("0.1")})
    client.run("export . user/testing")
def failed_info_test(self):
    # "conan info" on an unresolvable requirement must fail with a clear
    # message, and the failed lookup must leave no trace in the local cache
    # (a subsequent "search" finds nothing).
    client = TestClient()
    client.save({"conanfile.py": GenConanfile().with_require_plain("Pkg/1.0.x@user/testing")})
    client.run("info .", assert_error=True)
    self.assertIn("Pkg/1.0.x@user/testing: Not found in local cache", client.out)
    client.run("search")
    self.assertIn("There are no packages", client.out)
    self.assertNotIn("Pkg/1.0.x@user/testing", client.out)
def _create(self, number, version, deps=None, deps_dev=None, export=True):
    # Helper: generate a "Hello" test package (optionally with dev_requires)
    # and export it twice -- first without url/license/description to check
    # the attribute_checker hook warnings, then with metadata added to check
    # the warnings disappear.
    files = cpp_hello_conan_files(number, version, deps, build=False)
    files[CONANFILE] = files[CONANFILE].replace("config(", "configure(")
    if deps_dev:
        # Inject a dev_requires attribute listing the given references.
        # NOTE(review): indentation inside this replacement string was lost
        # in this copy of the file -- confirm it matches class-body level.
        files[CONANFILE] = files[CONANFILE].replace("exports = '*'", """exports = '*'
    dev_requires=%s
""" % ",".join('"%s"' % d for d in deps_dev))

    self.client.save(files, clean_first=True)
    if export:
        self.client.run("export . lasote/stable")
        # The attribute_checker hook warns once per missing metadata field.
        expected_output = textwrap.dedent(
            """\
            [HOOK - attribute_checker.py] pre_export(): WARN: Conanfile doesn't have 'url'. It is recommended to add it as attribute
            [HOOK - attribute_checker.py] pre_export(): WARN: Conanfile doesn't have 'license'. It is recommended to add it as attribute
            [HOOK - attribute_checker.py] pre_export(): WARN: Conanfile doesn't have 'description'. It is recommended to add it as attribute
            """)
        self.assertIn(expected_output, self.client.out)

    if number != "Hello2":
        files[CONANFILE] = files[CONANFILE].replace('version = "0.1"',
                                                    'version = "0.1"\n'
                                                    ' url= "myurl"\n'
                                                    ' license = "MIT"\n'
                                                    ' description = "blah"')
    else:
        # Hello2 exercises multiple licenses and a multi-line description.
        files[CONANFILE] = files[CONANFILE].replace('version = "0.1"',
                                                    'version = "0.1"\n'
                                                    ' url= "myurl"\n'
                                                    ' license = "MIT", "GPL"\n'
                                                    ' description = """Yo no creo en brujas,\n'
                                                    ' pero que las hay,\n'
                                                    ' las hay"""')
    self.client.save(files)
    if export:
        self.client.run("export . lasote/stable")
        # With metadata present, no warning should be emitted anymore.
        self.assertNotIn("WARN: Conanfile doesn't have 'url'", self.client.out)
def install_folder_test(self):
    # "conan info" must reuse the settings cached by a previous
    # "conan install" (from the current dir or from --install-folder), and
    # must reject explicit profile args combined with --install-folder.
    # 4024...a72 is the Release package ID, 5a67...988 the Debug one.
    conanfile = GenConanfile("Pkg", "0.1").with_setting("build_type")
    client = TestClient()
    client.save({"conanfile.py": conanfile})

    client.run("info . -s build_type=Debug")
    self.assertNotIn("ID: 4024617540c4f240a6a5e8911b0de9ef38a11a72", client.out)
    self.assertIn("ID: 5a67a79dbc25fd0fa149a0eb7a20715189a0d988", client.out)

    client.run('install . -s build_type=Debug')
    client.run("info .")  # Re-uses debug from curdir
    self.assertNotIn("ID: 4024617540c4f240a6a5e8911b0de9ef38a11a72", client.out)
    self.assertIn("ID: 5a67a79dbc25fd0fa149a0eb7a20715189a0d988", client.out)

    client.run('install . -s build_type=Release --install-folder=MyInstall')
    client.run("info . --install-folder=MyInstall")  # Re-uses Release from MyInstall folder
    self.assertIn("ID: 4024617540c4f240a6a5e8911b0de9ef38a11a72", client.out)
    self.assertNotIn("ID: 5a67a79dbc25fd0fa149a0eb7a20715189a0d988", client.out)

    client.run('install . -s build_type=Debug --install-folder=MyInstall')
    client.run("info . --install-folder=MyInstall")  # Re-uses debug from MyInstall folder
    self.assertNotIn("ID: 4024617540c4f240a6a5e8911b0de9ef38a11a72", client.out)
    self.assertIn("ID: 5a67a79dbc25fd0fa149a0eb7a20715189a0d988", client.out)

    # Both should raise: an install-folder and an explicit host profile
    # are mutually exclusive.
    client.run("info . --install-folder=MyInstall -s build_type=Release",
               assert_error=True)
    self.assertIn("--install-folder cannot be used together with a"
                  " host profile (-s, -o, -e or -pr)", client.out)
def graph_test(self):
    # "conan info --graph=<file>.dot" must emit a Graphviz digraph whose
    # edges exactly mirror the declared dependency tree, and must fail when
    # --graph is given without a filename.
    self.client = TestClient()
    test_deps = {
        "Hello0": ["Hello1", "Hello2", "Hello3"],
        "Hello1": ["Hello4"],
        "Hello2": [],
        "Hello3": ["Hello7"],
        "Hello4": ["Hello5", "Hello6"],
        "Hello5": [],
        "Hello6": [],
        "Hello7": ["Hello8"],
        "Hello8": ["Hello9", "Hello10"],
        "Hello9": [],
        "Hello10": [],
    }

    def create_export(test_deps, name):
        # Depth-first: export dependencies before their consumer. The root
        # "Hello0" is only saved locally (consumed as ./conanfile.py).
        deps = test_deps[name]
        for dep in deps:
            create_export(test_deps, dep)
        expanded_deps = ["%s/0.1@lasote/stable" % dep for dep in deps]
        export = name != "Hello0"  # idiomatic form of "False if ... else True"
        self._create(name, "0.1", expanded_deps, export=export)

    def check_ref(ref):
        self.assertEqual(ref.version, "0.1")
        self.assertEqual(ref.user, "lasote")
        self.assertEqual(ref.channel, "stable")

    def check_digraph_line(line):
        # Each edge line: '"parent" -> "dep" ...'; every node but the local
        # conanfile must be a full reference belonging to test_deps.
        self.assertTrue(dot_regex.match(line))
        node_matches = node_regex.findall(line)
        parent_reference = node_matches[0]
        deps_ref = [ConanFileReference.loads(references) for references in node_matches[1:]]
        if parent_reference == "conanfile.py (Hello0/0.1)":
            parent_ref = ConanFileReference("Hello0", None, None, None, validate=False)
        else:
            parent_ref = ConanFileReference.loads(parent_reference)
            check_ref(parent_ref)
        for dep in deps_ref:
            check_ref(dep)
            self.assertIn(dep.name, test_deps[parent_ref.name])

    def check_file(filename):
        with open(filename) as dot_file_contents:
            lines = dot_file_contents.readlines()
            self.assertEqual(lines[0], "digraph {\n")
            for line in lines[1:-1]:
                check_digraph_line(line)
            self.assertEqual(lines[-1], "}\n")

    create_export(test_deps, "Hello0")

    node_regex = re.compile(r'"([^"]+)"')
    dot_regex = re.compile(r'^\s+"[^"]+" -> "[^"]+"\s+$')

    # --graph without a filename is an error.
    self.client.run("info . --graph", assert_error=True)

    # arbitrary case - file will be named according to argument
    arg_filename = "test.dot"
    self.client.run("info . --graph=%s" % arg_filename)
    dot_file = os.path.join(self.client.current_folder, arg_filename)
    check_file(dot_file)
def graph_html_test(self):
    # "conan info --graph=<file>.html" must emit an HTML (vis.js)
    # visualization whose nodes and edges match the dependency graph.
    self.client = TestClient()
    test_deps = {
        "Hello0": ["Hello1"],
        "Hello1": [],
    }

    def create_export(test_deps, name):
        # Depth-first: export dependencies before their consumer. The root
        # "Hello0" is only saved locally (consumed as ./conanfile.py).
        deps = test_deps[name]
        for dep in deps:
            create_export(test_deps, dep)
        expanded_deps = ["%s/0.1@lasote/stable" % dep for dep in deps]
        export = name != "Hello0"  # idiomatic form of "False if ... else True"
        self._create(name, "0.1", expanded_deps, export=export)

    create_export(test_deps, "Hello0")

    # arbitrary case - file will be named according to argument
    arg_filename = "test.html"
    self.client.run("info . --graph=%s" % arg_filename)
    html = self.client.load(arg_filename)
    self.assertIn("<body>", html)
    self.assertIn("{ from: 0, to: 1 }", html)
    self.assertIn("id: 0,\n label: 'Hello0/0.1',", html)
def graph_html_embedded_visj_test(self):
    # When vis.min.js / vis.min.css exist in the local cache folder, the
    # generated HTML graph must reference those local copies instead of the
    # CDN (cloudflare) URLs.
    client = TestClient()
    visjs_path = os.path.join(client.cache_folder, "vis.min.js")
    viscss_path = os.path.join(client.cache_folder, "vis.min.css")
    save(visjs_path, "")  # empty placeholder files are enough to trigger it
    save(viscss_path, "")
    client.save({"conanfile.txt": ""})
    client.run("info . --graph=file.html")
    html = client.load("file.html")
    self.assertIn("<body>", html)
    self.assertNotIn("cloudflare", html)
    self.assertIn(visjs_path, html)
    self.assertIn(viscss_path, html)
def info_build_requires_test(self):
    # "info" must resolve build-requires injected by a profile and show each
    # node exactly once, both in the text output and in the HTML graph
    # (build-requires as blue ellipses, regular packages as khaki boxes).
    client = TestClient()
    # NOTE(review): the closing quotes must keep class-body indentation so
    # the later string append produces a class-level "requires" attribute;
    # leading whitespace was lost in this copy -- confirm against upstream.
    conanfile = """from conans import ConanFile
class AConan(ConanFile):
    pass
    """
    client.save({"conanfile.py": conanfile})
    client.run("create . tool/0.1@user/channel")
    client.run("create . dep/0.1@user/channel")
    conanfile = conanfile + 'requires = "dep/0.1@user/channel"'
    client.save({"conanfile.py": conanfile})
    client.run("export . Pkg/0.1@user/channel")
    client.run("export . Pkg2/0.1@user/channel")
    client.save({"conanfile.txt": "[requires]\nPkg/0.1@user/channel\nPkg2/0.1@user/channel",
                 "myprofile": "[build_requires]\ntool/0.1@user/channel"}, clean_first=True)
    client.run("info . -pr=myprofile --dry-build=missing")
    # Check that there is only 1 output for tool, not repeated many times
    pkgs = [line for line in str(client.out).splitlines() if line.startswith("tool")]
    self.assertEqual(len(pkgs), 1)

    client.run("info . -pr=myprofile --dry-build=missing --graph=file.html")
    html = client.load("file.html")
    self.assertIn("html", html)
    # To check that this node is not duplicated
    self.assertEqual(1, html.count("label: 'dep/0.1'"))
    self.assertIn("label: 'Pkg2/0.1',\n "
                  "shape: 'box',\n "
                  "color: { background: 'Khaki'},", html)
    self.assertIn("label: 'Pkg/0.1',\n "
                  "shape: 'box',\n "
                  "color: { background: 'Khaki'},", html)
    self.assertIn("label: 'tool/0.1',\n "
                  "shape: 'ellipse',\n "
                  "color: { background: 'SkyBlue'},", html)
def only_names_test(self):
    # "--only None" prints only the reference lines; "--only=date" adds one
    # date line per node; invalid values give different hints depending on
    # whether --paths is also given.
    self.client = TestClient()
    self._create("Hello0", "0.1")
    self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"])
    self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False)
    self.client.run("info . --only None")
    self.assertEqual(["Hello0/0.1@lasote/stable", "Hello1/0.1@lasote/stable",
                      "conanfile.py (Hello2/0.1)"],
                     str(self.client.out).splitlines()[-3:])
    self.client.run("info . --only=date")
    # Replace the run-dependent date lines with a fixed token before comparing.
    lines = [(line if "date" not in line else "Date")
             for line in str(self.client.out).splitlines()]
    self.assertEqual(["Hello0/0.1@lasote/stable", "Date",
                      "Hello1/0.1@lasote/stable", "Date",
                      "conanfile.py (Hello2/0.1)"], lines)
    self.client.run("info . --only=invalid", assert_error=True)
    self.assertIn("Invalid --only value", self.client.out)
    self.assertNotIn("with --path specified, allowed values:", self.client.out)
    self.client.run("info . --paths --only=bad", assert_error=True)
    self.assertIn("Invalid --only value", self.client.out)
    self.assertIn("with --path specified, allowed values:", self.client.out)
def test_cwd(self):
    # "conan info <path>" works from a parent folder, and --json writes its
    # output file relative to the current working directory.
    self.client = TestClient()
    conanfile = """from conans import ConanFile
from conans.util.files import load, save
class MyTest(ConanFile):
    name = "Pkg"
    version = "0.1"
    settings = "build_type"
"""
    self.client.save({"subfolder/conanfile.py": conanfile})
    self.client.run("export ./subfolder lasote/testing")
    self.client.run("info ./subfolder")
    self.assertIn("conanfile.py (Pkg/0.1)", self.client.out)
    self.client.run("info ./subfolder --build-order "
                    "Pkg/0.1@lasote/testing --json=jsonfile.txt")
    # The JSON must land in the client cwd, not next to the conanfile.
    path = os.path.join(self.client.current_folder, "jsonfile.txt")
    self.assertTrue(os.path.exists(path))
def info_virtual_test(self):
    # Checking that "Required by: virtual" doesn't appear in the output
    # when inspecting a reference directly (the virtual root consumer
    # must be hidden).
    self.client = TestClient()
    self._create("Hello", "0.1")
    self.client.run("info Hello/0.1@lasote/stable")
    self.assertNotIn("virtual", self.client.out)
    self.assertNotIn("Required", self.client.out)
def test_reuse(self):
    # Full "conan info -u" output for a three-node chain checked against a
    # literal transcript, then progressively narrowed with --only flags.
    self.client = TestClient()
    self._create("Hello0", "0.1")
    self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"])
    self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False)
    self.client.run("info . -u")
    self.assertIn("Creation date: ", self.client.out)
    self.assertIn("ID: ", self.client.out)
    self.assertIn("BuildID: ", self.client.out)
    # NOTE(review): indentation inside this transcript was lost in this copy
    # of the file; reconstructed to match "conan info" output (fields at 4
    # spaces, nested references at 8) -- verify against upstream.
    expected_output = textwrap.dedent(
        """\
        Hello0/0.1@lasote/stable
            Remote: None
            URL: myurl
            License: MIT
            Description: blah
            Recipe: No remote%s
            Binary: Missing
            Binary remote: None
            Required by:
                Hello1/0.1@lasote/stable
        Hello1/0.1@lasote/stable
            Remote: None
            URL: myurl
            License: MIT
            Description: blah
            Recipe: No remote%s
            Binary: Missing
            Binary remote: None
            Required by:
                conanfile.py (Hello2/0.1)
            Requires:
                Hello0/0.1@lasote/stable
        conanfile.py (Hello2/0.1)
            URL: myurl
            Licenses: MIT, GPL
            Description: Yo no creo en brujas,
            pero que las hay,
            las hay
            Requires:
                Hello1/0.1@lasote/stable""")
    # With revisions enabled the recipe line carries a known revision hash;
    # otherwise the %s placeholders collapse to nothing.
    expected_output = expected_output % (
        "\n Revision: 4dfe7e755ac2ce2b39f1da54151c7636",
        "\n Revision: e003760cfa649c4ac4680fec3271b17a") \
        if self.client.cache.config.revisions_enabled else expected_output % ("", "")
    def clean_output(output):
        # Strip the lines whose values vary between runs/machines.
        return "\n".join([line for line in str(output).splitlines()
                          if not line.strip().startswith("Creation date") and
                          not line.strip().startswith("ID") and
                          not line.strip().startswith("BuildID") and
                          not line.strip().startswith("export_folder") and
                          not line.strip().startswith("build_folder") and
                          not line.strip().startswith("source_folder") and
                          not line.strip().startswith("package_folder")])
    # The timestamp is variable so we can't check the equality
    self.assertIn(expected_output, clean_output(self.client.out))
    self.client.run("info . -u --only=url")
    expected_output = textwrap.dedent(
        """\
        Hello0/0.1@lasote/stable
            URL: myurl
        Hello1/0.1@lasote/stable
            URL: myurl
        conanfile.py (Hello2/0.1)
            URL: myurl""")
    self.assertIn(expected_output, clean_output(self.client.out))
    self.client.run("info . -u --only=url --only=license")
    expected_output = textwrap.dedent(
        """\
        Hello0/0.1@lasote/stable
            URL: myurl
            License: MIT
        Hello1/0.1@lasote/stable
            URL: myurl
            License: MIT
        conanfile.py (Hello2/0.1)
            URL: myurl
            Licenses: MIT, GPL""")
    self.assertIn(expected_output, clean_output(self.client.out))
    self.client.run("info . -u --only=url --only=license --only=description")
    expected_output = textwrap.dedent(
        """\
        Hello0/0.1@lasote/stable
            URL: myurl
            License: MIT
            Description: blah
        Hello1/0.1@lasote/stable
            URL: myurl
            License: MIT
            Description: blah
        conanfile.py (Hello2/0.1)
            URL: myurl
            Licenses: MIT, GPL
            Description: Yo no creo en brujas,
            pero que las hay,
            las hay""")
    self.assertIn(expected_output, clean_output(self.client.out))
def test_json_info_outputs(self):
    # --json dumps one entry per graph node (alphabetical order);
    # spot-check a few fields of the generated document.
    self.client = TestClient()
    self._create("LibA", "0.1")
    self._create("LibE", "0.1")
    self._create("LibF", "0.1")
    self._create("LibB", "0.1", ["LibA/0.1@lasote/stable", "LibE/0.1@lasote/stable"])
    self._create("LibC", "0.1", ["LibA/0.1@lasote/stable", "LibF/0.1@lasote/stable"])
    self._create("LibD", "0.1", ["LibB/0.1@lasote/stable", "LibC/0.1@lasote/stable"],
                 export=False)
    json_file = os.path.join(self.client.current_folder, "output.json")
    # Quote the path so spaces in the temp folder don't break the command.
    self.client.run("info . -u --json=\"{}\"".format(json_file))
    # Check a couple of values in the generated JSON
    content = json.loads(load(json_file))
    self.assertEqual(content[0]["reference"], "LibA/0.1@lasote/stable")
    self.assertEqual(content[0]["license"][0], "MIT")
    self.assertEqual(content[0]["description"], "blah")
    self.assertEqual(content[1]["url"], "myurl")
    self.assertEqual(content[1]["required_by"][0], "conanfile.py (LibD/0.1)")
def build_order_test(self):
    # --build-order/-bo groups references by build level; works with path,
    # reference and --json variants, and is incompatible with --graph.
    self.client = TestClient()
    self._create("Hello0", "0.1")
    self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"])
    self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False)
    self.client.run("info ./conanfile.py -bo=Hello0/0.1@lasote/stable")
    self.assertIn("[Hello0/0.1@lasote/stable], [Hello1/0.1@lasote/stable]",
                  self.client.out)
    self.client.run("info conanfile.py -bo=Hello1/0.1@lasote/stable")
    self.assertIn("[Hello1/0.1@lasote/stable]", self.client.out)
    # Multiple -bo arguments accumulate.
    self.client.run("info ./ -bo=Hello1/0.1@lasote/stable -bo=Hello0/0.1@lasote/stable")
    self.assertIn("[Hello0/0.1@lasote/stable], [Hello1/0.1@lasote/stable]",
                  self.client.out)
    self.client.run("info Hello1/0.1@lasote/stable -bo=Hello0/0.1@lasote/stable")
    self.assertIn("[Hello0/0.1@lasote/stable], [Hello1/0.1@lasote/stable]\n", self.client.out)
    # --json with a filename writes the groups there ...
    self.client.run("info Hello1/0.1@lasote/stable -bo=Hello0/0.1@lasote/stable "
                    "--json=file.json")
    self.assertEqual('{"groups": [["Hello0/0.1@lasote/stable"], ["Hello1/0.1@lasote/stable"]]}',
                     self.client.load("file.json"))
    # ... and bare --json prints them to stdout.
    self.client.run("info Hello1/0.1@lasote/stable -bo=Hello0/0.1@lasote/stable --json")
    self.assertIn('{"groups": [["Hello0/0.1@lasote/stable"], ["Hello1/0.1@lasote/stable"]]}',
                  self.client.out)
    self.client.run("info Hello1/0.1@lasote/stable --build-order=Hello0/0.1@lasote/stable "
                    "--graph=index.html", assert_error=True)
    self.assertIn("--build-order cannot be used together with --graph", self.client.out)
def build_order_build_requires_test(self):
    # https://github.com/conan-io/conan/issues/3267
    # Profile build-requires must be scheduled in a level before their
    # consumers in the build order.
    client = TestClient()
    # NOTE(review): indentation inside this literal was lost in this copy;
    # reconstructed so the indented closing quote lets 'requires = ...' be
    # appended below as a class attribute -- verify against upstream.
    conanfile = """from conans import ConanFile
class AConan(ConanFile):
    pass
    """
    client.save({"conanfile.py": conanfile})
    client.run("create . tool/0.1@user/channel")
    client.run("create . dep/0.1@user/channel")
    conanfile = conanfile + 'requires = "dep/0.1@user/channel"'
    client.save({"conanfile.py": conanfile})
    client.run("export . Pkg/0.1@user/channel")
    client.run("export . Pkg2/0.1@user/channel")
    client.save({"conanfile.txt": "[requires]\nPkg/0.1@user/channel\nPkg2/0.1@user/channel",
                 "myprofile": "[build_requires]\ntool/0.1@user/channel"}, clean_first=True)
    client.run("info . -pr=myprofile -bo=tool/0.1@user/channel")
    self.assertIn("[tool/0.1@user/channel], [Pkg/0.1@user/channel, Pkg2/0.1@user/channel]",
                  client.out)
def build_order_privates_test(self):
    # https://github.com/conan-io/conan/issues/3267
    # Private requirements must still be reflected in the build order.
    client = TestClient()
    # NOTE(review): indentation inside this literal was lost in this copy;
    # reconstructed so the indented closing quote lets the appended
    # 'requires = ...' lines become class attributes -- verify.
    conanfile = """from conans import ConanFile
class AConan(ConanFile):
    pass
    """
    client.save({"conanfile.py": conanfile})
    client.run("create . tool/0.1@user/channel")
    conanfile_dep = conanfile + 'requires = "tool/0.1@user/channel"'
    client.save({"conanfile.py": conanfile_dep})
    client.run("create . dep/0.1@user/channel")
    # Trailing comma makes this a tuple of one (ref, "private") pair.
    conanfile_pkg = conanfile + 'requires = ("dep/0.1@user/channel", "private"),'
    client.save({"conanfile.py": conanfile_pkg})
    client.run("export . Pkg/0.1@user/channel")
    client.run("export . Pkg2/0.1@user/channel")
    client.save({"conanfile.txt": "[requires]\nPkg/0.1@user/channel\nPkg2/0.1@user/channel"},
                clean_first=True)
    client.run("info . -bo=tool/0.1@user/channel")
    self.assertIn("[tool/0.1@user/channel], [dep/0.1@user/channel], "
                  "[Pkg/0.1@user/channel, Pkg2/0.1@user/channel]",
                  client.out)
def diamond_build_order_test(self):
    # Build order over a diamond graph: LibD -> (LibB, LibC) -> LibA, with
    # LibE under LibB and LibF under LibC.
    self.client = TestClient()
    self._create("LibA", "0.1")
    self._create("LibE", "0.1")
    self._create("LibF", "0.1")
    self._create("LibB", "0.1", ["LibA/0.1@lasote/stable", "LibE/0.1@lasote/stable"])
    self._create("LibC", "0.1", ["LibA/0.1@lasote/stable", "LibF/0.1@lasote/stable"])
    self._create("LibD", "0.1", ["LibB/0.1@lasote/stable", "LibC/0.1@lasote/stable"],
                 export=False)
    self.client.run("info . -bo=LibA/0.1@lasote/stable")
    self.assertIn("[LibA/0.1@lasote/stable], "
                  "[LibB/0.1@lasote/stable, LibC/0.1@lasote/stable]",
                  self.client.out)
    self.client.run("info . -bo=LibB/0.1@lasote/stable")
    self.assertIn("[LibB/0.1@lasote/stable]", self.client.out)
    self.client.run("info . -bo=LibE/0.1@lasote/stable")
    self.assertIn("[LibE/0.1@lasote/stable], [LibB/0.1@lasote/stable]",
                  self.client.out)
    self.client.run("info . -bo=LibF/0.1@lasote/stable")
    self.assertIn("[LibF/0.1@lasote/stable], [LibC/0.1@lasote/stable]",
                  self.client.out)
    # References not in the graph produce only the deprecation warning.
    self.client.run("info . -bo=Dev1/0.1@lasote/stable")
    self.assertEqual("WARN: Usage of `--build-order` argument is deprecated and can return wrong"
                     " results. Use `conan graph build-order ...` instead.\n\n", self.client.out)
    self.client.run("info . -bo=LibG/0.1@lasote/stable")
    self.assertEqual("WARN: Usage of `--build-order` argument is deprecated and can return wrong"
                     " results. Use `conan graph build-order ...` instead.\n\n", self.client.out)
    self.client.run("info . --build-order=ALL")
    self.assertIn("[LibA/0.1@lasote/stable, LibE/0.1@lasote/stable, LibF/0.1@lasote/stable], "
                  "[LibB/0.1@lasote/stable, LibC/0.1@lasote/stable]",
                  self.client.out)
    # Deliberately run ALL a second time to check the result is stable.
    self.client.run("info . --build-order=ALL")
    self.assertIn("[LibA/0.1@lasote/stable, LibE/0.1@lasote/stable, "
                  "LibF/0.1@lasote/stable], [LibB/0.1@lasote/stable, LibC/0.1@lasote/stable]",
                  self.client.out)
def wrong_path_parameter_test(self):
    # Missing or bad path arguments must fail with clear errors: no args is
    # an argparse error (exit code 2), bad paths report "Conanfile not found".
    self.client = TestClient()
    self.client.run("info", assert_error=True)
    self.assertIn("ERROR: Exiting with code: 2", self.client.out)
    self.client.run("info not_real_path", assert_error=True)
    self.assertIn("ERROR: Conanfile not found", self.client.out)
    self.client.run("info conanfile.txt", assert_error=True)
    self.assertIn("ERROR: Conanfile not found", self.client.out)
def test_common_attributes(self):
    # A recipe without metadata attributes must not print empty metadata
    # fields (License, Author, Topics, Homepage, URL).
    self.client = TestClient()
    conanfile = """from conans import ConanFile
from conans.util.files import load, save
class MyTest(ConanFile):
    name = "Pkg"
    version = "0.1"
    settings = "build_type"
"""
    self.client.save({"subfolder/conanfile.py": conanfile})
    self.client.run("export ./subfolder lasote/testing")
    self.client.run("info ./subfolder")
    self.assertIn("conanfile.py (Pkg/0.1)", self.client.out)
    self.assertNotIn("License:", self.client.out)
    self.assertNotIn("Author:", self.client.out)
    self.assertNotIn("Topics:", self.client.out)
    self.assertNotIn("Homepage:", self.client.out)
    self.assertNotIn("URL:", self.client.out)
def test_full_attributes(self):
    # A recipe with all metadata attributes set must print each of them.
    client = TestClient()
    conanfile = """from conans import ConanFile
class MyTest(ConanFile):
    name = "Pkg"
    version = "0.2"
    settings = "build_type"
    author = "John Doe"
    license = "MIT"
    url = "https://foo.bar.baz"
    homepage = "https://foo.bar.site"
    topics = ("foo", "bar", "qux")
"""
    client.save({"subfolder/conanfile.py": conanfile})
    client.run("export ./subfolder lasote/testing")
    client.run("info ./subfolder")
    self.assertIn("conanfile.py (Pkg/0.2)", client.out)
    self.assertIn("License: MIT", client.out)
    self.assertIn("Author: John Doe", client.out)
    self.assertIn("Topics: foo, bar, qux", client.out)
    self.assertIn("URL: https://foo.bar.baz", client.out)
    self.assertIn("Homepage: https://foo.bar.site", client.out)
def topics_graph_test(self):
    # The HTML graph must render recipe topics both when declared as a
    # tuple and when declared as a plain string.
    conanfile = """from conans import ConanFile
class MyTest(ConanFile):
    name = "Pkg"
    version = "0.2"
    topics = ("foo", "bar", "qux")
"""
    client = TestClient()
    client.save({"conanfile.py": conanfile})
    client.run("export . lasote/testing")
    # Topics as tuple
    client.run("info Pkg/0.2@lasote/testing --graph file.html")
    html_content = client.load("file.html")
    self.assertIn("<h3>Pkg/0.2@lasote/testing</h3>", html_content)
    self.assertIn("<li><b>topics</b>: foo, bar, qux</li>", html_content)
    # Topics as a string
    conanfile = conanfile.replace("(\"foo\", \"bar\", \"qux\")", "\"foo\"")
    client.save({"conanfile.py": conanfile}, clean_first=True)
    client.run("export . lasote/testing")
    client.run("info Pkg/0.2@lasote/testing --graph file.html")
    html_content = client.load("file.html")
    self.assertIn("<h3>Pkg/0.2@lasote/testing</h3>", html_content)
    self.assertIn("<li><b>topics</b>: foo", html_content)
def wrong_graph_info_test(self):
    # https://github.com/conan-io/conan/issues/4443
    # A graph_info.json missing its "root" entry must still work, while a
    # corrupt file must produce a clear parse error.
    conanfile = GenConanfile().with_name("Hello").with_version("0.1")
    client = TestClient()
    client.save({"conanfile.py": str(conanfile)})
    client.run("install .")
    path = os.path.join(client.current_folder, "graph_info.json")
    graph_info = client.load(path)
    graph_info = json.loads(graph_info)
    graph_info.pop("root")
    save(path, json.dumps(graph_info))
    client.run("info .")
    self.assertIn("conanfile.py (Hello/0.1)", client.out)
    save(path, "broken thing")
    client.run("info .", assert_error=True)
    self.assertIn("ERROR: Error parsing GraphInfo from file", client.out)
def previous_lockfile_error_test(self):
    # https://github.com/conan-io/conan/issues/5479
    # A lockfile left by a previous "install" with options must not break
    # a later "info" on an unrelated reference, nor leak those options.
    client = TestClient()
    client.save({"conanfile.py": GenConanfile().with_name("pkg").with_version("0.1")})
    client.run("create . user/testing")
    client.save({"conanfile.py": GenConanfile().with_name("other").with_version("0.1")
                .with_option("shared", [True, False])
                .with_default_option("shared", False)})
    client.run("install . -o shared=True")
    client.run("info pkg/0.1@user/testing")
    self.assertIn("pkg/0.1@user/testing", client.out)
    self.assertNotIn("shared", client.out)
|
// DOM handles for the connection dialog. They are never reassigned, so
// declare them as individual consts instead of a comma-chained `let`.
const address = document.getElementById('connect-address');
const connect = document.getElementById('connect');
const buttonConnect = document.getElementById('connect-button');
// Whether the connect dialog is currently visible.
let loginShown = true;
// Set function to be called on NetworkTables connect. Not implemented.
//NetworkTables.addWsConnectionListener(onNetworkTablesConnection, true);
// Set function to be called when robot dis/connects
NetworkTables.addRobotConnectionListener(onRobotConnection, false);
// Sets function to be called when any NetworkTables key/value changes
//NetworkTables.addGlobalListener(onValueChanged, true);
// Escape hides the connect box (assigns the global window.onkeydown handler).
onkeydown = (key) => {
    if (key.key === 'Escape') {
        document.body.classList.toggle('login', false);
        loginShown = false;
    }
};
// The toolbar button re-opens the connect box.
buttonConnect.onclick = () => {
    document.body.classList.toggle('login', true);
    loginShown = true;
    setLogin();
};
/**
 * Called whenever the robot connects or disconnects.
 * Updates the status line and shows/hides the connect dialog.
 * @param {boolean} connected - true when the robot is reachable
 */
function onRobotConnection(connected) {
    // `state` is never reassigned, so use const instead of var.
    const state = connected ? 'Robot connected!' : 'Robot disconnected.';
    console.log(state);
    ui.robotState.textContent = state;
    if (connected) {
        // On connect hide the connect popup
        document.body.classList.toggle('login', false);
        loginShown = false;
    } else if (loginShown) {
        // Lost the robot while the dialog is open: re-enable its inputs.
        setLogin();
    }
}
// Resets the connect dialog to its editable state: inputs enabled, default
// address filled in, and the team-number placeholder pre-selected.
function setLogin() {
    // Enable the input and the button. (The Enter-key handler is attached
    // once elsewhere, not here.)
    address.disabled = connect.disabled = false;
    connect.textContent = 'Connect';
    // Add the default address and select the "xxxx" team-number placeholder
    // (characters 8-12 of the string below).
    address.value = 'roborio-xxxx-frc.local';
    address.focus();
    address.setSelectionRange(8, 12);
}
// On click try to connect and disable the input and the button
connect.onclick = () => {
    // The main process performs the actual connection attempt.
    ipc.send('connect', address.value);
    address.disabled = connect.disabled = true;
    connect.textContent = 'Connecting...';
};
// Pressing Enter in the address field behaves like clicking Connect.
address.onkeydown = ev => {
    if (ev.key === 'Enter') {
        connect.click();
        ev.preventDefault();
        ev.stopPropagation();
    }
};
// Show login when starting
document.body.classList.toggle('login', true);
setLogin();
|
const path = require('path');
const ipc = require('electron').ipcRenderer;
// NOTE(review): `fs` and `$` (jQuery) are used throughout but never
// required in this file -- presumably injected by an earlier script or
// preload; confirm before running standalone.
// Renders the "favourite matches" page: one card per favourited match,
// built from JSON cached on disk (downloaded on demand from henrikdev).
$(document).ready(() => {
  ipc.send('changeDiscordRP', `favmatches_activity`)
  // Stored player identity, used to find "our" row in each match.
  let rawuserdata = fs.readFileSync(process.env.APPDATA + '/VALTracker/user_data/user_creds.json');
  let userdataToRead = JSON.parse(rawuserdata);
  var playerName = userdataToRead.playerName
  var playerTag = userdataToRead.playerTag
  let favrawdata = fs.readFileSync(process.env.APPDATA + '/VALTracker/user_data/favourite_matches/matches.json');
  var buffer = Buffer.from(favrawdata)
  if (buffer.length == 0) {
    // Empty favourites file: seed it with a placeholder and reload.
    // NOTE(review): there is no `return` here, so JSON.parse below still
    // runs on the empty buffer before the reload takes effect -- confirm.
    var newArrToPush = {
      "favourites": [{
      }]
    };
    fs.writeFileSync(process.env.APPDATA + '/VALTracker/user_data/favourite_matches/matches.json', JSON.stringify(newArrToPush))
    window.location.href = "" // assigning "" reloads the current page
  }
  let dataToRead = JSON.parse(favrawdata);
  var matchIDarray = [];
  for (var count = 0; count < dataToRead.favourites.length; count++) {
    var matchID = dataToRead.favourites[count].MatchID
    matchIDarray.push(matchID)
    if (dataToRead.favourites[count] == undefined) { //No saved matches found at all
      // Show the "no favourites" fallback layer.
      $('.loading-icon').fadeTo(150, 0)
      setTimeout(function () {
        $('.loading-icon').css("display", "none");
        $('.loading-layer').css("display", "none");
        $('.loading-layer-fallback').css("opacity", "0");
        $('.loading-layer-fallback').css("display", "block");
        $('.loading-layer-fallback').fadeTo(150, 1)
      }, 200)
    } else { //saved match found
      if (dataToRead.favourites[count].MatchID == undefined) {
        // Entry without a MatchID (the placeholder written above): show
        // the fallback layer and skip this entry.
        $('.loading-icon').fadeTo(150, 0)
        setTimeout(function () {
          $('.loading-icon').css("display", "none");
          $('.loading-layer').css("display", "none");
          $('.loading-layer-fallback').css("opacity", "0");
          $('.loading-layer-fallback').css("display", "block");
          $('.loading-layer-fallback').fadeTo(150, 1)
        }, 200)
        continue;
      }
      var checkedFolder = process.env.APPDATA + `/VALTracker/user_data/favourite_matches/matches`
      if (fs.existsSync(checkedFolder)) { //Check for folder of saved match data
        var checkedPath = process.env.APPDATA + `/VALTracker/user_data/favourite_matches/matches/${matchID}.json`
        if (fs.existsSync(checkedPath)) { //check for downloaded match data of current match
          let rawmatchdata = fs.readFileSync(process.env.APPDATA + `/VALTracker/user_data/favourite_matches/matches/${matchID}.json`);
          const data = JSON.parse(rawmatchdata);
          ///////////////////////////////////////////////////////////////////////////////////////////////////////////////
          // Build one match card from the cached match JSON.
          var ms = data.data.metadata.game_length;
          var d = new Date(1000 * Math.round(ms / 1000)); // round to nearest second
          function pad(i) {
            return ('0' + i).slice(-2);
          }
          // NOTE(review): `str` (formatted game length) is never used below
          // in this view -- possibly dead code.
          var str = d.getUTCHours() + ':' + pad(d.getUTCMinutes()) + ':' + pad(d.getUTCSeconds());
          var Matchcontainer = document.createElement("div");
          Matchcontainer.className = "match-wrapper";
          var matchmodeIcon = document.createElement("img");
          matchmodeIcon.className = "match-mode-icon";
          var matchmode = data.data.metadata.mode
          if (matchmode == "Unrated" || matchmode == "Competitive" || matchmode == "Custom Game") {
            matchmodeIcon.setAttribute("src", "./assets/img/standard.png")
          } else {
            matchmodeIcon.setAttribute("src", `./assets/img/${matchmode.toLowerCase()}.png`)
          }
          var matchMap = document.createElement("img");
          matchMap.className = "match-map";
          //matchMap.src = `./assets/img/${data.data.metadata.map.toLowerCase()}.png`
          matchMap.setAttribute("src", `./assets/img/${data.data.metadata.map.toLowerCase()}.png`)
          var playedAgent = document.createElement("img");
          playedAgent.className = "match-played-agent";
          // Find our own entry to fill the KDA / agent / rank widgets.
          for (var playerCount = 0; playerCount < data.data.players.all_players.length; playerCount++) {
            if (data.data.players.all_players[playerCount].name == playerName && data.data.players.all_players[playerCount].tag == playerTag) {
              if (matchmode == "Competitive") {
                var matchRRwrapper = document.createElement("div");
                matchRRwrapper.className = "match-rr-wrapper";
                var matchRRimg = document.createElement("img");
                matchRRimg.className = "match-rr-img-pp";
                var rankIcons = [
                  './assets/img/iron_1.png', './assets/img/iron_2.png', './assets/img/iron_3.png',
                  './assets/img/bronze_1.png', './assets/img/bronze_2.png', './assets/img/bronze_3.png',
                  './assets/img/silver_1.png', './assets/img/silver_2.png', './assets/img/silver_3.png',
                  './assets/img/gold_1.png', './assets/img/gold_2.png', './assets/img/gold_3.png',
                  './assets/img/plat_1.png', './assets/img/plat_2.png', './assets/img/plat_3.png',
                  './assets/img/dia_1.png', './assets/img/dia_2.png', './assets/img/dia_3.png',
                  './assets/img/immortal_1.png', './assets/img/immortal_2.png', './assets/img/immortal_3.png',
                  './assets/img/radiant.png',
                  './assets/img/unranked.png',
                ]
                // NOTE(review): the -3 offset maps the API's tier number
                // onto rankIcons (tiers presumably start at 3 = Iron 1) --
                // verify against the API documentation.
                matchRRimg.setAttribute("src", `${rankIcons[data.data.players.all_players[playerCount].currenttier -3]}`)
                matchRRwrapper.appendChild(matchRRimg)
                var matchRRspan = document.createElement("span");
              }
              playedAgent.src = data.data.players.all_players[playerCount].assets.agent.small;
              var matchKDA = document.createElement("span");
              matchKDA.className = "match-kda";
              matchKDA.appendChild(document.createTextNode("KDA: " + data.data.players.all_players[playerCount].stats.kills + "/" + data.data.players.all_players[playerCount].stats.deaths + "/" + data.data.players.all_players[playerCount].stats.assists))
              // Match MVP: did we get the highest score across both teams?
              var scoreArray = [];
              var playerArray = [];
              for (var pcount = 0; pcount < data.data.players.all_players.length; pcount++) {
                scoreArray.push(data.data.players.all_players[pcount].stats.score)
                playerArray.push(data.data.players.all_players[pcount].name + "#" + data.data.players.all_players[pcount].tag)
              }
              var highestScore = Math.max(...scoreArray)
              // Leaves `arrcount` at the index of the top scorer.
              for (var arrcount = 0; arrcount < scoreArray.length; arrcount++) {
                if (scoreArray[arrcount] == highestScore) {
                  break;
                }
              }
              if (playerArray[arrcount] == playerName + "#" + playerTag) {
                matchKDA.classList.add("MatchMVP")
              } else {
                // Not match MVP: check for Team MVP instead. Find our own
                // index to learn which team we were on.
                for (var psearch = 0; psearch < data.data.players.all_players.length; psearch++) {
                  if (data.data.players.all_players[psearch].name + "#" + data.data.players.all_players[psearch].tag == playerName + "#" + playerTag) {
                    break;
                  }
                }
                var teamScoreArray = [];
                var teamPlayerArray = [];
                // NOTE(review): both branches below read players.red; the
                // "Blue" branch presumably should read players.blue --
                // looks like a copy/paste bug, confirm before fixing.
                if (data.data.players.all_players[psearch].team == "Blue") {
                  for (var pcount = 0; pcount < data.data.players.red.length; pcount++) {
                    teamScoreArray.push(data.data.players.red[pcount].stats.score)
                    teamPlayerArray.push(data.data.players.red[pcount].name + "#" + data.data.players.red[pcount].tag)
                  }
                } else {
                  for (var pcount = 0; pcount < data.data.players.red.length; pcount++) {
                    teamScoreArray.push(data.data.players.red[pcount].stats.score)
                    teamPlayerArray.push(data.data.players.red[pcount].name + "#" + data.data.players.red[pcount].tag)
                  }
                }
                var highestScore = Math.max(...teamScoreArray)
                for (var arrcount = 0; arrcount < teamScoreArray.length; arrcount++) {
                  if (teamScoreArray[arrcount] == highestScore) {
                    break;
                  }
                }
                if (teamPlayerArray[arrcount] == playerName + "#" + playerTag) {
                  matchKDA.classList.add("TeamMVP")
                }
              }
              // Result banner. has_won == null means there is no team data
              // (deathmatch): 40 kills is the win condition there.
              var matchStanding = document.createElement("div");
              var result = document.createElement("span");
              result.className = "result-header"
              result.appendChild(document.createTextNode("RESULT"))
              matchStanding.appendChild(result)
              if (data.data.teams.red.has_won == null) {
                if (data.data.players.all_players[playerCount].stats.kills == 40) {
                  matchStanding.className = "match-result-won";
                  matchStanding.appendChild(document.createTextNode("WIN"));
                } else {
                  matchStanding.className = "match-result-lost";
                  matchStanding.appendChild(document.createTextNode("LOSE"));
                }
              } else {
                // Surrendered games are labelled SRNDR instead of a score.
                if (data.data.rounds[data.data.rounds.length - 1].end_type == "SRNDRed") {
                  if (data.data.players.all_players[playerCount].team == data.data.rounds[data.data.rounds.length - 1].winning_team) {
                    matchStanding.className = "match-result-won-favmatch";
                    if (matchmode == "Competitive") {
                      matchRRspan.className = `match-rr-pp-win`;
                      matchRRspan.setAttribute("id", "match-rr-id-" + count);
                    }
                    matchStanding.appendChild(document.createTextNode("SRNDR"));
                  } else {
                    matchStanding.className = "match-result-lost-favmatch";
                    if (matchmode == "Competitive") {
                      matchRRspan.className = `match-rr-pp-lose`;
                      matchRRspan.setAttribute("id", "match-rr-id-" + count);
                    }
                    matchStanding.appendChild(document.createTextNode("SRNDR"));
                  }
                } else {
                  // Normal game: render win/lose/draw from our team's rounds.
                  if (data.data.players.all_players[playerCount].team == "Blue") {
                    if (data.data.teams.blue.rounds_won == data.data.teams.blue.rounds_lost) {
                      matchStanding.className = "match-result-draw-favmatch";
                      if (matchmode == "Competitive") {
                        matchRRspan.className = `match-rr-pp-draw`;
                        matchRRspan.setAttribute("id", "match-rr-id-" + count);
                      }
                      matchStanding.appendChild(document.createTextNode(data.data.teams.blue.rounds_won + " : " + data.data.teams.blue.rounds_lost));
                    } else {
                      if (data.data.teams.blue.has_won == false) {
                        matchStanding.className = "match-result-lost-favmatch";
                        if (matchmode == "Competitive") {
                          matchRRspan.className = `match-rr-pp-lose`;
                          matchRRspan.setAttribute("id", "match-rr-id-" + count);
                        }
                        matchStanding.appendChild(document.createTextNode(data.data.teams.blue.rounds_won + " : " + data.data.teams.blue.rounds_lost));
                      } else {
                        matchStanding.className = "match-result-won-favmatch";
                        if (matchmode == "Competitive") {
                          matchRRspan.className = `match-rr-pp-win`;
                          matchRRspan.setAttribute("id", "match-rr-id-" + count);
                        }
                        matchStanding.appendChild(document.createTextNode(data.data.teams.blue.rounds_won + " : " + data.data.teams.blue.rounds_lost));
                      }
                    }
                  } else {
                    // NOTE(review): this draw check reads teams.blue while we
                    // are on Red; likely intended to read teams.red -- confirm.
                    if (data.data.teams.blue.rounds_won == data.data.teams.blue.rounds_lost) {
                      matchStanding.className = "match-result-draw-favmatch";
                      if (matchmode == "Competitive") {
                        matchRRspan.className = `match-rr-pp-draw`;
                        matchRRspan.setAttribute("id", "match-rr-id-" + count);
                      }
                      matchStanding.appendChild(document.createTextNode(data.data.teams.blue.rounds_won + " : " + data.data.teams.blue.rounds_lost));
                    } else {
                      if (data.data.teams.red.has_won == false) {
                        matchStanding.className = "match-result-lost-favmatch";
                        if (matchmode == "Competitive") {
                          matchRRspan.className = `match-rr-pp-lose`;
                          matchRRspan.setAttribute("id", "match-rr-id-" + count);
                        }
                        matchStanding.appendChild(document.createTextNode(data.data.teams.red.rounds_won + " : " + data.data.teams.red.rounds_lost));
                      } else {
                        matchStanding.className = "match-result-won-favmatch";
                        if (matchmode == "Competitive") {
                          matchRRspan.className = `match-rr-pp-win`;
                          matchRRspan.setAttribute("id", "match-rr-id-" + count);
                        }
                        matchStanding.appendChild(document.createTextNode(data.data.teams.red.rounds_won + " : " + data.data.teams.red.rounds_lost));
                      }
                    }
                  }
                }
              }
            } else {
              continue;
            }
          }
          // Assemble the card and insert it before the sentinel element.
          var startedOn = document.createElement("span");
          startedOn.className = "match-time";
          startedOn.appendChild(document.createTextNode(data.data.metadata.game_start_patched));
          var hiddenMatchID = document.createElement("span");
          hiddenMatchID.className = "hidden-matchid"
          hiddenMatchID.appendChild(document.createTextNode(data.data.metadata.matchid))
          Matchcontainer.appendChild(hiddenMatchID);
          Matchcontainer.setAttribute("onclick", "loadMatchView(this.firstChild.textContent, window.location.pathname)")
          Matchcontainer.appendChild(playedAgent);
          Matchcontainer.appendChild(matchmodeIcon);
          Matchcontainer.appendChild(matchKDA);
          Matchcontainer.appendChild(matchStanding);
          Matchcontainer.appendChild(startedOn);
          if (matchmode == "Competitive") {
            matchRRwrapper.appendChild(matchRRspan)
            Matchcontainer.appendChild(matchRRwrapper);
          }
          Matchcontainer.appendChild(matchMap);
          var favStarIcon = document.createElement("i")
          favStarIcon.classList.add("fas", "fa-star", )
          favStarIcon.setAttribute("id", "favStarFavs")
          favStarIcon.setAttribute("onclick", "loadRemovePromt(this.parentElement.firstChild.textContent, this.parentElement, this.parentElement.children[1].src, this.parentElement.children[3].textContent, this.parentElement.children[4].textContent); event.stopPropagation();");
          Matchcontainer.appendChild(favStarIcon)
          var wrapper = document.getElementById("loading-layer");
          var nextElement = document.getElementById("lastElement");
          wrapper.insertBefore(Matchcontainer, nextElement);
          setTimeout(function () {
            $('.loading-icon').css("display", "none");
            $('.loading-layer').css("opacity", "0");
            $('.loading-layer').css("display", "block");
            $('.loading-layer').fadeTo(150, 1)
          }, 200)
          ///////////////////////////////////////////////////////////////////////////////////////////////////////////////
        } else { //Downloaded Match not found
          // Fetch the match once, cache it on disk, then reload the page.
          // NOTE(review): async:false (synchronous XHR) blocks the renderer
          // and is deprecated.
          $.ajax({
            dataType: "json",
            url: `https://api.henrikdev.xyz/valorant/v2/match/${matchID}`,
            type: 'get',
            async: false,
            success: function (APIdata, xhr) {
              console.log("DOWNLOADED MATCH NOT FOUND, DOWNLOADING DATA")
              fs.writeFileSync(checkedPath, JSON.stringify(APIdata));
              window.location.href = ""
            },
            error: function (jqXHR) {
              createErrorCard(this.url, jqXHR.status);
            }
          });
        }
      } else { //No folder found
        fs.mkdirSync(checkedFolder)
        console.log("NO FOLDER FOUND, CREATING FOLDER AND DOWNLOADING DATA")
        window.location.href = ""
      }
    }
  }
  // Prune cached match files that are no longer in the favourites list.
  fs.readdir(process.env.APPDATA + `/VALTracker/user_data/favourite_matches/matches`, (err, files) => {
    if (err) {
      console.log(err);
      fs.mkdirSync(checkedFolder)
      console.log("NO FOLDER FOUND, CREATING FOLDER")
      // NOTE(review): `APIdata` is not in scope here (it is an ajax callback
      // parameter above), and `checkedFolder`/`checkedPath` are only set if
      // the loop ran -- this branch throws a ReferenceError if reached.
      fs.writeFileSync(checkedPath, JSON.stringify(APIdata));
      window.location.href = ""
    } else {
      files.forEach(file => {
        if (!matchIDarray.includes(path.parse(file).name)) {
          const deleteFile = process.env.APPDATA + `/VALTracker/user_data/favourite_matches/matches/` + file
          fs.unlink(deleteFile, (err) => {
            if (err) {
              console.log(err);
            } else {
              console.log('Deleted ' + file);
            }
          })
        }
      })
    }
  })
})
|
#!/usr/bin/env python
"""
Trains a model using one or more GPUs.
"""
from multiprocessing import Process
from time import sleep
import caffe, sys
def train(
        solver,  # solver proto definition
        snapshot,  # solver snapshot to restore
        gpus,  # list of device ids
        timing=False,  # show timing info for compute and communications
):
    """Spawn one solver process per GPU and block until all of them finish.

    All workers share a single NCCL uid so they can rendezvous for the
    gradient allreduce during training.
    """
    # caffe.set_mode_gpu()
    # caffe.set_device(1)
    # NCCL uses a uid to identify a session
    uid = caffe.NCCL.new_uid()
    # Parenthesized print works under both Python 2 and Python 3.
    print(uid)
    # caffe.init_log()
    # caffe.log('Using devices %s' % str(gpus))
    procs = []
    for rank in range(len(gpus)):
        p = Process(target=solve,
                    args=(solver, snapshot, gpus, timing, uid, rank))
        # Daemonize so workers die with the parent instead of leaking.
        p.daemon = True
        p.start()
        procs.append(p)
    for p in procs:
        p.join()
def time(solver, nccl):
    # Attach per-layer timers to the solver's net and log forward/backward,
    # total-solve and allreduce times every `display` iterations.
    fprop = []
    bprop = []
    total = caffe.Timer()
    allrd = caffe.Timer()
    # One forward and one backward timer per layer.
    for _ in range(len(solver.net.layers)):
        fprop.append(caffe.Timer())
        bprop.append(caffe.Timer())
    display = solver.param.display
    def show_time():
        # Emit the accumulated timings once per display interval.
        if solver.iter % display == 0:
            s = '\n'
            for i in range(len(solver.net.layers)):
                s += 'forw %3d %8s ' % (i, solver.net._layer_names[i])
                s += ': %.2f\n' % fprop[i].ms
            # Backward timings are reported in reverse layer order.
            for i in range(len(solver.net.layers) - 1, -1, -1):
                s += 'back %3d %8s ' % (i, solver.net._layer_names[i])
                s += ': %.2f\n' % bprop[i].ms
            s += 'solver total: %.2f\n' % total.ms
            s += 'allreduce: %.2f\n' % allrd.ms
            caffe.log(s)
    # Start/stop the per-layer timers around each layer's forward/backward.
    solver.net.before_forward(lambda layer: fprop[layer].start())
    solver.net.after_forward(lambda layer: fprop[layer].stop())
    solver.net.before_backward(lambda layer: bprop[layer].start())
    solver.net.after_backward(lambda layer: bprop[layer].stop())
    # Time the whole step, and the allreduce between backward and update;
    # registration order relative to `nccl` matters here.
    solver.add_callback(lambda: total.start(), lambda: (total.stop(), allrd.start()))
    solver.add_callback(nccl)
    solver.add_callback(lambda: '', lambda: (allrd.stop(), show_time()))
def solve(proto, snapshot, gpus, timing, uid, rank):
caffe.set_mode_gpu()
caffe.set_device(gpus[rank])
caffe.set_solver_count(len(gpus))
caffe.set_solver_rank(rank)
caffe.set_multiprocess(True)
solver = caffe.SGDSolver(proto)
if snapshot and len(snapshot) != 0:
solver.restore(snapshot)
nccl = caffe.NCCL(solver, uid)
nccl.bcast()
if timing and rank == 0:
time(solver, nccl)
else:
solver.add_callback(nccl)
# if solver.param.layer_wise_reduce:
# solver.net.after_backward(nccl)
while solver.iter < solver.param.max_iter:
solver.step(100)
sys.stderr.write("rank: {} iter: {}\n".format(rank, solver.iter))
if rank == 1:
sleep(1)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--solver", required=True, help="Solver proto definition.")
parser.add_argument("--snapshot", help="Solver snapshot to restore.")
parser.add_argument("--gpus", type=int, nargs='+', default=[0],
help="List of device ids.")
parser.add_argument("--timing", action='store_true', help="Show timing info.")
args = parser.parse_args()
train(args.solver, args.snapshot, args.gpus, args.timing)
|
//@target: ES6
function* g() {
let x = {
*[yield 0] () {}
};
}
|
/*
- these statements have no requirements
- code at multiple places depend on these
*/
Object.defineProperty(String.prototype, 'filename', {
value: function (extension) {
let s = this.replace(/\\/g, '/');
s = s.substring(s.lastIndexOf('/') + 1);
return extension ? s.replace(/[?#].+$/, '') : s.split('.')[0];
}
});
// Check if an array contains another array. Used to enable random categories set (jewerly & fossils)
Object.defineProperty(Array.prototype, 'arrayContains', {
value: function (sub) {
const result = sub.filter(item => this.indexOf(item) > -1);
return result.length > 0;
}
});
jQuery.fn.firstAncestorOrSelf = function (func) {
'use strict';
if (this.length !== 1) throw new TypeError('Not implemented (yet?) for selection length != 1.');
let node = this[0];
while (node) {
if (func(node)) return this.pushStack([node]);
node = node.parentNode;
}
}
jQuery.fn.propSearchUp = function (property) {
'use strict';
const element = this.firstAncestorOrSelf(element => element[property]);
return element && element.prop(property);
}
let uniqueSearchMarkers = [];
const categories = [
'arrowhead', 'bottle', 'bracelet', 'coastal', 'coin', 'cups', 'earring', 'egg',
'fast_travel', 'flower', 'fossils_random', 'heirlooms',
'jewelry_random', 'megafauna', 'nazar', 'necklace', 'oceanic', 'pentacles',
'random', 'ring', 'swords', 'treasure', 'user_pins', 'wands', 'weekly', 'legendary_animals'
];
const parentCategories = {
jewelry_random: ['bracelet', 'earring', 'necklace', 'ring'],
fossils_random: ['coastal', 'megafauna', 'oceanic']
};
let enabledCategories = JSON.parse(localStorage.getItem("enabled-categories")) || [...categories];
/*
- Leaflet extentions require Leaflet loaded
- guaranteed by this script’s position in index.html
*/
L.DivIcon.DataMarkup = L.DivIcon.extend({
_setIconStyles: function (img, name) {
L.DivIcon.prototype._setIconStyles.call(this, img, name);
if (this.options.marker)
img.dataset.marker = this.options.marker;
if (this.options.time) {
const from = parseInt(this.options.time[0]);
const to = parseInt(this.options.time[1]);
img.dataset.time = timeRange(from, to);
}
if (this.options.tippy)
img.dataset.tippy = this.options.tippy;
}
});
// Glowing icon (legendary animals)
L.Icon.TimedData = L.Icon.extend({
_setIconStyles: function (img, name) {
L.Icon.prototype._setIconStyles.call(this, img, name);
if (this.options.time && this.options.time !== []) {
img.dataset.time = this.options.time;
}
},
});
/*
- DOM will be ready, all scripts will be loaded (all loaded via DOM script elements)
- everything in this file here will be executed
- they can depend on their order here
- unfortunately some async dependencies are not properly taken care of (yet)
*/
$(function () {
try {
init();
} catch (e) {
if (getParameterByName('show-alert') == '1') {
alert(e);
}
console.error(e);
}
});
function init() {
try {
Sentry.init({ release: nocache, tracesSampleRate: isLocalHost() ? 1 : 0.3 });
} catch (err) {
console.log(`Sentry: ${err}`);
}
const navLang = navigator.language;
SettingProxy.addSetting(Settings, 'language', {
default: Language.availableLanguages.includes(navLang) ? navLang : 'en',
});
Settings.language = Language.availableLanguages.includes(Settings.language) ? Settings.language : 'en';
Menu.init();
const lootTables = MapBase.loadLootTable();
const itemsCollectionsWeekly = Item.init(); // Item.items (without .markers), Collection.collections, Collection.weekly*
itemsCollectionsWeekly.then(MapBase.loadOverlays);
MapBase.mapInit(); // MapBase.map
Language.init();
Language.setMenuLanguage();
Pins.init();
changeCursor();
// MapBase.markers (without .lMarker), Item.items[].markers
const markers = Promise.all([itemsCollectionsWeekly, lootTables]).then(Marker.init);
const cycles = Promise.all([itemsCollectionsWeekly, markers]).then(Cycles.load);
Inventory.init();
MapBase.loadFastTravels();
const filters = MapBase.loadFilters();
FME.init();
const treasures = Treasure.init();
const legendaries = Legendary.init();
Promise.all([cycles, markers]).then(MapBase.afterLoad);
Routes.init();
Promise.all([itemsCollectionsWeekly, markers, cycles, treasures, legendaries, filters])
.then(Loader.resolveMapModelLoaded);
if (!MapBase.isPreviewMode)
MadamNazar.loadMadamNazar();
if (Settings.isMenuOpened) $('.menu-toggle').click();
$('.map-alert').toggle(!Settings.alertClosed);
$('#language').val(Settings.language);
$('#marker-opacity').val(Settings.markerOpacity);
$('#filter-type').val(Settings.filterType);
$('#marker-size').val(Settings.markerSize);
$('#reset-markers').prop("checked", Settings.resetMarkersDaily);
$('#marker-cluster').prop("checked", Settings.isMarkerClusterEnabled);
$('#tooltip-map').prop('checked', Settings.showTooltipsMap);
$('#enable-marker-popups').prop("checked", Settings.isPopupsEnabled);
$('#enable-marker-popups-hover').prop("checked", Settings.isPopupsHoverEnabled);
$('#enable-marker-shadows').prop("checked", Settings.isShadowsEnabled);
$('#enable-legendary-backgrounds').prop("checked", Settings.isLaBgEnabled);
$('#legendary-animal-marker-type').val(Settings.legendarySpawnIconType);
$('#legendary-animal-marker-size').val(Settings.legendarySpawnIconSize);
$('#enable-dclick-zoom').prop("checked", Settings.isDoubleClickZoomEnabled);
$('#pins-place-mode').prop("checked", Settings.isPinsPlacingEnabled);
$('#pins-edit-mode').prop("checked", Settings.isPinsEditingEnabled);
$('#show-help').prop("checked", Settings.showHelp);
$('#show-coordinates').prop("checked", Settings.isCoordsOnClickEnabled);
$('#map-boundaries').prop("checked", Settings.isMapBoundariesEnabled);
$('#timestamps-24').prop("checked", Settings.isClock24Hour);
$('#enable-cycles').prop("checked", Settings.isCyclesVisible);
$('#enable-cycle-input').prop("checked", Settings.isCycleInputEnabled);
$("#enable-right-click").prop('checked', Settings.isRightClickEnabled);
$("#enable-debug").prop('checked', Settings.isDebugEnabled);
$("#enable-cycle-changer").prop('checked', Settings.isCycleChangerEnabled);
$("#timezone-offset").val(Settings.timeZoneOffset);
$("#show-utilities").prop('checked', Settings.showUtilitiesSettings);
$("#show-customization").prop('checked', Settings.showCustomizationSettings);
$("#show-routes").prop('checked', Settings.showRoutesSettings);
$("#show-import-export").prop('checked', Settings.showImportExportSettings);
$("#show-debug").prop('checked', Settings.showDebugSettings);
$("#help-container").toggle(Settings.showHelp);
$('.input-cycle').toggleClass('hidden', !Settings.isCycleInputEnabled);
$('.cycle-icon').toggleClass('hidden', !Settings.isCyclesVisible || Settings.isCycleInputEnabled);
$('.cycle-display').toggleClass('hidden', !Settings.isCyclesVisible);
$('#cycle-changer-container').toggleClass('hidden', !(Settings.isCycleChangerEnabled));
$("#utilities-container").toggleClass('opened', Settings.showUtilitiesSettings);
$("#customization-container").toggleClass('opened', Settings.showCustomizationSettings);
$("#routes-container").toggleClass('opened', Settings.showRoutesSettings);
$("#import-export-container").toggleClass('opened', Settings.showImportExportSettings);
$("#debug-container").toggleClass('opened', Settings.showDebugSettings);
if (!MapBase.isPreviewMode)
Updates.init();
updateTopWidget();
/*
- clockTick() relies on DOM and jquery
- guaranteed only by this script’s position at end of index.html
*/
setInterval(clockTick, 1000);
}
function isLocalHost() {
return location.hostname === "localhost" || location.hostname === "127.0.0.1";
}
function changeCursor() {
if (Settings.isCoordsOnClickEnabled || RouteSettings.customRouteEnabled)
$('.leaflet-grab').css('cursor', 'pointer');
else {
$('.leaflet-grab').css('cursor', 'grab');
$('.lat-lng-container').css('display', 'none');
}
}
function updateTopWidget() {
const pElements = $('.top-widget > p');
[].forEach.call(pElements, (element, index) => {
$(element).toggleClass('hidden', Settings.topWidgetState !== index);
});
}
function getParameterByName(name, url) {
if (!url) url = window.location.href;
name = name.replace(/[\[\]]/g, '\\$&');
const regex = new RegExp('[?&]' + name + '(=([^&#]*)|&|#|$)'),
results = regex.exec(url);
if (!results) return null;
if (!results[2]) return '';
return decodeURIComponent(results[2].replace(/\+/g, ' '));
}
function setClipboardText(text) {
const el = document.createElement('textarea');
el.value = text;
document.body.appendChild(el);
el.select();
document.execCommand('copy');
document.body.removeChild(el);
}
function downloadAsFile(filename, text) {
const element = document.createElement('a');
element.setAttribute('href', 'data:text/plain;charset=utf-8,' + encodeURIComponent(text));
element.setAttribute('download', filename);
element.style.display = 'none';
document.body.appendChild(element);
element.click();
document.body.removeChild(element);
}
function clockTick() {
'use strict';
const now = new Date();
now.setHours((now.getHours() + Settings.timeZoneOffset))
const gameTime = new Date(now * 30);
const gameHour = gameTime.getUTCHours();
const nightTime = gameHour >= 22 || gameHour < 5;
const clockFormat = {
timeZone: 'UTC',
hour: 'numeric',
minute: '2-digit',
hourCycle: Settings.isClock24Hour ? 'h23' : 'h12',
};
$('#time-in-game').text(gameTime.toLocaleString(Settings.language, clockFormat));
// Preview mode can remove this.
if ($('#day-cycle').length) {
const file = $('#day-cycle').attr('src').filename();
if ((nightTime && file !== "moon") || (!nightTime && file !== "sun"))
$('#day-cycle').removeClass('hidden').attr('src', `./assets/images/${nightTime ? 'moon' : 'sun'}.png`);
}
const cycleResetTime = Date.UTC(now.getUTCFullYear(), now.getUTCMonth(), now.getUTCDate() + 1);
const delta = new Date(cycleResetTime - now);
const deltaFormat = {
timeZone: 'UTC',
hour: '2-digit',
minute: '2-digit',
second: '2-digit',
hourCycle: 'h23',
};
$('#countdown').text(delta.toLocaleString([], deltaFormat));
$('[data-marker*="flower_agarita"], [data-marker*="flower_blood"]').css('filter', (function () {
if (MapBase.isPreviewMode) return 'none';
const isImportant = $(this).hasClass('highlight-items');
const whiteGlow = 'drop-shadow(0 0 .5rem #fff) drop-shadow(0 0 .3rem #fff)';
const redGlow = 'drop-shadow(0 0 .5rem #cc0000) drop-shadow(0 0 .4rem #cc0000';
const pinkGlow = 'drop-shadow(0 0 .5rem #ff6fc7) drop-shadow(0 0 .3rem #ff6fc7';
if (isImportant && nightTime)
return pinkGlow;
if (isImportant)
return redGlow;
return nightTime ? whiteGlow : 'none';
}));
$('.leaflet-marker-icon[data-time]').each(function () {
let time = $(this).data('time') + '';
if (time === null || time === '') return;
if (time.split(',').includes(gameHour + '') && !MapBase.isPreviewMode) {
$(this).css('filter', 'drop-shadow(0 0 .5rem #fff) drop-shadow(0 0 .25rem #fff)');
} else {
$(this).css('filter', 'none');
}
});
}
/*
- rest of file: event handler registrations (and some more functions)
- registrations require DOM ready and jquery
- guaranteed only by this script’s position at end of index.html
- some handlers require scripts to be initialized and data loaded
- NOT GUARANTEED
- only hope: user does not do anything until that happens
- please move them out of here to their respective owners
*/
$('.side-menu').on('scroll', function () {
// These are not equality checks because of mobile weirdness.
const atTop = $(this).scrollTop() <= 0;
const atBottom = $(this).scrollTop() + $(document).height() >= $(this).prop("scrollHeight");
$('.scroller-line-tp').toggle(atTop);
$('.scroller-arrow-tp').toggle(!atTop);
$('.scroller-line-bt').toggle(atBottom);
$('.scroller-arrow-bt').toggle(!atBottom);
});
$('.top-widget > p').on('click', function () {
const pElements = $('.top-widget > p').length;
Settings.topWidgetState = (Settings.topWidgetState + 1) % pElements;
updateTopWidget();
});
$("#show-all-markers").on("change", function () {
MapBase.showAllMarkers = $("#show-all-markers").prop('checked');
MapBase.addMarkers();
});
$('#enable-right-click').on("change", function () {
Settings.isRightClickEnabled = $("#enable-right-click").prop('checked');
});
$("#show-utilities").on("change", function () {
Settings.showUtilitiesSettings = $("#show-utilities").prop('checked');
$("#utilities-container").toggleClass('opened', Settings.showUtilitiesSettings);
});
$("#show-customization").on("change", function () {
Settings.showCustomizationSettings = $("#show-customization").prop('checked');
$("#customization-container").toggleClass('opened', Settings.showCustomizationSettings);
});
$("#show-routes").on("change", function () {
Settings.showRoutesSettings = $("#show-routes").prop('checked');
$("#routes-container").toggleClass('opened', Settings.showRoutesSettings);
});
$("#show-import-export").on("change", function () {
Settings.showImportExportSettings = $("#show-import-export").prop('checked');
$("#import-export-container").toggleClass('opened', Settings.showImportExportSettings);
});
$("#show-debug").on("change", function () {
Settings.showDebugSettings = $("#show-debug").prop('checked');
$("#debug-container").toggleClass('opened', Settings.showDebugSettings);
});
$('#enable-debug').on("change", function () {
Settings.isDebugEnabled = $("#enable-debug").prop('checked');
});
$('#timezone-offset').on("change", function () {
Settings.timeZoneOffset = parseInt($("#timezone-offset").val());
});
$('#enable-cycle-changer').on("change", function () {
Settings.isCycleChangerEnabled = $("#enable-cycle-changer").prop('checked');
$('#cycle-changer-container').toggleClass('hidden', !Settings.isCycleChangerEnabled);
if (!Settings.isCycleChangerEnabled) {
Cycles.resetCycle();
}
});
// “random” category still needs this (other collectibles have handlers in their class)
$('.menu-option.clickable input').on('click', function (event) {
event.stopPropagation();
});
$('.menu-option.clickable input').on('change', function (event) {
const el = $(event.target);
Cycles.categories[el.attr("name")] = parseInt(el.val());
MapBase.addMarkers();
Menu.refreshMenu();
});
$("#search").on("input", function () {
MapBase.onSearch($('#search').val());
$("#filter-type").val('none');
});
$("#copy-search-link").on("click", function () {
setClipboardText(`http://jeanropke.github.io/RDR2CollectorsMap/?search=${$('#search').val()}`);
});
$("#clear-search").on("click", function () {
$("#search").val('').trigger("input");
});
$("#reset-markers").on("change", function () {
Settings.resetMarkersDaily = $("#reset-markers").prop('checked');
});
$("#clear-markers").on("click", function () {
$.each(MapBase.markers, function (key, marker) {
marker.isCollected = false;
});
Menu.refreshMenu();
MapBase.addMarkers();
});
$("#clear-inventory").on("click", function () {
Item.items.forEach(item => item.amount = 0);
Inventory.updateItemHighlights();
Menu.refreshMenu();
MapBase.addMarkers();
});
$("#custom-routes").on("change", function () {
RouteSettings.customRouteEnabled = $("#custom-routes").prop('checked');
changeCursor();
const mapRoute = Routes.customRouteConnections.join(',');
RouteSettings.customRoute = mapRoute;
});
$("#clear-custom-routes").on("click", Routes.clearCustomRoutes);
$('.map-alert').on('click', function () {
Settings.alertClosed = true;
$('.map-alert').addClass('hidden');
});
$('.map-cycle-alert').on('click', function () {
$('.map-cycle-alert').addClass('hidden');
});
$('#show-coordinates').on('change', function () {
Settings.isCoordsOnClickEnabled = $("#show-coordinates").prop('checked');
changeCursor();
});
$('#map-boundaries').on('change', function () {
Settings.isMapBoundariesEnabled = $("#map-boundaries").prop('checked');
MapBase.map.setMaxBounds(); //Remove boundaries
MapBase.updateMapBoundaries();
});
$('#timestamps-24').on('change', function () {
Settings.isClock24Hour = $("#timestamps-24").prop('checked');
clockTick();
});
$("#language").on("change", function () {
Settings.language = $("#language").val();
Language.setMenuLanguage();
Menu.refreshMenu();
Cycles.setLocaleDate();
MapBase.addMarkers();
Treasure.onLanguageChanged();
Legendary.onLanguageChanged();
});
$("#marker-opacity").on("change", function () {
Settings.markerOpacity = Number($("#marker-opacity").val());
MapBase.addMarkers();
});
$("#marker-size").on("change", function () {
Settings.markerSize = Number($("#marker-size").val());
MapBase.addMarkers();
Treasure.onSettingsChanged();
Legendary.onSettingsChanged();
});
$('#filter-type').on('change', function () {
Settings.filterType = $(this).val();
});
$('#filter-min-amount-items').on("change", function () {
InventorySettings.maxAmountLowInventoryItems = $(this).val();
});
$("#enable-cycles").on("change", function () {
Settings.isCyclesVisible = $("#enable-cycles").prop('checked');
$('.cycle-icon').toggleClass('hidden', !Settings.isCyclesVisible || Settings.isCycleInputEnabled);
$('.cycle-display').toggleClass('hidden', !Settings.isCyclesVisible);
MapBase.addMarkers();
});
$("#enable-cycle-input").on("change", function () {
Settings.isCycleInputEnabled = $("#enable-cycle-input").prop('checked');
$('.input-cycle').toggleClass('hidden', !Settings.isCycleInputEnabled);
$('.cycle-icon').toggleClass('hidden', !Settings.isCyclesVisible || Settings.isCycleInputEnabled);
});
// Remove item from map when using the menu
$(document).on('click', '.collectible-wrapper[data-type]', function () {
const collectible = $(this).data('type');
const category = $(this).parent().data('type');
MapBase.removeItemFromMap(Cycles.categories[category], collectible, collectible, category, !InventorySettings.isMenuUpdateEnabled);
});
$('.menu-toggle').on('click', function () {
const menu = $('.side-menu').toggleClass('menu-opened');
Settings.isMenuOpened = menu.hasClass('menu-opened');
$('.menu-toggle').text(Settings.isMenuOpened ? 'X' : '>');
$('.top-widget').toggleClass('top-widget-menu-opened', Settings.isMenuOpened);
$('#fme-container').toggleClass('fme-menu-opened', Settings.isMenuOpened);
});
$('#tooltip-map').on('change', function () {
Settings.showTooltipsMap = $('#tooltip-map').prop('checked');
MapBase.updateTippy('tooltip');
});
$('#marker-cluster').on("change", function () {
Settings.isMarkerClusterEnabled = $("#marker-cluster").prop('checked');
MapBase.addMarkers();
});
$('#enable-marker-popups').on("change", function () {
Settings.isPopupsEnabled = $("#enable-marker-popups").prop('checked');
MapBase.addMarkers();
});
$('#enable-marker-popups-hover').on("change", function () {
Settings.isPopupsHoverEnabled = $("#enable-marker-popups-hover").prop('checked');
});
$('#enable-marker-shadows').on("change", function () {
Settings.isShadowsEnabled = $("#enable-marker-shadows").prop('checked');
Treasure.onSettingsChanged();
Legendary.onSettingsChanged();
MapBase.addMarkers();
});
$("#enable-legendary-backgrounds").on("change", function () {
Settings.isLaBgEnabled = $("#enable-legendary-backgrounds").prop('checked');
Legendary.onSettingsChanged();
});
$("#legendary-animal-marker-type").on("change", function () {
Settings.legendarySpawnIconType = $("#legendary-animal-marker-type").val();
Legendary.onSettingsChanged();
});
$("#legendary-animal-marker-size").on("change", function () {
Settings.legendarySpawnIconSize = Number($("#legendary-animal-marker-size").val());
Legendary.onSettingsChanged();
});
$('#enable-dclick-zoom').on("change", function () {
Settings.isDoubleClickZoomEnabled = $("#enable-dclick-zoom").prop('checked');
if (Settings.isDoubleClickZoomEnabled) {
MapBase.map.doubleClickZoom.enable();
} else {
MapBase.map.doubleClickZoom.disable();
}
});
$('#enable-inventory').on("change", function () {
InventorySettings.isEnabled = $("#enable-inventory").prop('checked');
MapBase.addMarkers();
$('#inventory-container').toggleClass("opened", InventorySettings.isEnabled);
});
$('#enable-inventory-popups').on("change", function () {
InventorySettings.isPopupsEnabled = $("#enable-inventory-popups").prop('checked');
MapBase.addMarkers();
});
$('#reset-inventory-daily').on("change", function () {
InventorySettings.resetInventoryDaily = $("#reset-inventory-daily").prop('checked');
});
$('#enable-additional-inventory-options').on("change", function () {
InventorySettings.enableAdvancedInventoryOptions = $("#enable-additional-inventory-options").prop('checked');
});
$('#highlight_low_amount_items').on("change", function () {
InventorySettings.highlightLowAmountItems = $('#highlight_low_amount_items').prop("checked");
MapBase.addMarkers();
});
$('#enable-inventory-menu-update').on("change", function () {
InventorySettings.isMenuUpdateEnabled = $("#enable-inventory-menu-update").prop('checked');
});
$('#auto-enable-sold-items').on("change", function () {
InventorySettings.autoEnableSoldItems = $('#auto-enable-sold-items').prop('checked');
});
$('#inventory-stack').on("change", function () {
let inputValue = parseInt($('#inventory-stack').val());
inputValue = !isNaN(inputValue) ? inputValue : 10;
InventorySettings.stackSize = inputValue;
});
$('#soft-flowers-inventory-stack').on("change", function () {
let inputValue = parseInt($('#soft-flowers-inventory-stack').val());
inputValue = !isNaN(inputValue) ? inputValue : 10;
InventorySettings.flowersSoftStackSize = inputValue;
});
$('#cookie-export').on("click", function () {
try {
let settings = localStorage;
const exportDate = new Date().toISOUTCDateString();
localStorage.setItem('main.date', exportDate);
// Remove irrelevant properties (permanently from localStorage):
delete settings.randid;
// Remove irrelevant properties (from COPY of localStorage, only to do not export them):
settings = $.extend(true, {}, localStorage);
delete settings['pinned-items'];
delete settings['routes.customRoute'];
// Set file version
settings.version = 2;
const settingsJson = JSON.stringify(settings, null, 4);
downloadAsFile(`collectible-map-settings-(${exportDate}).json`, settingsJson);
} catch (error) {
console.error(error);
alert(Language.get('alerts.feature_not_supported'));
}
});
function setSettings(settings) {
// Sorry, old settings! :-(
if (settings.version === undefined) {
location.reload();
return;
}
delete settings.version;
$.each(settings, function (key, value) {
localStorage.setItem(key, value);
});
// Do this for now, maybe look into refreshing the menu completely (from init) later.
location.reload();
}
$('#cookie-import').on('click', function () {
try {
let settings = null;
const file = $('#cookie-import-file').prop('files')[0];
let fallback = false;
if (!file) {
alert(Language.get('alerts.file_not_found'));
return;
}
try {
file.text().then((text) => {
try {
settings = JSON.parse(text);
setSettings(settings);
} catch (error) {
alert(Language.get('alerts.file_not_valid'));
return;
}
});
} catch (error) {
fallback = true;
}
if (fallback) {
const reader = new FileReader();
reader.addEventListener('loadend', (e) => {
const text = e.srcElement.result;
try {
settings = JSON.parse(text);
setSettings(settings);
} catch (error) {
alert(Language.get('alerts.file_not_valid'));
return;
}
});
reader.readAsText(file);
}
$.each(localStorage, function (key, value) {
localStorage.removeItem(key);
});
} catch (error) {
console.error(error);
alert(Language.get('alerts.feature_not_supported'));
}
});
$('#generate-route-generate-on-visit').on("change", function () {
RouteSettings.runOnStart = $("#generate-route-generate-on-visit").prop('checked');
});
$('#generate-route-ignore-collected').on("change", function () {
RouteSettings.ignoreCollected = $("#generate-route-ignore-collected").prop('checked');
Routes.generatePath();
});
$('#generate-route-important-only').on("change", function () {
RouteSettings.importantOnly = $("#generate-route-important-only").prop('checked');
Routes.generatePath();
});
$('#generate-route-auto-update').on("change", function () {
RouteSettings.autoUpdatePath = $("#generate-route-auto-update").prop('checked');
});
$('#generate-route-distance').on("change", function () {
let inputValue = parseInt($('#generate-route-distance').val());
inputValue = !isNaN(inputValue) && inputValue > 0 ? inputValue : 25;
RouteSettings.maxDistance = inputValue;
Routes.generatePath();
});
$('#generate-route-start').on("change", function () {
let inputValue = $('#generate-route-start').val();
let startLat = null;
let startLng = null;
$('#generate-route-start-lat').parent().hide();
$('#generate-route-start-lng').parent().hide();
switch (inputValue) {
case "Custom":
$('#generate-route-start-lat').parent().show();
$('#generate-route-start-lng').parent().show();
return;
case "N":
startLat = -11.875;
startLng = 86.875;
break;
case "NE":
startLat = -27.4375;
startLng = 161.2813;
break;
case "SE":
startLat = -100.75;
startLng = 131.125;
break;
case "SW":
default:
startLat = -119.9063;
startLng = 8.0313;
break;
}
$('#generate-route-start-lat').val(startLat);
$('#generate-route-start-lng').val(startLng);
RouteSettings.genPathStart = inputValue;
RouteSettings.startMarkerLat = startLat;
RouteSettings.startMarkerLng = startLng;
Routes.generatePath();
});
$('#generate-route-start-lat').on("change", function () {
let inputValue = parseFloat($('#generate-route-start-lat').val());
inputValue = !isNaN(inputValue) ? inputValue : -119.9063;
RouteSettings.startMarkerLat = inputValue;
Routes.generatePath();
});
$('#generate-route-start-lng').on("change", function () {
let inputValue = parseFloat($('#generate-route-start-lng').val());
inputValue = !isNaN(inputValue) ? inputValue : 8.0313;
RouteSettings.startMarkerLng = inputValue;
Routes.generatePath();
});
$('#generate-route-use-pathfinder').on("change", function () {
RouteSettings.usePathfinder = $("#generate-route-use-pathfinder").prop('checked');
// Hide incompatible options.
if (RouteSettings.usePathfinder) {
$('#generate-route-distance').parent().hide();
$('#generate-route-auto-update').parent().parent().hide();
$('#generate-route-fasttravel-weight').parent().show();
$('#generate-route-railroad-weight').parent().show();
} else {
$('#generate-route-distance').parent().show();
$('#generate-route-auto-update').parent().parent().show();
$('#generate-route-fasttravel-weight').parent().hide();
$('#generate-route-railroad-weight').parent().hide();
}
// Prevent both routes being stuck on screen.
Routes.clearPath();
Routes.generatePath();
});
$('#generate-route-fasttravel-weight').on("change", function () {
RouteSettings.fasttravelWeight = parseFloat($("#generate-route-fasttravel-weight").val());
Routes.generatePath();
});
$('#generate-route-railroad-weight').on("change", function () {
RouteSettings.railroadWeight = parseFloat($("#generate-route-railroad-weight").val());
Routes.generatePath();
});
$('#show-help').on("change", function () {
Settings.showHelp = $("#show-help").prop('checked');
$("#help-container").toggle(Settings.showHelp);
});
$(document).on('contextmenu', function (e) {
if (!Settings.isRightClickEnabled) e.preventDefault();
});
$('#delete-all-settings').on('click', function () {
$.each(localStorage, function (key) {
localStorage.removeItem(key);
});
location.reload(true);
});
/*
Reload convenience shortcut requested by @Adam Norton#6811.
Map’s tile area is reduced to a smaller area after lock-unlock cycle on iOS
if opened via iOS homescreen bookmarks. (Which has no reload button.)
*/
$('#reload-map').on('click', function () {
location.reload(true);
});
$('#open-clear-markers-modal').on('click', function () {
$('#clear-markers-modal').modal();
});
$('#open-clear-important-items-modal').on('click', function () {
$('#clear-important-items-modal').modal();
});
$('#open-clear-inventory-modal').on('click', function () {
$('#clear-inventory-modal').modal();
});
$('#open-clear-routes-modal').on('click', function () {
$('#clear-routes-modal').modal();
});
$('#open-delete-all-settings-modal').on('click', function () {
$('#delete-all-settings-modal').modal();
});
$('#open-remove-all-pins-modal').on('click', function () {
$('#remove-all-pins-modal').modal();
});
$('#open-updates-modal').on('click', function () {
Updates.showModal();
});
function formatLootTableLevel(table, rate = 1, level = 0) {
const result = $("<div>");
const items = MapBase.lootTables.loot[table];
const hasItems = !!items;
// Max. 2 digits but no trailing.
const formatted = Number((rate * 100).toPrecision(2));
if (hasItems) {
const title = $(`<span class="loot-table-title level-${level + 1}">`);
if (level === 0) {
title.append($(`<h4 data-text="menu.${table}">`));
} else {
title.append($(`<h5 data-text="menu.${table}">`));
title.append($(`<h5 class="rate">`).text(formatted + "%"));
}
result.append(title);
const wrapper = $(`<div class="loot-table-wrapper level-${level + 1}">`);
Object.keys(items).forEach(key => {
wrapper.append(formatLootTableLevel(key, rate * items[key], level + 1));
});
result.append(wrapper);
} else {
const item = $(`<div class="loot-table-item"><span data-text="${table}.name"></span><span class="rate">~${formatted}%</span></div>`);
result.append(item);
}
return result.children();
}
$('#loot-table-modal').on('show.bs.modal', function (event) {
// Get related loot table.
const button = $(event.relatedTarget);
const table = button.attr('data-loot-table');
let wrapper = $('<div class="loot-tables-wrapper">');
// Format loot table.
const tables = MapBase.lootTables.categories[table];
tables.forEach(table => {
wrapper.append(formatLootTableLevel(table));
});
// Append loot table to modal.
const translatedContent = Language.translateDom(wrapper)[0];
$('#loot-table-modal #loot').html(translatedContent);
});
// Build and show the "custom marker colors" modal: one color <select> per
// marker category, pre-selected from localStorage (with per-category defaults).
$('#open-custom-marker-color-modal').on('click', event => {
  // All available pin colors, sorted by their translated display names.
  const markerColors = ['aquagreen', 'beige', 'black', 'blue', 'brown', 'cadetblue', 'darkblue', 'darkgreen', 'darkorange', 'darkpurple', 'darkred', 'gray', 'green', 'lightblue', 'lightgray', 'lightgreen', 'lightorange', 'lightred', 'orange', 'pink', 'purple', 'red', 'white', 'yellow']
    .sort((...args) => {
      const [a, b] = args.map(color => Language.get(`map.user_pins.color.${color}`));
      return a.localeCompare(b, Settings.language, { sensitivity: 'base' });
    });
  // Default color for each category, used when no saved choice exists.
  const baseColors = { arrowhead: 'purple', bottle: 'brown', coin: 'darkorange', egg: 'white', flower: 'red', fossils_random: 'darkgreen', cups: 'blue', swords: 'blue', wands: 'blue', pentacles: 'blue', jewelry_random: 'yellow', bracelet: 'yellow', necklace: 'yellow', ring: 'yellow', earring: 'yellow', heirlooms: 'pink', random: 'lightgray', random_spot_metal: 'lightgray', random_spot_shovel: 'lightgray' };
  const randomCategories = ['random_spot_metal', 'random_spot_shovel']; // divide random spots to metal detector and shovel
  const itemCollections = Collection.collections;
  const possibleCategories = [...new Set(MapBase.markers.map(({ category }) => category))]
    // fossils categories => fossils_random, random => random_spot_metal & random_spot_shovel
    .filter(category => !['coastal', 'oceanic', 'megafauna', 'random'].includes(category));
  // Order categories by their position in the collections list; categories not
  // found there sort to the end.
  const categories = [
    ...possibleCategories,
    ...randomCategories,
  ].sort((...args) => {
    const [a, b] = args.map(element => {
      const index = itemCollections.map(({ category }) => category).indexOf(element);
      return index !== -1 ? index : itemCollections.length;
    });
    return a - b;
  });
  // NOTE: Object.assign mutates baseColors in place, so savedColors and
  // baseColors are the same merged object — the change handler below relies
  // on this when it persists baseColors.
  const savedColors = Object.assign(baseColors, JSON.parse(localStorage.getItem('customMarkersColors')) || {});
  const wrapper = $('<div id="custom-markers-colors"></div>');
  categories.forEach(category => {
    const snippet = $(`
    <div class="input-container" id="${category}-custom-color" data-help="custom_marker_color">
      <label for="custom-marker-color" data-text="menu.${category}"></label>
      <select class="input-text wide-select-menu" id="${category}-custom-marker-color"></select>
    </div>`);
    markerColors.forEach(color => {
      const option = $(`<option value="${color}" data-text="map.user_pins.color.${color}"></option>`)
        .attr('selected', savedColors[category] === color);
      $('select', snippet).append(option);
    });
    wrapper.append(snippet);
  });
  const translatedContent = Language.translateDom(wrapper);
  $('#custom-marker-color-modal #custom-colors').html(translatedContent);
  $('#custom-marker-color-modal').modal('show');
  // Persist any color change and redraw the markers with the new colors.
  $('.input-container', wrapper).on('change', event => {
    baseColors[event.target.id.split('-')[0]] = event.target.value;
    localStorage.setItem('customMarkersColors', JSON.stringify(baseColors));
    MapBase.addMarkers();
  });
});
// Rebuild the set of visible markers according to the active preset in
// Settings.filterType, then redraw the map.
// Side effects: rewrites the global `uniqueSearchMarkers`, may force-enable
// category toggles (global `enabledCategories`), and calls MapBase.addMarkers().
function filterMapMarkers() {
  uniqueSearchMarkers = [];
  // Predicate applied to every marker; the presets below override it.
  let filterType = () => true;
  // When false, matching markers are shown without force-enabling their category.
  let enableMainCategory = true;
  if (Settings.filterType === 'none') {
    // No preset active: fall back to the text search (if any) or show all.
    if ($('#search').val())
      MapBase.onSearch($('#search').val());
    else
      uniqueSearchMarkers = MapBase.markers;
    MapBase.addMarkers();
    return;
  }
  else if (['moonshiner', 'naturalist'].includes(Settings.filterType)) {
    // Role presets: keep only the items listed for that role.
    const roleItems = [].concat(...Object.values(MapBase.filtersData[Settings.filterType]));
    filterType = marker => roleItems.includes(marker.itemId);
  }
  else if (Settings.filterType === 'weekly') {
    const weeklyItems = Weekly.current.collectibleItems.map(item => item.itemId);
    filterType = marker => weeklyItems.includes(marker.itemId);
  }
  else if (Settings.filterType === 'important') {
    const importantItems = Item.items.filter(item => item.isImportant).map(item => item.itemId);
    filterType = marker => importantItems.includes(marker.itemId);
  }
  else if (Settings.filterType === 'static') {
    // Static spots only: drop markers whose legacy id marks them as random.
    filterType = marker => !marker.legacyItemId.includes('random');
  }
  // Hides only the flowers that do not belong to any moonshine recipe.
  else if (Settings.filterType === 'hideFlowers') {
    const roleItems = [].concat(...Object.values(MapBase.filtersData['moonshiner']));
    filterType = marker => roleItems.includes(marker.itemId) || marker.category !== 'flower';
  }
  else if (Settings.filterType === 'coinsSpots') {
    // tool === 2 — presumably the metal-detector tool id; TODO confirm.
    filterType = marker => ['coin', 'random'].includes(marker.category) && marker.tool === 2;
  }
  else if (Settings.filterType === 'lowInventoryItems') {
    enableMainCategory = false;
    const maxAmount = InventorySettings.maxAmountLowInventoryItems;
    const lowItems = Item.items.filter(item => item.amount < maxAmount).map(item => item.itemId);
    filterType = marker => lowItems.includes(marker.itemId);
  }
  MapBase.markers
    .filter(filterType)
    .forEach(marker => {
      uniqueSearchMarkers.push(marker);
      // Force-enable the category toggle so matching markers become visible.
      if (!enabledCategories.includes(marker.category) && enableMainCategory) {
        enabledCategories.push(marker.category);
        $(`[data-type="${marker.category}"]`).removeClass('disabled');
      }
    });
  MapBase.addMarkers();
}
/**
 * Linear proportion mapping `value` from the input range [iMin, iMax]
 * onto the output range [oMin, oMax], with the result clamped so it
 * never leaves [oMin, oMax].
 *
 * @param {number} value - number to convert
 * @param {number} iMin - input range minimum
 * @param {number} iMax - input range maximum
 * @param {number} oMin - output range minimum
 * @param {number} oMax - output range maximum
 * @returns {number} the proportionally mapped, clamped value
 */
function linear(value, iMin, iMax, oMin, oMax) {
  const clampToOutput = (num) =>
    num <= oMin ? oMin : num >= oMax ? oMax : num;
  const mapped = ((value - iMin) / (iMax - iMin)) * (oMax - oMin) + oMin;
  return clampToOutput(mapped);
}
// Formats an hours/minutes pair according to the user's 12/24-hour setting.
function convertToTime(hours = '00', minutes = '00') {
  if (Settings.isClock24Hour) {
    return `${hours}:${minutes}`;
  }
  const hourNumber = +hours;
  const twelveHour = hourNumber % 12 || 12; // 0 and 12 both display as 12
  const suffix = hourNumber >= 12 ? 'PM' : 'AM';
  return `${twelveHour}:${minutes} ${suffix}`;
}
// Returns an Array with every whole hour starting at `from` and stopping
// before `to`, wrapping around midnight (24-hour clock). The length cap
// guards against an endless loop when `to` is never reached.
function timeRange(from, to) {
  const hours = [];
  for (let hour = from; hour !== to; hour = (hour + 1) % 24) {
    hours.push(hour);
    if (hours.length >= 24) break;
  }
  return hours;
}
|
import mimetypes
import os
from ... import utils
from ...tl import types
class File:
    """
    Convenience class over media like photos or documents, which
    supports accessing the attributes in a more convenient way.
    If any of the attributes are not present in the current media,
    the properties will be `None`.
    The original media is available through the ``media`` attribute.
    """
    def __init__(self, media):
        """
        Wraps the given media object (e.g. a photo or document).

        Args:
            media: the original media object this instance exposes.
        """
        self.media = media

    @property
    def id(self):
        """
        The bot-API style ``file_id`` representing this file.
        .. note::
            This file ID may not work under user accounts,
            but should still be usable by bot accounts.
            You can, however, still use it to identify
            a file in for example a database.
        """
        return utils.pack_bot_file_id(self.media)

    @property
    def name(self):
        """
        The file name of this document.
        """
        return self._from_attr(types.DocumentAttributeFilename, 'file_name')

    @property
    def ext(self):
        """
        The extension from the mime type of this file.
        If the mime type is unknown, the extension
        from the file name (if any) will be used.
        """
        # ``mime_type`` is None when the media is neither a photo nor a
        # document; guard it so ``guess_extension`` never receives None
        # (which would raise TypeError).
        return (
            mimetypes.guess_extension(self.mime_type or '')
            or os.path.splitext(self.name or '')[-1]
            or None
        )

    @property
    def mime_type(self):
        """
        The mime-type of this file, or None if the media is neither
        a photo nor a document.
        """
        if isinstance(self.media, types.Photo):
            return 'image/jpeg'
        elif isinstance(self.media, types.Document):
            return self.media.mime_type

    @property
    def width(self):
        """
        The width in pixels of this media if it's a photo or a video.
        """
        return self._from_attr((
            types.DocumentAttributeImageSize, types.DocumentAttributeVideo), 'w')

    @property
    def height(self):
        """
        The height in pixels of this media if it's a photo or a video.
        """
        return self._from_attr((
            types.DocumentAttributeImageSize, types.DocumentAttributeVideo), 'h')

    @property
    def duration(self):
        """
        The duration in seconds of the audio or video.
        """
        return self._from_attr((
            types.DocumentAttributeAudio, types.DocumentAttributeVideo), 'duration')

    @property
    def title(self):
        """
        The title of the song.
        """
        return self._from_attr(types.DocumentAttributeAudio, 'title')

    @property
    def performer(self):
        """
        The performer of the song.
        """
        return self._from_attr(types.DocumentAttributeAudio, 'performer')

    @property
    def emoji(self):
        """
        A string with all emoji that represent the current sticker.
        """
        return self._from_attr(types.DocumentAttributeSticker, 'alt')

    @property
    def sticker_set(self):
        """
        The :tl:`InputStickerSet` to which the sticker file belongs.
        """
        return self._from_attr(types.DocumentAttributeSticker, 'stickerset')

    @property
    def size(self):
        """
        The size in bytes of this file.
        """
        if isinstance(self.media, types.Photo):
            # Use the largest (last) size variant of the photo.
            return utils._photo_size_byte_count(self.media.sizes[-1])
        elif isinstance(self.media, types.Document):
            return self.media.size

    def _from_attr(self, cls, field):
        """
        Returns ``field`` from the first document attribute that is an
        instance of ``cls`` (a type or tuple of types), or None when the
        media is not a document or no attribute matches.

        Args:
            cls: attribute class (or tuple of classes) to look for.
            field: name of the attribute field to read.
        """
        if isinstance(self.media, types.Document):
            for attr in self.media.attributes:
                if isinstance(attr, cls):
                    return getattr(attr, field, None)
        return None
|
import createSvgIcon from './utils/createSvgIcon.js';
import { jsx as _jsx } from "react/jsx-runtime";
// Auto-generated Material UI icon: two-tone variant of "Hearing".
// Each child element carries a stable key ("0", "1", "2") for React reconciliation.
export default createSvgIcon([/*#__PURE__*/_jsx("path", {
  d: "M7.64 2.64 6.22 1.22C4.23 3.21 3 5.96 3 9s1.23 5.79 3.22 7.78l1.41-1.41C6.01 13.74 5 11.49 5 9s1.01-4.74 2.64-6.36z"
}, "0"), /*#__PURE__*/_jsx("circle", {
  cx: "14",
  cy: "9",
  r: "2.5"
}, "1"), /*#__PURE__*/_jsx("path", {
  d: "M17 20c-.29 0-.56-.06-.76-.15-.71-.37-1.21-.88-1.71-2.38-.51-1.56-1.47-2.29-2.39-3-.79-.61-1.61-1.24-2.32-2.53C9.29 10.98 9 9.93 9 9c0-2.8 2.2-5 5-5s5 2.2 5 5h2c0-3.93-3.07-7-7-7S7 5.07 7 9c0 1.26.38 2.65 1.07 3.9.91 1.65 1.98 2.48 2.85 3.15.81.62 1.39 1.07 1.71 2.05.6 1.82 1.37 2.84 2.73 3.55.51.23 1.07.35 1.64.35 2.21 0 4-1.79 4-4h-2c0 1.1-.9 2-2 2z"
}, "2")], 'HearingTwoTone');
|
import { ResponseError } from "./errors";
// Normalizes a request target to a plain string: strings pass through
// unchanged, URL-like objects contribute their `href`.
function getUrl(url) {
  return typeof url === "string" ? url : url.href;
}
/**
 * Reads a fetch Response body in the requested representation.
 * When `parse` is unset or "detect", the content-type header selects
 * between JSON and text; any other content type resolves to `undefined`.
 */
export async function parseBody(res, parse) {
  let mode = parse;
  if (mode == undefined || mode === "detect") {
    const contentType = res.headers.get("content-type");
    if (contentType?.startsWith("application/json")) {
      mode = "json";
    } else if (contentType?.startsWith("text/")) {
      mode = "text";
    }
  }
  if (mode === "arrayBuffer") {
    return await res.arrayBuffer();
  }
  if (mode === "json") {
    return await res.json();
  }
  if (mode === "text") {
    return await res.text();
  }
  if (mode === "stream") {
    return res.body;
  }
  if (mode === "raw") {
    return res;
  }
}
/**
 * Merges per-request options with defaults and normalizes the shorthand
 * forms of `url`, `response`, `timeout` and `retry` into object form.
 */
export function getOptions(requestOptions, defaultOptions) {
  const options =
    typeof requestOptions === "string"
      ? { ...defaultOptions, url: requestOptions }
      : { ...defaultOptions, ...requestOptions };

  // `response` shorthand: a string/boolean (or nothing) means "body mode".
  let response = options.response;
  if (!response || typeof response === "string" || typeof response === "boolean") {
    response = { body: options.response };
  }

  // `timeout` shorthand: a bare number (or nothing) is the total budget (default 10s).
  let timeout = options.timeout;
  if (!timeout || typeof timeout === "number") {
    timeout = { total: options.timeout ?? 10000 };
  }

  // `retry` shorthand: a number or function becomes the `attempts` setting.
  let retry;
  if (options.retry != undefined) {
    const isShorthand =
      typeof options.retry === "number" || typeof options.retry === "function";
    retry = isShorthand ? { attempts: options.retry } : options.retry;
  }

  return {
    ...options,
    url: getUrl(options.url),
    response,
    timeout,
    retry,
  };
}
/**
 * Decides whether a failed attempt should be retried.
 * Returns `false` for "stop", otherwise the delay in ms before the next
 * attempt (or whatever a user-supplied predicate returned, verbatim).
 */
export function checkRetry(attempt, err, options) {
  // No retry configuration at all.
  if (options?.attempts == undefined) {
    return false;
  }
  // Numeric budget: stop past the limit, and never retry client-side errors.
  if (typeof options.attempts === "number") {
    if (attempt > options.attempts) {
      return false;
    }
    // do not retry status codes < 500
    if (err instanceof ResponseError && err.status < 500) {
      return false;
    }
  }
  // User-supplied predicate: any result other than `true` is returned as-is.
  if (typeof options.attempts === "function") {
    const verdict = options.attempts(attempt, err);
    if (verdict !== true) {
      return verdict;
    }
  }
  // Retrying — work out the delay. Fixed delay:
  if (typeof options.delay === "number") {
    return options.delay;
  }
  // Delay function:
  if (typeof options.delay === "function") {
    return options.delay(attempt, err);
  }
  // Default: exponential backoff capped at 30s.
  return Math.min(500 * 2 ** (attempt - 1), 30000);
}
// Promise-based wrapper around setTimeout: resolves after `time` ms.
export function sleep(time) {
  return new Promise((resolve) => setTimeout(resolve, time));
}
|
// Loads the article list from the local JSON store and renders each article
// as an accordion entry whose details section is fetched lazily.
function solution() {
  // Fetch the article list and build one accordion per article.
  (async () => {
    let articleRequest = await fetch('http://localhost:3030/jsonstore/advanced/articles/list');
    let article = await articleRequest.json();
    article.forEach(el => {
      // NOTE(review): only id and title are passed — createHtml's third
      // parameter (articleContent) is never supplied and is unused.
      createHtml(el._id, el.title);
    })
  })();
  // Builds the accordion DOM for one article and kicks off the details fetch.
  function createHtml(articleId, articleTitle, articleContent) {
    let createDivAccordion = document.createElement('div');
    createDivAccordion.classList.add('accordion');
    let createDivHead = document.createElement('div');
    createDivHead.classList.add('head');
    let createSpan = document.createElement('span');
    createSpan.textContent = articleTitle;
    let createButton = document.createElement('button');
    createButton.classList.add('button');
    createButton.id = articleId;
    createButton.addEventListener('click', buttonHandler);
    createButton.textContent = 'More';
    createDivHead.appendChild(createSpan);
    createDivHead.appendChild(createButton);
    createDivAccordion.appendChild(createDivHead);
    let main = document.getElementById('main');
    main.appendChild(createDivAccordion)
    let createDivExtra = document.createElement('div');
    createDivExtra.classList.add('extra');
    // Fetch the article body asynchronously and append it once available.
    (async () => {
      let req = await fetch(`http://localhost:3030/jsonstore/advanced/articles/details/${articleId}`);
      let res = await req.json();
      let createP = document.createElement('p');
      createP.textContent = res.content;
      createDivExtra.appendChild(createP);
      createDivAccordion.appendChild(createDivExtra);
    })();
  }
  // Toggles the button label and the visibility of the details section.
  function buttonHandler(e) {
    e.target.textContent = e.target.textContent === 'More' ? e.target.textContent = 'Less' : e.target.textContent = 'More';
    let parent = e.target.parentNode.parentNode.querySelector('.extra');
    parent.style.display = parent.style.display === 'none' || parent.style.display === '' ? parent.style.display = 'block' : parent.style.display = 'none'
  }
}
solution();
|
# flake8: noqa: F811, F401
import asyncio
import time
import pytest
import logging
from littlelambocoin.protocols import full_node_protocol
from littlelambocoin.types.peer_info import PeerInfo
from littlelambocoin.util.ints import uint16
from littlelambocoin.wallet.transaction_record import TransactionRecord
from tests.connection_utils import connect_and_get_peer
from tests.setup_nodes import bt, self_hostname, setup_simulators_and_wallets
from tests.time_out_assert import time_out_assert
from tests.core.fixtures import (
default_400_blocks,
)
def wallet_height_at_least(wallet_node, h):
    """
    Predicate for ``time_out_assert``: True once the wallet's blockchain
    peak has reached height ``h``.

    Fix: the previous implementation compared with ``==``, which could miss
    the target if the wallet advanced past ``h`` between polls; ``>=``
    matches the function's name and intent. ``None`` (no peak yet) is False.
    """
    height = wallet_node.wallet_state_manager.blockchain.get_peak_height()
    return height is not None and height >= h
# Module-level logger for this test module.
log = logging.getLogger(__name__)


@pytest.fixture(scope="session")
def event_loop():
    # Session-scoped event loop shared by all async tests in this session.
    # NOTE(review): ``asyncio.get_event_loop`` is deprecated for this use in
    # newer Python / pytest-asyncio versions — confirm before upgrading.
    loop = asyncio.get_event_loop()
    yield loop
class TestMempoolPerformance:
    """Benchmarks block processing (which revalidates the mempool) with a large transaction pending."""

    @pytest.fixture(scope="module")
    async def wallet_nodes(self):
        # Two simulator full nodes plus one wallet, with a fixed key seed so
        # the generated keys are deterministic across runs.
        key_seed = bt.farmer_master_sk_entropy
        async for _ in setup_simulators_and_wallets(2, 1, {}, key_seed=key_seed):
            yield _

    @pytest.mark.asyncio
    async def test_mempool_update_performance(self, wallet_nodes, default_400_blocks):
        # Sync a wallet to 400 pre-made blocks, submit one very large
        # transaction, then assert that processing each of the final blocks
        # stays under one second.
        blocks = default_400_blocks
        full_nodes, wallets = wallet_nodes
        wallet_node = wallets[0][0]
        wallet_server = wallets[0][1]
        full_node_api_1 = full_nodes[0]
        full_node_api_2 = full_nodes[1]
        server_1 = full_node_api_1.full_node.server
        server_2 = full_node_api_2.full_node.server
        wallet = wallet_node.wallet_state_manager.main_wallet
        ph = await wallet.get_new_puzzlehash()

        for block in blocks:
            await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))

        await wallet_server.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
        await time_out_assert(60, wallet_height_at_least, True, wallet_node, 399)

        big_transaction: TransactionRecord = await wallet.generate_signed_transaction(40000000000000, ph, 2213)

        peer = await connect_and_get_peer(server_1, server_2)
        await full_node_api_1.respond_transaction(
            full_node_protocol.RespondTransaction(big_transaction.spend_bundle), peer, test=True
        )
        # Drop all connections so the timing below is not affected by peer
        # broadcasting.
        cons = list(server_1.all_connections.values())[:]
        for con in cons:
            await con.close()

        blocks = bt.get_consecutive_blocks(3, blocks)
        await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(blocks[-3]))

        for block in blocks[-2:]:
            start_t_2 = time.time()
            await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
            # Each of the last blocks must be processed in under a second.
            assert time.time() - start_t_2 < 1
|
import traceback
from os import sys, path
root_path = path.dirname(path.dirname(path.abspath(__file__)))
from StockAnalysisSystem.core.Utility.time_utility import *
from StockAnalysisSystem.core.StockAnalysisSystem import StockAnalysisSystem
def __test_factor_for_one_stock(sas: StockAnalysisSystem, factors: [str]):
    """
    Query the given finance factor(s) for a single stock (000021.SZSE) over
    the default time range and assert every requested factor column exists.
    """
    # Accept a single factor name as well as a list/tuple of names.
    if not isinstance(factors, (list, tuple)):
        factors = [factors]
    data_hub = sas.get_data_hub_entry()
    data_center = data_hub.get_data_center()
    df = data_center.query_from_factor('Factor.Finance', '000021.SZSE', (default_since(), now()),
                                       fields=factors, readable=True)
    print(df)
    assert df is not None and len(df) > 0
    for fct in factors:
        assert fct in df.columns
def __test_factor_for_all_stock(sas: StockAnalysisSystem, factors: [str]):
    """
    Query the given finance factor(s) for all stocks (empty identity) over
    December 2018 and assert every requested factor column exists.
    """
    # Accept a single factor name as well as a list/tuple of names.
    if not isinstance(factors, (list, tuple)):
        factors = [factors]
    data_hub = sas.get_data_hub_entry()
    data_center = data_hub.get_data_center()
    df = data_center.query_from_factor('Factor.Finance', '',
                                       (text_auto_time('2018-12-01'), text_auto_time('2018-12-31')),
                                       fields=factors, readable=True)
    print(df)
    assert df is not None and len(df) > 0
    for fct in factors:
        assert fct in df.columns
# Each test below checks a single finance factor, first against one stock and
# then against all stocks. Factor names are Chinese financial terms and must
# match the factor plug-in registry exactly.
def test_amazing_formula_factor(sas: StockAnalysisSystem):
    # 资本收益率 — return on capital (Magic Formula factor)
    __test_factor_for_one_stock(sas, '资本收益率')
    __test_factor_for_all_stock(sas, '资本收益率')


def test_roe(sas: StockAnalysisSystem):
    # 净资产收益率 — return on equity
    __test_factor_for_one_stock(sas, '净资产收益率')
    __test_factor_for_all_stock(sas, '净资产收益率')


def test_roa(sas: StockAnalysisSystem):
    # 总资产收益率 — return on assets
    __test_factor_for_one_stock(sas, '总资产收益率')
    __test_factor_for_all_stock(sas, '总资产收益率')


def test_gross_margin(sas: StockAnalysisSystem):
    # 毛利率 — gross margin
    __test_factor_for_one_stock(sas, '毛利率')
    __test_factor_for_all_stock(sas, '毛利率')


def test_operating_margin(sas: StockAnalysisSystem):
    # 营业利润率 — operating margin
    __test_factor_for_one_stock(sas, '营业利润率')
    __test_factor_for_all_stock(sas, '营业利润率')


def test_net_profit_rate(sas: StockAnalysisSystem):
    # 净利润率 — net profit rate
    __test_factor_for_one_stock(sas, '净利润率')
    __test_factor_for_all_stock(sas, '净利润率')


def test_entry(sas: StockAnalysisSystem):
    # Runs all factor tests in sequence against the given system instance.
    test_amazing_formula_factor(sas)
    test_roe(sas)
    test_roa(sas)
    test_gross_margin(sas)
    test_operating_margin(sas)
    test_net_profit_rate(sas)
# ----------------------------------------------------- File Entry -----------------------------------------------------
def main():
    # FIXME(review): test_entry() requires a StockAnalysisSystem instance
    # (``sas``) but none is constructed or passed here — this call raises
    # TypeError when run. Confirm how ``sas`` should be created/configured.
    test_entry()
    # If program reaches here, all test passed.
    print('All test passed.')
# ------------------------------------------------- Exception Handling -------------------------------------------------
def exception_hook(exc_type, exc_value, exc_tb):
    """
    Custom ``sys.excepthook``: prints the exception details to stdout and
    then delegates to the interpreter's default handler.

    The parameters were renamed from ``type``/``value``/``tback`` to avoid
    shadowing the ``type`` builtin; the hook is invoked positionally by the
    interpreter, so the rename is interface-safe.
    """
    # log the exception here
    print('Exception hook triggered.')
    print(exc_type)
    print(exc_value)
    print(exc_tb)
    # then call the default handler
    sys.__excepthook__(exc_type, exc_value, exc_tb)
if __name__ == "__main__":
    # Install the custom hook for uncaught exceptions, then run the tests.
    sys.excepthook = exception_hook
    try:
        main()
    except Exception as e:
        print('Error =>', e)
        # NOTE(review): ``traceback`` is used here but never imported at the
        # top of this file — this line raises NameError when reached.
        print('Error =>', traceback.format_exc())
        exit()
    finally:
        pass
|
import { useState, useEffect } from 'react';
/**
 * Returns a value that only updates after `value` has been stable for
 * `delay` milliseconds — useful for throttling expensive reactions to
 * fast-changing inputs (e.g. search fields).
 *
 * @param {*} value - the source value to debounce
 * @param {number} delay - quiet period in milliseconds
 * @returns {*} the debounced value
 */
export default function useDebounce(value, delay) {
  const [debouncedValue, setValue] = useState(value);

  useEffect(() => {
    const timer = setTimeout(() => {
      setValue(value);
    }, delay);
    // Cancel the pending update if value/delay change or on unmount.
    return () => {
      clearTimeout(timer);
    };
    // Fix: `delay` was missing from the dependency list, so a changed delay
    // would not reschedule the pending timer.
  }, [value, delay]);

  return debouncedValue;
}
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
from typing import Optional, Sequence, cast
import gym
import hydra.utils
import numpy as np
import omegaconf
import torch
import mbrl.constants
import mbrl.models
import mbrl.planning
import mbrl.third_party.pytorch_sac_pranz24 as pytorch_sac_pranz24
import mbrl.types
import mbrl.util
import mbrl.util.common
import mbrl.util.math
from mbrl.planning.sac_wrapper import SACAgent
from mbrl.third_party.pytorch_sac import VideoRecorder
# Extend the standard evaluation log columns with MBPO-specific fields:
# the training epoch and the current model-rollout horizon.
MBPO_LOG_FORMAT = mbrl.constants.EVAL_LOG_FORMAT + [
    ("epoch", "E", "int"),
    ("rollout_length", "RL", "int"),
]
def rollout_model_and_populate_sac_buffer(
    model_env: mbrl.models.ModelEnv,
    replay_buffer: mbrl.util.ReplayBuffer,
    agent: SACAgent,
    sac_buffer: mbrl.util.ReplayBuffer,
    sac_samples_action: bool,
    rollout_horizon: int,
    batch_size: int,
):
    """
    Roll the learned model forward from states sampled from the real replay
    buffer, storing the imagined transitions in ``sac_buffer``.

    Args:
        model_env: model-based environment used to generate imagined steps.
        replay_buffer: source of real initial observations.
        agent: SAC agent providing actions along the rollout.
        sac_buffer: destination buffer for imagined transitions.
        sac_samples_action: whether the agent samples (vs. acts greedily).
        rollout_horizon: number of imagined steps per rollout.
        batch_size: number of rollouts generated in parallel.
    """
    batch = replay_buffer.sample(batch_size)
    initial_obs, *_ = cast(mbrl.types.TransitionBatch, batch).astuple()
    model_state = model_env.reset(
        initial_obs_batch=cast(np.ndarray, initial_obs),
        return_as_np=True,
    )
    # Tracks which rollouts have terminated; their later steps are discarded.
    accum_dones = np.zeros(initial_obs.shape[0], dtype=bool)
    obs = initial_obs
    for i in range(rollout_horizon):
        action = agent.act(obs, sample=sac_samples_action, batched=True)
        pred_next_obs, pred_rewards, pred_dones, model_state = model_env.step(
            action, model_state, sample=True
        )
        # Only transitions from rollouts that have not yet terminated are kept.
        sac_buffer.add_batch(
            obs[~accum_dones],
            action[~accum_dones],
            pred_next_obs[~accum_dones],
            pred_rewards[~accum_dones, 0],
            pred_dones[~accum_dones, 0],
        )
        obs = pred_next_obs
        accum_dones |= pred_dones.squeeze()
def evaluate(
    env: gym.Env,
    agent: SACAgent,
    num_episodes: int,
    video_recorder: VideoRecorder,
) -> float:
    """
    Run ``num_episodes`` evaluation episodes and return the average episode
    reward. Only the first episode is recorded on video.
    """
    avg_episode_reward = 0
    for episode in range(num_episodes):
        obs = env.reset()
        # Record video for the first episode only.
        video_recorder.init(enabled=(episode == 0))
        done = False
        episode_reward = 0
        while not done:
            action = agent.act(obs)
            obs, reward, done, _ = env.step(action)
            video_recorder.record(env)
            episode_reward += reward
        avg_episode_reward += episode_reward
    return avg_episode_reward / num_episodes
def maybe_replace_sac_buffer(
    sac_buffer: Optional[mbrl.util.ReplayBuffer],
    obs_shape: Sequence[int],
    act_shape: Sequence[int],
    new_capacity: int,
    seed: int,
) -> mbrl.util.ReplayBuffer:
    """
    Return a SAC replay buffer with the requested capacity, reusing the old
    buffer's RNG and copying its contents when the capacity changes.
    Returns the existing buffer unchanged if its capacity already matches.
    """
    if sac_buffer is None or new_capacity != sac_buffer.capacity:
        if sac_buffer is None:
            rng = np.random.default_rng(seed=seed)
        else:
            # Preserve the RNG state across resizes for reproducibility.
            rng = sac_buffer.rng
        new_buffer = mbrl.util.ReplayBuffer(new_capacity, obs_shape, act_shape, rng=rng)
        if sac_buffer is None:
            return new_buffer
        # Carry existing transitions over into the resized buffer.
        obs, action, next_obs, reward, done = sac_buffer.get_all().astuple()
        new_buffer.add_batch(obs, action, next_obs, reward, done)
        return new_buffer
    return sac_buffer
def train(
    env: gym.Env,
    test_env: gym.Env,
    termination_fn: mbrl.types.TermFnType,
    cfg: omegaconf.DictConfig,
    silent: bool = False,
    work_dir: Optional[str] = None,
) -> np.float32:
    """
    Run the MBPO training loop.

    Alternates between collecting real environment steps, periodically
    retraining the learned dynamics model, generating short imagined rollouts
    to fill the SAC buffer, and updating the SAC agent from a mix of real and
    imagined data.

    Args:
        env: training environment.
        test_env: separate environment used only for evaluation.
        termination_fn: termination function for the model environment.
        cfg: full experiment configuration (algorithm + overrides).
        silent: if True, suppress model-training and agent logging.
        work_dir: output directory; defaults to the current working directory.

    Returns:
        The best average evaluation reward seen during training.
    """
    # ------------------- Initialization -------------------
    debug_mode = cfg.get("debug_mode", False)
    obs_shape = env.observation_space.shape
    act_shape = env.action_space.shape
    mbrl.planning.complete_agent_cfg(env, cfg.algorithm.agent)
    agent = SACAgent(
        cast(pytorch_sac_pranz24.SAC, hydra.utils.instantiate(cfg.algorithm.agent))
    )
    work_dir = work_dir or os.getcwd()
    # enable_back_compatible to use pytorch_sac agent
    logger = mbrl.util.Logger(work_dir, enable_back_compatible=True)
    logger.register_group(
        mbrl.constants.RESULTS_LOG_NAME,
        MBPO_LOG_FORMAT,
        color="green",
        dump_frequency=1,
    )
    save_video = cfg.get("save_video", False)
    video_recorder = VideoRecorder(work_dir if save_video else None)
    rng = np.random.default_rng(seed=cfg.seed)
    torch_generator = torch.Generator(device=cfg.device)
    if cfg.seed is not None:
        torch_generator.manual_seed(cfg.seed)
    # -------------- Create initial dataset --------------
    dynamics_model = mbrl.util.common.create_one_dim_tr_model(cfg, obs_shape, act_shape)
    use_double_dtype = cfg.algorithm.get("normalize_double_precision", False)
    dtype = np.double if use_double_dtype else np.float32
    replay_buffer = mbrl.util.common.create_replay_buffer(
        cfg,
        obs_shape,
        act_shape,
        rng=rng,
        obs_type=dtype,
        action_type=dtype,
        reward_type=dtype,
    )
    random_explore = cfg.algorithm.random_initial_explore
    # Seed the real replay buffer with initial exploration steps (random or
    # agent-driven depending on the config).
    mbrl.util.common.rollout_agent_trajectories(
        env,
        cfg.algorithm.initial_exploration_steps,
        mbrl.planning.RandomAgent(env) if random_explore else agent,
        {} if random_explore else {"sample": True, "batched": False},
        replay_buffer=replay_buffer,
    )
    # ---------------------------------------------------------
    # --------------------- Training Loop ---------------------
    rollout_batch_size = (
        cfg.overrides.effective_model_rollouts_per_step * cfg.algorithm.freq_train_model
    )
    trains_per_epoch = int(
        np.ceil(cfg.overrides.epoch_length / cfg.overrides.freq_train_model)
    )
    updates_made = 0
    env_steps = 0
    model_env = mbrl.models.ModelEnv(
        env, dynamics_model, termination_fn, None, generator=torch_generator
    )
    model_trainer = mbrl.models.ModelTrainer(
        dynamics_model,
        optim_lr=cfg.overrides.model_lr,
        weight_decay=cfg.overrides.model_wd,
        logger=None if silent else logger,
    )
    best_eval_reward = -np.inf
    epoch = 0
    sac_buffer = None
    while env_steps < cfg.overrides.num_steps:
        # The imagined-rollout horizon follows a (typically growing) schedule
        # over epochs.
        rollout_length = int(
            mbrl.util.math.truncated_linear(
                *(cfg.overrides.rollout_schedule + [epoch + 1])
            )
        )
        # Resize the SAC buffer when the required capacity changes.
        sac_buffer_capacity = rollout_length * rollout_batch_size * trains_per_epoch
        sac_buffer_capacity *= cfg.overrides.num_epochs_to_retain_sac_buffer
        sac_buffer = maybe_replace_sac_buffer(
            sac_buffer, obs_shape, act_shape, sac_buffer_capacity, cfg.seed
        )
        obs, done = None, False
        for steps_epoch in range(cfg.overrides.epoch_length):
            if steps_epoch == 0 or done:
                obs, done = env.reset(), False
            # --- Doing env step and adding to model dataset ---
            next_obs, reward, done, _ = mbrl.util.common.step_env_and_add_to_buffer(
                env, obs, agent, {}, replay_buffer
            )
            # --------------- Model Training -----------------
            if (env_steps + 1) % cfg.overrides.freq_train_model == 0:
                mbrl.util.common.train_model_and_save_model_and_data(
                    dynamics_model,
                    model_trainer,
                    cfg.overrides,
                    replay_buffer,
                    work_dir=work_dir,
                )
                # --------- Rollout new model and store imagined trajectories --------
                # Batch all rollouts for the next freq_train_model steps together
                rollout_model_and_populate_sac_buffer(
                    model_env,
                    replay_buffer,
                    agent,
                    sac_buffer,
                    cfg.algorithm.sac_samples_action,
                    rollout_length,
                    rollout_batch_size,
                )
                if debug_mode:
                    print(
                        f"Epoch: {epoch}. "
                        f"SAC buffer size: {len(sac_buffer)}. "
                        f"Rollout length: {rollout_length}. "
                        f"Steps: {env_steps}"
                    )
            # --------------- Agent Training -----------------
            for _ in range(cfg.overrides.num_sac_updates_per_step):
                # Mix real and imagined data according to real_data_ratio.
                use_real_data = rng.random() < cfg.algorithm.real_data_ratio
                which_buffer = replay_buffer if use_real_data else sac_buffer
                if (env_steps + 1) % cfg.overrides.sac_updates_every_steps != 0 or len(
                    which_buffer
                ) < cfg.overrides.sac_batch_size:
                    break  # only update every once in a while
                agent.sac_agent.update_parameters(
                    which_buffer,
                    cfg.overrides.sac_batch_size,
                    updates_made,
                    logger,
                    reverse_mask=True,
                )
                updates_made += 1
                if not silent and updates_made % cfg.log_frequency_agent == 0:
                    logger.dump(updates_made, save=True)
            # ------ Epoch ended (evaluate and save model) ------
            if (env_steps + 1) % cfg.overrides.epoch_length == 0:
                avg_reward = evaluate(
                    test_env, agent, cfg.algorithm.num_eval_episodes, video_recorder
                )
                logger.log_data(
                    mbrl.constants.RESULTS_LOG_NAME,
                    {
                        "epoch": epoch,
                        "env_step": env_steps,
                        "episode_reward": avg_reward,
                        "rollout_length": rollout_length,
                    },
                )
                # Checkpoint the agent whenever evaluation improves.
                if avg_reward > best_eval_reward:
                    video_recorder.save(f"{epoch}.mp4")
                    best_eval_reward = avg_reward
                    agent.sac_agent.save_checkpoint(
                        ckpt_path=os.path.join(work_dir, "sac.pth")
                    )
                epoch += 1
            env_steps += 1
            obs = next_obs
    return np.float32(best_eval_reward)
|
import pandas as pd
import numpy as np
from ..utils import format_data, cache
@cache
def Mnist(onehot=True):
    '''
    Famous digits dataset.

    Downloads the MNIST training split as CSV (network access required) and
    returns it via ``format_data``. With ``onehot=True`` the labels are
    expanded to a 10-row one-hot matrix.
    '''
    url = "https://www.python-course.eu/data/mnist/mnist_train.csv"
    df = pd.read_csv(url, header=None)
    # Column 0 holds the label; the remaining 784 columns are pixel values.
    # After the transpose, samples are stored column-wise (features x samples).
    X = df.iloc[:,1:].transpose().values
    Y = df.iloc[:,0].values
    n = Y.shape[0]
    if onehot:
        # One-hot encode: row = digit class, column = sample index.
        Z = np.zeros((10, n))
        Z[Y, np.arange(n)] = 1
        Y = Z
    # NOTE(review): n is hard-coded to 60000 here even though the actual
    # sample count is computed above — confirm the CSV always has 60000 rows.
    return format_data(X, Y, n = 60000, f = 784, out=10)
|
import { StyleSheet, Animated } from 'react-native';
import styled from 'styled-components/native';
// Scrollable page container; children are centered horizontally.
export const Container = styled(Animated.ScrollView).attrs({
  contentContainerStyle: { alignItems: 'center' },
})`
  margin: 0 30px;
`;
// White header area at the top of the screen.
export const Top = styled.View`
  background: #FFF;
  padding: 10px;
`;
export const Logo = styled.Image`
  overflow: hidden;
`;
// Navigation list wrapper with a hairline top border.
export const Nav = styled.View`
  margin-top: 30px;
  align-self: stretch;
  border-top-width: ${StyleSheet.hairlineWidth}px;
  border-top-color: rgba(255, 255, 255, 0.8);
`;
// Single (non-pressable) navigation row.
export const NavItem = styled.View`
  flex-direction: row;
  align-items: center;
  padding: 12px 0;
  border-bottom-width: ${StyleSheet.hairlineWidth}px;
  border-bottom-color: rgba(255, 255, 255, 0.8);
`;
export const NavText = styled.Text`
  font-size: 15px;
  color: #FFF;
  margin-left: 20px;
`;
// Outlined sign-out button stretched to the container width.
export const SignOutButton = styled.TouchableOpacity`
  border-width: ${StyleSheet.hairlineWidth};
  border-color: rgba(255, 255, 255, 0.8);
  border-radius: 4px;
  justify-content: center;
  align-items: center;
  padding: 12px;
  margin-top: 7px;
  align-self: stretch;
`;
export const SignOutButtonText = styled.Text`
  color: #FFF;
  font-weight: bold;
  font-size: 13px;
`;
// Pressable variant of NavItem (same visual style).
export const NavItemButton = styled.TouchableOpacity`
  flex-direction: row;
  align-items: center;
  padding: 12px 0;
  border-bottom-width: ${StyleSheet.hairlineWidth}px;
  border-bottom-color: rgba(255, 255, 255, 0.8);
`;
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 28 02:24:47 2019
@author: coby_
Practica 10: uso de librerias
"""
import datetime

# Compute the next payday ("quincena"): the 15th of the current month, or the
# last day of the month once the 15th has already passed.
hoy = datetime.date.today()
mes = hoy.month
year = hoy.year

if hoy.day > 15:
    # Past the 15th: payday is the last day of this month, computed as the
    # first day of the following month minus one day (handles December by
    # rolling over to January of the next year).
    if mes == 12:
        mes = 1
        year += 1
    else:
        mes += 1
    quincena = datetime.date(year, mes, 1) - datetime.timedelta(days=1)
else:
    quincena = datetime.date(year, mes, 15)

falta = quincena - hoy
print("Falta para que paguen ", falta.days)
print("Fecha estimada del pago es ", quincena)
|
# -*- coding: utf8 -*-
'''
Basic load balancer definitions
@author: Luis Barrios Hernández
@version: 1.1
'''
class LoadBalancer(object):
    '''
    These objects determine the virtual machine server that will host
    a virtual machine. This is an abstract base class: concrete balancing
    strategies must override assignVMServer().
    '''
    def __init__(self, databaseConnector):
        '''
        Initializes the load balancer's state
        Args:
            databaseConnector: a connector to the main server database
        '''
        self._dbConnector = databaseConnector

    def assignVMServer(self, imageId):
        '''
        Determines what virtual machine server will host an image.
        Args:
            imageId: the image's ID
        Returns:
            a tuple (ID, errorMessage), where ID is the virtual machine server's ID
            and errorMessage is an error message.
        '''
        # Abstract method: subclasses implement the actual balancing policy.
        raise NotImplementedError
|
/*******************************************************************************
* Copyright (c) 2009, 2014 IBM Corp.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Eclipse Distribution License v1.0 which accompany this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Ian Craggs - initial API and implementation and/or initial documentation
* Ian Craggs - MQTT 3.1.1 support
* Ian Craggs - test8 - failure callbacks
*******************************************************************************/
/**
* @file
* Tests for the Paho Asynchronous MQTT C client
*/
#include "MQTTAsync.h"
#include <string.h>
#include <stdlib.h>
#if !defined(_WINDOWS)
#include <sys/time.h>
#include <sys/socket.h>
#include <unistd.h>
#include <errno.h>
#else
#include <windows.h>
#endif
/* Number of elements in a statically-sized array. */
#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
/* Print minimal usage information and abort the test run. */
void usage(void)
{
	printf("help!!\n");
	exit(EXIT_FAILURE);
}
/* Command-line configurable test options; parsed by getopts(). */
struct Options
{
	char* connection;      /**< connection to system under test. */
	int verbose;           /**< 1 to enable debug-level logging. */
	int test_no;           /**< run only this test number; -1 runs all. */
	int size;              /**< size of big message */
	int MQTTVersion;       /**< MQTT protocol version to use. */
	int iterations;        /**< number of times to repeat the tests. */
} options =
{
	"iot.eclipse.org:1883",  /* default public test broker */
	0,
	-1,
	10000,
	MQTTVERSION_DEFAULT,
	1,
};
/* Parse command-line arguments into the global `options` struct.
   A flag given without its required value calls usage(), which exits. */
void getopts(int argc, char** argv)
{
	int count = 1;

	while (count < argc)
	{
		if (strcmp(argv[count], "--test_no") == 0)
		{
			if (++count < argc)
				options.test_no = atoi(argv[count]);
			else
				usage();
		}
		else if (strcmp(argv[count], "--size") == 0)
		{
			if (++count < argc)
				options.size = atoi(argv[count]);
			else
				usage();
		}
		else if (strcmp(argv[count], "--connection") == 0)
		{
			if (++count < argc)
				options.connection = argv[count];
			else
				usage();
		}
		else if (strcmp(argv[count], "--MQTTversion") == 0)
		{
			if (++count < argc)
			{
				options.MQTTVersion = atoi(argv[count]);
				printf("setting MQTT version to %d\n", options.MQTTVersion);
			}
			else
				usage();
		}
		else if (strcmp(argv[count], "--iterations") == 0)
		{
			if (++count < argc)
				options.iterations = atoi(argv[count]);
			else
				usage();
		}
		else if (strcmp(argv[count], "--verbose") == 0)
			options.verbose = 1;
		count++;
	}
}
#if 0
#include <logaX.h>   /* For general log messages                      */
#define MyLog logaLine
#else
#define LOGA_DEBUG 0
#define LOGA_INFO 1
#include <stdarg.h>
#include <time.h>
#include <sys/timeb.h>
/* Timestamped printf-style logger; DEBUG-level messages are suppressed
   unless --verbose was given.
   NOTE(review): ftime()/struct timeb are obsolete POSIX APIs — consider
   clock_gettime() when modernizing. */
void MyLog(int LOGA_level, char* format, ...)
{
	static char msg_buf[256];
	va_list args;
	struct timeb ts;
	struct tm *timeinfo;

	if (LOGA_level == LOGA_DEBUG && options.verbose == 0)
		return;

	ftime(&ts);
	timeinfo = localtime(&ts.time);
	/* 80 is a safe bound for the timestamp; msg_buf holds 256 bytes. */
	strftime(msg_buf, 80, "%Y%m%d %H%M%S", timeinfo);
	sprintf(&msg_buf[strlen(msg_buf)], ".%.3hu ", ts.millitm);

	va_start(args, format);
	/* Bounded append of the formatted message; truncates if too long. */
	vsnprintf(&msg_buf[strlen(msg_buf)], sizeof(msg_buf) - strlen(msg_buf), format, args);
	va_end(args);

	printf("%s\n", msg_buf);
	fflush(stdout);
}
#endif
/* Per-platform timestamp capture used with elapsed() to time tests,
   plus the mqsleep() convenience macro. */
#if defined(WIN32) || defined(_WINDOWS)
#define mqsleep(A) Sleep(1000*A)
#define START_TIME_TYPE DWORD
static DWORD start_time = 0;
/* Windows: millisecond tick count since boot. */
START_TIME_TYPE start_clock(void)
{
return GetTickCount();
}
#elif defined(AIX)
#define mqsleep sleep
#define START_TIME_TYPE struct timespec
/* AIX: nanosecond-resolution realtime clock. */
START_TIME_TYPE start_clock(void)
{
static struct timespec start;
clock_gettime(CLOCK_REALTIME, &start);
return start;
}
#else
#define mqsleep sleep
#define START_TIME_TYPE struct timeval
/* TODO - unused - remove? static struct timeval start_time; */
/* POSIX: microsecond-resolution wall-clock time. */
START_TIME_TYPE start_clock(void)
{
struct timeval start_time;
gettimeofday(&start_time, NULL);
return start_time;
}
#endif
/* Milliseconds elapsed since a start_clock() timestamp, per platform. */
#if defined(WIN32)
long elapsed(START_TIME_TYPE start_time)
{
return GetTickCount() - start_time;
}
#elif defined(AIX)
#define assert(a)
long elapsed(struct timespec start)
{
struct timespec now, res;
clock_gettime(CLOCK_REALTIME, &now);
ntimersub(now, start, res);
return (res.tv_sec)*1000L + (res.tv_nsec)/1000000L;
}
#else
/* NOTE(review): this branch tests WIN32 only, while the block above also
   tests _WINDOWS - a _WINDOWS-only build would fall through to this
   gettimeofday() version.  Confirm WIN32 is always defined on Windows. */
long elapsed(START_TIME_TYPE start_time)
{
struct timeval now, res;
gettimeofday(&now, NULL);
timersub(&now, &start_time, &res);
return (res.tv_sec)*1000 + (res.tv_usec)/1000;
}
#endif
/* assert()/assert1() route through myassert(), which counts the result
   and records failures in the JUnit XML report as well as printing them.
   NOTE(review): on AIX the bare assert(a) macro defined above is
   redefined here without an #undef - confirm that is intentional. */
#define assert(a, b, c, d) myassert(__FILE__, __LINE__, a, b, c, d)
#define assert1(a, b, c, d, e) myassert(__FILE__, __LINE__, a, b, c, d, e)
int tests = 0; /* total assertions executed so far */
int failures = 0; /* assertions that failed so far */
FILE* xml; /* JUnit-style XML report stream */
START_TIME_TYPE global_start_time; /* start timestamp of the current test case */
char output[3000]; /* buffered <failure> elements for the current test */
char* cur_output = output; /* write cursor into output[] */
/**
 * Close the current <testcase> element in the JUnit XML report: emit
 * the elapsed-time attribute, flush any buffered failure output, and
 * rewind the shared output buffer for the next test.
 */
void write_test_result(void)
{
	long ms = elapsed(global_start_time);

	fprintf(xml, " time=\"%ld.%.3ld\" >\n", ms / 1000, ms % 1000);
	if (cur_output != output)
	{
		/* failures were buffered by myassert(); write and reset */
		fprintf(xml, "%s", output);
		cur_output = output;
	}
	fprintf(xml, "</testcase>\n");
}
/**
 * Implementation behind the assert()/assert1() macros.  Counts every
 * check in 'tests'; on failure it increments 'failures', prints the
 * caller-supplied printf-style detail, and buffers a <failure> element
 * for the XML report.  Successes are only logged at debug level.
 */
void myassert(char* filename, int lineno, char* description, int value, char* format, ...)
{
	++tests;
	if (value)
		MyLog(LOGA_DEBUG, "Assertion succeeded, file %s, line %d, description: %s", filename, lineno, description);
	else
	{
		va_list args;

		++failures;
		printf("Assertion failed, file %s, line %d, description: %s\n", filename, lineno, description);
		va_start(args, format);
		vprintf(format, args);
		va_end(args);
		cur_output += sprintf(cur_output, "<failure type=\"%s\">file %s, line %d </failure>\n",
				description, filename, lineno);
	}
}
/* Set non-zero by the final callback of a test to end its wait loop
   (multi-client tests count completions up to the client total). */
volatile int test_finished = 0;
/* Topic used by the test 1 and test 4 callback chains. */
char* test_topic = "async test topic";
/* Final callback in the test 1 chain: the disconnect completed, so
   signal the main wait loop that the test is over. */
void test1_onDisconnect(void* context, MQTTAsync_successData* response)
{
	MQTTAsync client = (MQTTAsync)context;

	MyLog(LOGA_DEBUG, "In onDisconnect callback %p", client);
	test_finished = 1;
}
/* Unsubscribe succeeded: kick off an asynchronous disconnect whose
   success callback (test1_onDisconnect) finishes the test. */
void test1_onUnsubscribe(void* context, MQTTAsync_successData* response)
{
	MQTTAsync client = (MQTTAsync)context;
	MQTTAsync_disconnectOptions dopts = MQTTAsync_disconnectOptions_initializer;
	int rc;

	MyLog(LOGA_DEBUG, "In onUnsubscribe onSuccess callback %p", client);
	dopts.context = client;
	dopts.onSuccess = test1_onDisconnect;
	rc = MQTTAsync_disconnect(client, &dopts);
	assert("Disconnect successful", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
}
/**
 * Test 1 message callback.  The first arrival triggers a second QoS 2
 * publication; any later arrival starts the unsubscribe that leads to
 * disconnect.  Always acknowledges the message by returning 1.
 */
int test1_messageArrived(void* context, char* topicName, int topicLen, MQTTAsync_message* message)
{
	MQTTAsync client = (MQTTAsync)context;
	static int message_count = 0;   /* arrivals across all invocations */
	int rc;

	MyLog(LOGA_DEBUG, "In messageArrived callback %p", client);
	++message_count;
	if (message_count == 1)
	{
		MQTTAsync_message msg = MQTTAsync_message_initializer;
		MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;

		msg.payload = "a much longer message that we can shorten to the extent that we need to payload up to 11";
		msg.payloadlen = 11;
		msg.qos = 2;
		msg.retained = 0;
		rc = MQTTAsync_sendMessage(client, test_topic, &msg, &ropts);
		assert("Good rc from send", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	}
	else
	{
		MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;

		ropts.context = client;
		ropts.onSuccess = test1_onUnsubscribe;
		rc = MQTTAsync_unsubscribe(client, test_topic, &ropts);
		assert("Unsubscribe successful", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	}
	/* ownership of the message and topic transfers to this callback */
	MQTTAsync_freeMessage(&message);
	MQTTAsync_free(topicName);
	return 1;
}
/* Subscribe succeeded: publish the first QoS 2 message to the test
   topic, starting the send/receive round trip. */
void test1_onSubscribe(void* context, MQTTAsync_successData* response)
{
	MQTTAsync client = (MQTTAsync)context;
	MQTTAsync_message msg = MQTTAsync_message_initializer;
	int rc;

	MyLog(LOGA_DEBUG, "In subscribe onSuccess callback %p granted qos %d", client, response->alt.qos);
	msg.payload = "a much longer message that we can shorten to the extent that we need to payload up to 11";
	msg.payloadlen = 11;
	msg.qos = 2;
	msg.retained = 0;
	rc = MQTTAsync_send(client, test_topic, msg.payloadlen, msg.payload, msg.qos, msg.retained, NULL);
	assert("Good rc from send", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
}
/* Connect succeeded: subscribe (QoS 2) to the test topic; a subscribe
   failure ends the test immediately so the wait loop cannot hang. */
void test1_onConnect(void* context, MQTTAsync_successData* response)
{
	MQTTAsync client = (MQTTAsync)context;
	MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;
	int rc;

	MyLog(LOGA_DEBUG, "In connect onSuccess callback, context %p", context);
	ropts.context = client;
	ropts.onSuccess = test1_onSubscribe;
	rc = MQTTAsync_subscribe(client, test_topic, 2, &ropts);
	assert("Good rc from subscribe", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		test_finished = 1;
}
/*********************************************************************
Test1: Basic connect, subscribe, send and receive.
*********************************************************************/
/**
 * Test 1: basic asynchronous connect, subscribe, send and receive.
 * Drives the test1_* callback chain and waits for test_finished.
 *
 * BUG FIX: the original assigned rc = 0 immediately after
 * MQTTAsync_connect(), which made the following assert always pass and
 * hid real connect failures.  (Unused locals subsqos/test_topic removed.)
 *
 * @param options global test options (connection URI, MQTT version)
 * @return cumulative failure count (0 means the test passed)
 */
int test1(struct Options options)
{
	MQTTAsync c;
	MQTTAsync_connectOptions opts = MQTTAsync_connectOptions_initializer;
	MQTTAsync_willOptions wopts = MQTTAsync_willOptions_initializer;
	int rc = 0;

	MyLog(LOGA_INFO, "Starting test 1 - asynchronous connect");
	fprintf(xml, "<testcase classname=\"test4\" name=\"asynchronous connect\"");
	global_start_time = start_clock();
	rc = MQTTAsync_create(&c, options.connection, "async_test",
			MQTTCLIENT_PERSISTENCE_DEFAULT, NULL);
	assert("good rc from create", rc == MQTTASYNC_SUCCESS, "rc was %d\n", rc);
	if (rc != MQTTASYNC_SUCCESS)
	{
		MQTTAsync_destroy(&c);
		goto exit;
	}
	rc = MQTTAsync_setCallbacks(c, c, NULL, test1_messageArrived, NULL);
	assert("Good rc from setCallbacks", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	opts.keepAliveInterval = 20;
	opts.cleansession = 1;
	opts.username = "testuser";
	opts.password = "testpassword";
	opts.MQTTVersion = options.MQTTVersion;
	/* populate the will options, then disable them again: exercises the
	   will-option handling without actually registering a will */
	opts.will = &wopts;
	opts.will->message = "will message";
	opts.will->qos = 1;
	opts.will->retained = 0;
	opts.will->topicName = "will topic";
	opts.will = NULL;
	opts.onSuccess = test1_onConnect;
	opts.onFailure = NULL;
	opts.context = c;
	MyLog(LOGA_DEBUG, "Connecting");
	rc = MQTTAsync_connect(c, &opts);
	assert("Good rc from connect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		goto exit;
	/* the callback chain sets test_finished when the disconnect completes */
	while (!test_finished)
#if defined(WIN32)
		Sleep(100);
#else
		usleep(10000L);
#endif
	MQTTAsync_destroy(&c);
exit:
	MyLog(LOGA_INFO, "TEST1: test %s. %d tests run, %d failures.",
			(failures == 0) ? "passed" : "failed", tests, failures);
	write_test_result();
	return failures;
}
/* Number of times the test 2 connect attempt failed (expected: 1). */
int test2_onFailure_called = 0;
/* Expected path for test 2: the connect attempt times out and fails. */
void test2_onFailure(void* context, MQTTAsync_failureData* response)
{
	MyLog(LOGA_DEBUG, "In connect onFailure callback, context %p", context);
	++test2_onFailure_called;
	test_finished = 1;
}
/* Unexpected path for test 2: connecting to the unreachable address
   must not succeed, so record a hard assertion failure. */
void test2_onConnect(void* context, MQTTAsync_successData* response)
{
	MyLog(LOGA_DEBUG, "In connect onSuccess callback, context %p\n", context);
	assert("Connect should not succeed", 0, "connect success callback was called", 0);
	test_finished = 1;
}
/*********************************************************************
Test2: connect timeout
*********************************************************************/
/**
 * Test 2: connect timeout.  Connects to an unroutable address/port and
 * expects the connect onFailure callback to fire exactly once within
 * the 5 second timeout.
 *
 * BUG FIX: the original assigned rc = 0 right after MQTTAsync_connect(),
 * defeating the assert and the failure check below it.  (Unused locals
 * subsqos/test_topic removed.)
 *
 * @param options global test options (MQTT version etc.)
 * @return cumulative failure count (0 means the test passed)
 */
int test2(struct Options options)
{
	MQTTAsync c;
	MQTTAsync_connectOptions opts = MQTTAsync_connectOptions_initializer;
	MQTTAsync_willOptions wopts = MQTTAsync_willOptions_initializer;
	int rc = 0;

	test_finished = 0;
	MyLog(LOGA_INFO, "Starting test 2 - connect timeout");
	fprintf(xml, "<testcase classname=\"test4\" name=\"connect timeout\"");
	global_start_time = start_clock();
	/* address chosen to be unreachable so the connect attempt hangs */
	rc = MQTTAsync_create(&c, "tcp://9.20.96.160:66", "connect timeout",
			MQTTCLIENT_PERSISTENCE_DEFAULT, NULL);
	assert("good rc from create", rc == MQTTASYNC_SUCCESS, "rc was %d\n", rc);
	if (rc != MQTTASYNC_SUCCESS)
	{
		MQTTAsync_destroy(&c);
		goto exit;
	}
	rc = MQTTAsync_setCallbacks(c, c, NULL, test1_messageArrived, NULL);
	assert("Good rc from setCallbacks", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	opts.connectTimeout = 5;
	opts.keepAliveInterval = 20;
	opts.cleansession = 1;
	opts.username = "testuser";
	/* exercise the binary password option instead of opts.password */
	opts.binarypwd.data = "testpassword";
	opts.binarypwd.len = (int)strlen(opts.binarypwd.data);
	opts.MQTTVersion = options.MQTTVersion;
	/* populate then disable the will options (exercises the copy code) */
	opts.will = &wopts;
	opts.will->message = "will message";
	opts.will->qos = 1;
	opts.will->retained = 0;
	opts.will->topicName = "will topic";
	opts.will = NULL;
	opts.onSuccess = test2_onConnect;
	opts.onFailure = test2_onFailure;
	opts.context = c;
	MyLog(LOGA_DEBUG, "Connecting");
	rc = MQTTAsync_connect(c, &opts);
	assert("Good rc from connect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		goto exit;
	while (!test_finished)
#if defined(WIN32)
		Sleep(100);
#else
		usleep(10000L);
#endif
	MQTTAsync_destroy(&c);
exit:
	assert("Connect onFailure should be called once", test2_onFailure_called == 1,
			"connect onFailure was called %d times", test2_onFailure_called);
	MyLog(LOGA_INFO, "TEST2: test %s. %d tests run, %d failures.",
			(failures == 0) ? "passed" : "failed", tests, failures);
	write_test_result();
	return failures;
}
/* Per-client state for test 3, which runs several clients at once. */
typedef struct
{
MQTTAsync c; /* client handle */
int index; /* position in the clientdata array */
char clientid[24]; /* unique client identifier */
char test_topic[100]; /* per-client topic name */
int message_count; /* messages received so far by this client */
} client_data;
/* One test 3 client finished disconnecting: count it towards the
   num_clients total the main loop waits for. */
void test3_onDisconnect(void* context, MQTTAsync_successData* response)
{
	client_data* data = (client_data*)context;

	MyLog(LOGA_DEBUG, "In onDisconnect callback for client \"%s\"", data->clientid);
	test_finished++;
}
/* Delivery confirmation for the QoS 0 publication in test 3; logging only. */
void test3_onPublish(void* context, MQTTAsync_successData* response)
{
	client_data* data = (client_data*)context;

	MyLog(LOGA_DEBUG, "In QoS 0 onPublish callback for client \"%s\"", data->clientid);
}
/* Unsubscribe succeeded for one test 3 client: disconnect it, with
   test3_onDisconnect counting the completion. */
void test3_onUnsubscribe(void* context, MQTTAsync_successData* response)
{
	client_data* data = (client_data*)context;
	MQTTAsync_disconnectOptions dopts = MQTTAsync_disconnectOptions_initializer;
	int rc;

	MyLog(LOGA_DEBUG, "In onUnsubscribe onSuccess callback \"%s\"", data->clientid);
	dopts.context = data;
	dopts.onSuccess = test3_onDisconnect;
	rc = MQTTAsync_disconnect(data->c, &dopts);
	assert("Disconnect successful", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
}
/**
 * Test 3 message callback.  Per-client sequence driven by the client's
 * own message_count: first arrival -> publish QoS 1, second -> publish
 * QoS 0 (with delivery callback), later -> unsubscribe.  Returns 1 to
 * acknowledge every message.
 */
int test3_messageArrived(void* context, char* topicName, int topicLen, MQTTAsync_message* message)
{
	client_data* data = (client_data*)context;
	int rc;

	MyLog(LOGA_DEBUG, "In messageArrived callback \"%s\" message count ", data->clientid);
	data->message_count++;
	if (data->message_count == 1)
	{
		MQTTAsync_message msg = MQTTAsync_message_initializer;
		MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;

		msg.payload = "a much longer message that we can shorten to the extent that we need to payload up to 11";
		msg.payloadlen = 25;
		msg.qos = 1;
		msg.retained = 0;
		rc = MQTTAsync_sendMessage(data->c, data->test_topic, &msg, &ropts);
		assert("Good rc from publish", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	}
	else if (data->message_count == 2)
	{
		MQTTAsync_message msg = MQTTAsync_message_initializer;
		MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;

		msg.payload = "a QoS 0 message that we can shorten to the extent that we need to payload up to 11";
		msg.payloadlen = 29;
		msg.qos = 0;
		msg.retained = 0;
		ropts.context = data;
		ropts.onSuccess = test3_onPublish;
		rc = MQTTAsync_sendMessage(data->c, data->test_topic, &msg, &ropts);
		assert("Good rc from publish", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	}
	else
	{
		MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;

		ropts.context = data;
		ropts.onSuccess = test3_onUnsubscribe;
		rc = MQTTAsync_unsubscribe(data->c, data->test_topic, &ropts);
		assert("Unsubscribe successful", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	}
	MQTTAsync_freeMessage(&message);
	MQTTAsync_free(topicName);
	return 1;
}
/* Subscribe succeeded for one test 3 client: publish the first QoS 2
   message on that client's own topic. */
void test3_onSubscribe(void* context, MQTTAsync_successData* response)
{
	client_data* data = (client_data*)context;
	MQTTAsync_message msg = MQTTAsync_message_initializer;
	int rc;

	MyLog(LOGA_DEBUG, "In subscribe onSuccess callback \"%s\"", data->clientid);
	msg.payload = "a much longer message that we can shorten to the extent that we need to payload up to 11";
	msg.payloadlen = 11;
	msg.qos = 2;
	msg.retained = 0;
	rc = MQTTAsync_send(data->c, data->test_topic, msg.payloadlen, msg.payload, msg.qos, msg.retained, NULL);
	assert("Good rc from publish", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
}
/* Connect succeeded for one test 3 client: subscribe (QoS 2) to that
   client's topic; a subscribe failure counts the client as finished. */
void test3_onConnect(void* context, MQTTAsync_successData* response)
{
	client_data* data = (client_data*)context;
	MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;
	int rc;

	MyLog(LOGA_DEBUG, "In connect onSuccess callback, \"%s\"", data->clientid);
	ropts.context = data;
	ropts.onSuccess = test3_onSubscribe;
	rc = MQTTAsync_subscribe(data->c, data->test_topic, 2, &ropts);
	assert("Good rc from subscribe", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		test_finished++;
}
/* Connect failed for one test 3 client: record the assertion failure,
   log whatever detail the library provided, and count the client as
   finished so the main loop can still terminate. */
void test3_onFailure(void* context, MQTTAsync_failureData* response)
{
	client_data* data = (client_data*)context;
	MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;

	assert("Should have connected", 0, "%s failed to connect\n", data->clientid);
	MyLog(LOGA_DEBUG, "In connect onFailure callback, \"%s\" rc %d\n", data->clientid, response ? response->code : -999);
	if (response && response->message)
		MyLog(LOGA_DEBUG, "In connect onFailure callback, \"%s\"\n", response->message);
	test_finished++;
}
/*********************************************************************
Test3: More than one client object - simultaneous working.
*********************************************************************/
/**
 * Test 3: create num_clients clients and run connect/subscribe/publish/
 * unsubscribe/disconnect on all of them simultaneously.  Each client's
 * progress lives in its client_data context; test_finished counts
 * clients that have completed (or failed to connect).
 *
 * @param options global test options (connection URI, MQTT version)
 * @return cumulative failure count (0 means the test passed)
 */
int test3(struct Options options)
{
#define num_clients 10
int subsqos = 2;
MQTTAsync_connectOptions opts = MQTTAsync_connectOptions_initializer;
MQTTAsync_willOptions wopts = MQTTAsync_willOptions_initializer;
int rc = 0;
int i;
client_data clientdata[num_clients];
test_finished = 0;
MyLog(LOGA_INFO, "Starting test 3 - multiple connections");
fprintf(xml, "<testcase classname=\"test4\" name=\"multiple connections\"");
global_start_time = start_clock();
for (i = 0; i < num_clients; ++i)
{
/* each client gets a unique id and its own topic */
sprintf(clientdata[i].clientid, "async_test3_num_%d", i);
sprintf(clientdata[i].test_topic, "async test3 topic num %d", i);
clientdata[i].index = i;
clientdata[i].message_count = 0;
rc = MQTTAsync_create(&(clientdata[i].c), options.connection, clientdata[i].clientid,
MQTTCLIENT_PERSISTENCE_NONE, NULL);
assert("good rc from create", rc == MQTTASYNC_SUCCESS, "rc was %d\n", rc);
rc = MQTTAsync_setCallbacks(clientdata[i].c, &clientdata[i], NULL, test3_messageArrived, NULL);
assert("Good rc from setCallbacks", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
opts.keepAliveInterval = 20;
opts.cleansession = 1;
opts.username = "testuser";
opts.password = "testpassword";
opts.MQTTVersion = options.MQTTVersion;
/* NOTE(review): unlike test1/test2, the will options stay enabled here
   (opts.will is never reset to NULL) - confirm that is intentional */
opts.will = &wopts;
opts.will->message = "will message";
opts.will->qos = 1;
opts.will->retained = 0;
opts.will->topicName = "will topic";
opts.onSuccess = test3_onConnect;
opts.onFailure = test3_onFailure;
opts.context = &clientdata[i];
MyLog(LOGA_DEBUG, "Connecting");
rc = MQTTAsync_connect(clientdata[i].c, &opts);
assert("Good rc from connect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
}
/* wait until every client has disconnected or failed to connect */
while (test_finished < num_clients)
{
MyLog(LOGA_DEBUG, "num_clients %d test_finished %d\n", num_clients, test_finished);
#if defined(WIN32)
Sleep(100);
#else
usleep(10000L);
#endif
}
MyLog(LOGA_DEBUG, "TEST3: destroying clients");
for (i = 0; i < num_clients; ++i)
MQTTAsync_destroy(&clientdata[i].c);
//exit:
MyLog(LOGA_INFO, "TEST3: test %s. %d tests run, %d failures.",
(failures == 0) ? "passed" : "failed", tests, failures);
write_test_result();
return failures;
}
/* Payload published in test 4 and compared byte-for-byte on receipt. */
void* test4_payload = NULL;
int test4_payloadlen = 0;
/* Delivery confirmation for the test 4 publications; logging only. */
void test4_onPublish(void* context, MQTTAsync_successData* response)
{
	MQTTAsync client = (MQTTAsync)context;

	MyLog(LOGA_DEBUG, "In publish onSuccess callback, context %p", client);
}
/**
 * Test 4 message callback: verify the received big message matches the
 * generated payload byte-for-byte, then re-publish it at QoS 1, then
 * QoS 0, and finally unsubscribe.  Returns 1 to acknowledge.
 *
 * BUG FIX: inside the mismatch branch the original asserted that the
 * bytes were UNEQUAL (always true there), so corrupted payloads passed
 * the test silently.  The assertion now checks for equality, which
 * correctly fails and reports the offending byte.
 */
int test4_messageArrived(void* context, char* topicName, int topicLen, MQTTAsync_message* message)
{
	MQTTAsync c = (MQTTAsync)context;
	static int message_count = 0;
	int rc, i;

	MyLog(LOGA_DEBUG, "In messageArrived callback %p", c);
	assert("Message size correct", message->payloadlen == test4_payloadlen,
			"message size was %d", message->payloadlen);
	for (i = 0; i < options.size; ++i)
	{
		if (((char*)test4_payload)[i] != ((char*)message->payload)[i])
		{
			/* report the first differing byte as a failure */
			assert("Message contents correct", ((char*)test4_payload)[i] == ((char*)message->payload)[i],
					"message content was %c", ((char*)message->payload)[i]);
			break;
		}
	}
	if (++message_count == 1)
	{
		MQTTAsync_message pubmsg = MQTTAsync_message_initializer;
		MQTTAsync_responseOptions opts = MQTTAsync_responseOptions_initializer;

		pubmsg.payload = test4_payload;
		pubmsg.payloadlen = test4_payloadlen;
		pubmsg.qos = 1;
		pubmsg.retained = 0;
		opts.onSuccess = test4_onPublish;
		opts.context = c;
		/* NOTE(review): rc from sendMessage is not asserted here or in
		   the QoS 0 branch - confirm whether that is intentional */
		rc = MQTTAsync_sendMessage(c, test_topic, &pubmsg, &opts);
	}
	else if (message_count == 2)
	{
		MQTTAsync_message pubmsg = MQTTAsync_message_initializer;
		MQTTAsync_responseOptions opts = MQTTAsync_responseOptions_initializer;

		pubmsg.payload = test4_payload;
		pubmsg.payloadlen = test4_payloadlen;
		pubmsg.qos = 0;
		pubmsg.retained = 0;
		opts.onSuccess = test4_onPublish;
		opts.context = c;
		rc = MQTTAsync_sendMessage(c, test_topic, &pubmsg, &opts);
	}
	else
	{
		MQTTAsync_responseOptions opts = MQTTAsync_responseOptions_initializer;

		opts.onSuccess = test1_onUnsubscribe;
		opts.context = c;
		rc = MQTTAsync_unsubscribe(c, test_topic, &opts);
		assert("Unsubscribe successful", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	}
	MQTTAsync_freeMessage(&message);
	MQTTAsync_free(topicName);
	return 1;
}
/**
 * Test 4 subscribe-success callback: build a pseudo-random payload of
 * options.size bytes (fixed seed, so it is reproducible) and publish
 * it at QoS 2 to start the big-message exchange.
 *
 * BUG FIX: the rc from MQTTAsync_send() was previously ignored; it is
 * now checked like every other send in this file.
 */
void test4_onSubscribe(void* context, MQTTAsync_successData* response)
{
	MQTTAsync c = (MQTTAsync)context;
	MQTTAsync_message pubmsg = MQTTAsync_message_initializer;
	int rc, i;

	MyLog(LOGA_DEBUG, "In subscribe onSuccess callback %p", c);
	/* NOTE(review): malloc result is not checked - with the default
	   10000-byte size a failure is unlikely, but confirm acceptable */
	pubmsg.payload = test4_payload = malloc(options.size);
	pubmsg.payloadlen = test4_payloadlen = options.size;
	srand(33);   /* fixed seed: payload is identical on every run */
	for (i = 0; i < options.size; ++i)
		((char*)pubmsg.payload)[i] = rand() % 256;
	pubmsg.qos = 2;
	pubmsg.retained = 0;
	rc = MQTTAsync_send(c, test_topic, pubmsg.payloadlen, pubmsg.payload, pubmsg.qos, pubmsg.retained, NULL);
	assert("Good rc from send", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
}
/* Connect succeeded for test 4: subscribe (QoS 2) to the test topic;
   a subscribe failure ends the test so the wait loop cannot hang. */
void test4_onConnect(void* context, MQTTAsync_successData* response)
{
	MQTTAsync client = (MQTTAsync)context;
	MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;
	int rc;

	MyLog(LOGA_DEBUG, "In connect onSuccess callback, context %p", context);
	ropts.context = client;
	ropts.onSuccess = test4_onSubscribe;
	rc = MQTTAsync_subscribe(client, test_topic, 2, &ropts);
	assert("Good rc from subscribe", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		test_finished = 1;
}
/*********************************************************************
Test4: Send and receive big messages
*********************************************************************/
/**
 * Test 4: send and receive big messages (options.size bytes), driving
 * the test4_* callback chain and waiting for test_finished.
 *
 * BUG FIX: the original assigned rc = 0 immediately after
 * MQTTAsync_connect(), making the assert and failure check below it
 * meaningless.  (Unused locals subsqos/test_topic removed.)
 *
 * @param options global test options (connection URI, message size)
 * @return cumulative failure count (0 means the test passed)
 */
int test4(struct Options options)
{
	MQTTAsync c;
	MQTTAsync_connectOptions opts = MQTTAsync_connectOptions_initializer;
	MQTTAsync_willOptions wopts = MQTTAsync_willOptions_initializer;
	int rc = 0;

	test_finished = failures = 0;
	MyLog(LOGA_INFO, "Starting test 4 - big messages");
	fprintf(xml, "<testcase classname=\"test4\" name=\"big messages\"");
	global_start_time = start_clock();
	rc = MQTTAsync_create(&c, options.connection, "async_test_4",
			MQTTCLIENT_PERSISTENCE_DEFAULT, NULL);
	assert("good rc from create", rc == MQTTASYNC_SUCCESS, "rc was %d\n", rc);
	if (rc != MQTTASYNC_SUCCESS)
	{
		MQTTAsync_destroy(&c);
		goto exit;
	}
	rc = MQTTAsync_setCallbacks(c, c, NULL, test4_messageArrived, NULL);
	assert("Good rc from setCallbacks", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	opts.keepAliveInterval = 20;
	opts.cleansession = 1;
	opts.username = "testuser";
	opts.password = "testpassword";
	opts.MQTTVersion = options.MQTTVersion;
	/* populate then disable the will options (exercises the copy code) */
	opts.will = &wopts;
	opts.will->message = "will message";
	opts.will->qos = 1;
	opts.will->retained = 0;
	opts.will->topicName = "will topic";
	opts.will = NULL;
	opts.onSuccess = test4_onConnect;
	opts.onFailure = NULL;
	opts.context = c;
	MyLog(LOGA_DEBUG, "Connecting");
	rc = MQTTAsync_connect(c, &opts);
	assert("Good rc from connect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		goto exit;
	while (!test_finished)
#if defined(WIN32)
		Sleep(100);
#else
		usleep(1000L);
#endif
	MQTTAsync_destroy(&c);
exit:
	MyLog(LOGA_INFO, "TEST4: test %s. %d tests run, %d failures.",
			(failures == 0) ? "passed" : "failed", tests, failures);
	write_test_result();
	return failures;
}
/* Expected path for test 5: the broker rejects the over-long client id,
   so the connect fails; log the CONNACK return code and finish. */
void test5_onConnectFailure(void* context, MQTTAsync_failureData* response)
{
	MQTTAsync client = (MQTTAsync)context;
	MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;

	MyLog(LOGA_DEBUG, "In connect onFailure callback, context %p", context);
	MyLog(LOGA_INFO, "Connack rc is %d", response ? response->code : -999);
	test_finished = 1;
}
/**
 * Test 5 connect-success callback: some brokers accept the over-long
 * client id, so success just ends the test.
 *
 * BUG FIX: the debug message wrongly said "onFailure" even though this
 * is the success callback, making verbose logs misleading.
 */
void test5_onConnect(void* context, MQTTAsync_successData* response)
{
	MQTTAsync c = (MQTTAsync)context;
	MQTTAsync_responseOptions opts = MQTTAsync_responseOptions_initializer;

	MyLog(LOGA_DEBUG, "In connect onSuccess callback, context %p", context);
	test_finished = 1;
}
/********************************************************************
Test5: Connack return codes
*********************************************************************/
/**
 * Test 5: CONNACK return codes.  Connects with a client id longer than
 * the MQTT 3.1 limit and verifies the client surfaces the broker's
 * response via the success/failure callbacks.
 *
 * BUG FIX: the original assigned rc = 0 immediately after
 * MQTTAsync_connect(), defeating the assert and failure check below.
 * (Unused locals subsqos/wopts/test_topic removed.)
 *
 * @param options global test options (connection URI)
 * @return cumulative failure count (0 means the test passed)
 */
int test5(struct Options options)
{
	MQTTAsync c;
	MQTTAsync_connectOptions opts = MQTTAsync_connectOptions_initializer;
	int rc = 0;

	test_finished = failures = 0;
	MyLog(LOGA_INFO, "Starting test 5 - connack return codes");
	fprintf(xml, "<testcase classname=\"test4\" name=\"connack return codes\"");
	global_start_time = start_clock();
	/* the client id deliberately exceeds the MQTT 3.1 23-byte limit */
	rc = MQTTAsync_create(&c, options.connection, "a clientid that is too long to be accepted",
			MQTTCLIENT_PERSISTENCE_DEFAULT, NULL);
	assert("good rc from create", rc == MQTTASYNC_SUCCESS, "rc was %d\n", rc);
	if (rc != MQTTASYNC_SUCCESS)
	{
		MQTTAsync_destroy(&c);
		goto exit;
	}
	rc = MQTTAsync_setCallbacks(c, c, NULL, test1_messageArrived, NULL);
	assert("Good rc from setCallbacks", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	opts.onSuccess = test5_onConnect;
	opts.onFailure = test5_onConnectFailure;
	opts.context = c;
	MyLog(LOGA_DEBUG, "Connecting");
	rc = MQTTAsync_connect(c, &opts);
	assert("Good rc from connect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		goto exit;
	while (!test_finished)
#if defined(WIN32)
		Sleep(100);
#else
		usleep(10000L);
#endif
	MQTTAsync_destroy(&c);
exit:
	MyLog(LOGA_INFO, "TEST5: test %s. %d tests run, %d failures.",
			(failures == 0) ? "passed" : "failed", tests, failures);
	write_test_result();
	return failures;
}
/* Context for test 6 callbacks: the client handle plus a flag saying
   whether this connect attempt is expected to fail. */
typedef struct
{
MQTTAsync c; /* client handle */
int should_fail; /* non-zero: the connect is expected to fail */
} test6_client_info;
/* Test 6 connect failure: only correct when this attempt was expected
   to fail (the phase with the bogus single URI). */
void test6_onConnectFailure(void* context, MQTTAsync_failureData* response)
{
	test6_client_info info = *(test6_client_info*)context;

	MyLog(LOGA_DEBUG, "In connect onFailure callback, context %p", context);
	if (response)
		MyLog(LOGA_INFO, "Connack rc is %d", response->code);
	assert("Should fail to connect", info.should_fail, "should_fail was %d", info.should_fail);
	test_finished = 1;
}
/* Test 6 connect success: only correct when this attempt was expected
   to succeed (the phase with the serverURIs fallback list). */
void test6_onConnect(void* context, MQTTAsync_successData* response)
{
	test6_client_info info = *(test6_client_info*)context;

	MyLog(LOGA_DEBUG, "In connect success callback, context %p", context);
	assert("Should connect correctly", !info.should_fail, "should_fail was %d", info.should_fail);
	test_finished = 1;
}
/********************************************************************
Test6: HA connections
*********************************************************************/
/**
 * Test 6: high-availability connections.  Phase 1 connects to a bogus
 * URI with no fallback and must fail; phase 2 uses the same bogus URI
 * on create but supplies a serverURIs list of good addresses, which
 * overrides it, and must succeed.
 *
 * BUG FIX: the original assigned rc = 0 after BOTH MQTTAsync_connect()
 * calls, defeating the asserts and failure checks that follow them.
 * (Unused locals subsqos/wopts/test_topic removed.)
 *
 * @param options global test options (connection URI, MQTT version)
 * @return cumulative failure count (0 means the test passed)
 */
int test6(struct Options options)
{
	test6_client_info cinfo;
	MQTTAsync_connectOptions opts = MQTTAsync_connectOptions_initializer;
	int rc = 0;
	char* uris[2] = {options.connection, options.connection};

	failures = 0;
	MyLog(LOGA_INFO, "Starting test 6 - HA connections");
	fprintf(xml, "<testcase classname=\"test4\" name=\"HA connections\"");
	global_start_time = start_clock();

	/* phase 1: bogus address, no fallback - the connect must fail */
	test_finished = 0;
	cinfo.should_fail = 1; /* fail to connect */
	rc = MQTTAsync_create(&cinfo.c, "tcp://rubbish:1883", "async ha connection",
			MQTTCLIENT_PERSISTENCE_DEFAULT, NULL);
	assert("good rc from create", rc == MQTTASYNC_SUCCESS, "rc was %d\n", rc);
	if (rc != MQTTASYNC_SUCCESS)
	{
		MQTTAsync_destroy(&cinfo.c);
		goto exit;
	}
	rc = MQTTAsync_setCallbacks(cinfo.c, cinfo.c, NULL, test1_messageArrived, NULL);
	assert("Good rc from setCallbacks", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	opts.onSuccess = test6_onConnect;
	opts.onFailure = test6_onConnectFailure;
	opts.context = &cinfo;
	opts.MQTTVersion = options.MQTTVersion;
	MyLog(LOGA_DEBUG, "Connecting");
	rc = MQTTAsync_connect(cinfo.c, &opts);
	assert("Good rc from connect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		goto exit;
	while (!test_finished)
#if defined(WIN32)
		Sleep(100);
#else
		usleep(10000L);
#endif

	/* phase 2: same bogus create URI, but the serverURIs list of good
	   addresses takes precedence - the connect must succeed */
	test_finished = 0;
	cinfo.should_fail = 0; /* should connect */
	rc = MQTTAsync_create(&cinfo.c, "tcp://rubbish:1883", "async ha connection",
			MQTTCLIENT_PERSISTENCE_DEFAULT, NULL);
	assert("good rc from create", rc == MQTTASYNC_SUCCESS, "rc was %d\n", rc);
	if (rc != MQTTASYNC_SUCCESS)
	{
		MQTTAsync_destroy(&cinfo.c);
		goto exit;
	}
	rc = MQTTAsync_setCallbacks(cinfo.c, cinfo.c, NULL, test1_messageArrived, NULL);
	assert("Good rc from setCallbacks", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	opts.onSuccess = test6_onConnect;
	opts.onFailure = test6_onConnectFailure;
	opts.context = &cinfo;
	opts.serverURIs = uris;
	opts.serverURIcount = 2;
	MyLog(LOGA_DEBUG, "Connecting");
	rc = MQTTAsync_connect(cinfo.c, &opts);
	assert("Good rc from connect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		goto exit;
	while (!test_finished)
#if defined(WIN32)
		Sleep(100);
#else
		usleep(10000L);
#endif
	MQTTAsync_destroy(&cinfo.c);
exit:
	MyLog(LOGA_INFO, "TEST6: test %s. %d tests run, %d failures.",
			(failures == 0) ? "passed" : "failed", tests, failures);
	write_test_result();
	return failures;
}
/********************************************************************
Test7: Persistence
*********************************************************************/
char* test7_topic = "C client test7"; /* topic used throughout test 7 */
int test7_messageCount = 0; /* messages received in the current test 7 phase */
/* Disconnect failed in test 7: record the assertion failure and end
   the current phase so the wait loop cannot hang. */
void test7_onDisconnectFailure(void* context, MQTTAsync_failureData* response)
{
	MQTTAsync client = (MQTTAsync)context;

	MyLog(LOGA_DEBUG, "In onDisconnect failure callback %p", client);
	assert("Successful disconnect", 0, "disconnect failed", 0);
	test_finished = 1;
}
/* Disconnect completed: signal the test 7 wait loop. */
void test7_onDisconnect(void* context, MQTTAsync_successData* response)
{
	MQTTAsync client = (MQTTAsync)context;

	MyLog(LOGA_DEBUG, "In onDisconnect callback %p", client);
	test_finished = 1;
}
/* Unsubscribe succeeded: start the disconnect whose success callback
   (test7_onDisconnect) ends the current test 7 phase. */
void test7_onUnsubscribe(void* context, MQTTAsync_successData* response)
{
	MQTTAsync client = (MQTTAsync)context;
	MQTTAsync_disconnectOptions dopts = MQTTAsync_disconnectOptions_initializer;
	int rc;

	MyLog(LOGA_DEBUG, "In onUnsubscribe onSuccess callback %p", client);
	dopts.context = client;
	dopts.onSuccess = test7_onDisconnect;
	rc = MQTTAsync_disconnect(client, &dopts);
	assert("Disconnect successful", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
}
/**
 * Test 7 message callback: just counts arrivals in test7_messageCount
 * (inspected by the test body) and acknowledges by returning 1.
 *
 * CLEANUP: removed an unused 'static int message_count' and the unused
 * local 'c' the original declared but never read.
 */
int test7_messageArrived(void* context, char* topicName, int topicLen, MQTTAsync_message* message)
{
	MyLog(LOGA_DEBUG, "Test7: received message id %d", message->msgid);
	test7_messageCount++;
	/* ownership of the message and topic transfers to this callback */
	MQTTAsync_freeMessage(&message);
	MQTTAsync_free(topicName);
	return 1;
}
/* Set once the test 7 subscription has been granted; the test body
   polls this before publishing. */
static int test7_subscribed = 0;
/* Subscribe succeeded for test 7: flag it for the polling loop. */
void test7_onSubscribe(void* context, MQTTAsync_successData* response)
{
	MQTTAsync client = (MQTTAsync)context;

	MyLog(LOGA_DEBUG, "In subscribe onSuccess callback %p granted qos %d", client, response->alt.qos);
	test7_subscribed = 1;
}
/* Connect succeeded for the main test 7 phase: subscribe (QoS 2) to
   test7_topic; a subscribe failure ends the phase immediately. */
void test7_onConnect(void* context, MQTTAsync_successData* response)
{
	MQTTAsync client = (MQTTAsync)context;
	MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;
	int rc;

	MyLog(LOGA_DEBUG, "In connect onSuccess callback, context %p", context);
	ropts.context = client;
	ropts.onSuccess = test7_onSubscribe;
	rc = MQTTAsync_subscribe(client, test7_topic, 2, &ropts);
	assert("Good rc from subscribe", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		test_finished = 1;
}
/* Connect-success callback for the cleanup pass of test 7: disconnect
   immediately (1s timeout) so the clean session discards old state. */
void test7_onConnectOnly(void* context, MQTTAsync_successData* response)
{
	MQTTAsync client = (MQTTAsync)context;
	MQTTAsync_disconnectOptions dopts = MQTTAsync_disconnectOptions_initializer;
	int rc;

	MyLog(LOGA_DEBUG, "In connect onSuccess callback, context %p", context);
	dopts.timeout = 1000;
	dopts.context = context;
	dopts.onSuccess = test7_onDisconnect;
	rc = MQTTAsync_disconnect(client, &dopts);
	assert("Good rc from disconnect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		test_finished = 1;
}
/*********************************************************************
Test7: Pending tokens
*********************************************************************/
int test7(struct Options options)
{
int subsqos = 2;
MQTTAsync c;
MQTTAsync_connectOptions opts = MQTTAsync_connectOptions_initializer;
MQTTAsync_willOptions wopts = MQTTAsync_willOptions_initializer;
int rc = 0;
MQTTAsync_message pubmsg = MQTTAsync_message_initializer;
MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;
MQTTAsync_disconnectOptions dopts = MQTTAsync_disconnectOptions_initializer;
MQTTAsync_token* tokens = NULL;
int msg_count = 6;
MyLog(LOGA_INFO, "Starting test 7 - pending tokens");
fprintf(xml, "<testcase classname=\"test4\" name=\"pending tokens\"");
global_start_time = start_clock();
test_finished = 0;
rc = MQTTAsync_create(&c, options.connection, "async_test7",
MQTTCLIENT_PERSISTENCE_DEFAULT, NULL);
assert("good rc from create", rc == MQTTASYNC_SUCCESS, "rc was %d\n", rc);
if (rc != MQTTASYNC_SUCCESS)
{
MQTTAsync_destroy(&c);
goto exit;
}
rc = MQTTAsync_setCallbacks(c, c, NULL, test7_messageArrived, NULL);
assert("Good rc from setCallbacks", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
opts.keepAliveInterval = 20;
opts.username = "testuser";
opts.password = "testpassword";
opts.MQTTVersion = options.MQTTVersion;
opts.will = &wopts;
opts.will->message = "will message";
opts.will->qos = 1;
opts.will->retained = 0;
opts.will->topicName = "will topic";
opts.will = NULL;
opts.onFailure = NULL;
opts.context = c;
opts.cleansession = 1;
opts.onSuccess = test7_onConnectOnly;
MyLog(LOGA_DEBUG, "Connecting to clean up");
rc = MQTTAsync_connect(c, &opts);
rc = 0;
assert("Good rc from connect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
if (rc != MQTTASYNC_SUCCESS)
goto exit;
while (!test_finished)
#if defined(WIN32)
Sleep(100);
#else
usleep(10000L);
#endif
test_finished = 0;
MyLog(LOGA_DEBUG, "Connecting");
opts.cleansession = 0;
opts.onSuccess = test7_onConnect;
rc = MQTTAsync_connect(c, &opts);
rc = 0;
assert("Good rc from connect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
if (rc != MQTTASYNC_SUCCESS)
goto exit;
while (!test7_subscribed)
#if defined(WIN32)
Sleep(100);
#else
usleep(10000L);
#endif
pubmsg.payload = "a much longer message that we can shorten to the extent that we need to payload up to 11";
pubmsg.payloadlen = 11;
pubmsg.qos = 2;
pubmsg.retained = 0;
rc = MQTTAsync_send(c, test_topic, pubmsg.payloadlen, pubmsg.payload, pubmsg.qos, pubmsg.retained, &ropts);
MyLog(LOGA_DEBUG, "Token was %d", ropts.token);
rc = MQTTAsync_isComplete(c, ropts.token);
/*assert("0 rc from isComplete", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);*/
rc = MQTTAsync_waitForCompletion(c, ropts.token, 5000L);
assert("Good rc from waitForCompletion", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
rc = MQTTAsync_isComplete(c, ropts.token);
assert("1 rc from isComplete", rc == 1, "rc was %d", rc);
test7_messageCount = 0;
int i = 0;
pubmsg.qos = 2;
for (i = 0; i < msg_count; ++i)
{
pubmsg.payload = "a much longer message that we can shorten to the extent that we need to payload up to 11";
pubmsg.payloadlen = 11;
//pubmsg.qos = (pubmsg.qos == 2) ? 1 : 2;
pubmsg.retained = 0;
rc = MQTTAsync_sendMessage(c, test_topic, &pubmsg, &ropts);
}
/* disconnect immediately without receiving the incoming messages */
dopts.timeout = 0;
dopts.onSuccess = test7_onDisconnect;
dopts.context = c;
MQTTAsync_disconnect(c, &dopts); /* now there should be "orphaned" publications */
while (!test_finished)
#if defined(WIN32)
Sleep(100);
#else
usleep(10000L);
#endif
test_finished = 0;
rc = MQTTAsync_getPendingTokens(c, &tokens);
assert("getPendingTokens rc == 0", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
assert("should get some tokens back", tokens != NULL, "tokens was %p", tokens);
MQTTAsync_free(tokens);
MQTTAsync_destroy(&c); /* force re-reading persistence on create */
MQTTAsync_setTraceLevel(MQTTASYNC_TRACE_ERROR);
rc = MQTTAsync_create(&c, options.connection, "async_test7", MQTTCLIENT_PERSISTENCE_DEFAULT, NULL);
assert("good rc from create", rc == MQTTASYNC_SUCCESS, "rc was %d\n", rc);
if (rc != MQTTASYNC_SUCCESS)
{
MQTTAsync_destroy(&c);
goto exit;
}
rc = MQTTAsync_getPendingTokens(c, &tokens);
assert("getPendingTokens rc == 0", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
assert("should get some tokens back", tokens != NULL, "tokens was %p", tokens);
if (tokens)
{
int i = 0;
while (tokens[i] != -1)
MyLog(LOGA_DEBUG, "Delivery token %d", tokens[i++]);
MQTTAsync_free(tokens);
//The following assertion should work, does with RSMB, but not Mosquitto
//assert1("no of tokens should be count", i == msg_count, "no of tokens %d count %d", i, msg_count);
}
rc = MQTTAsync_setCallbacks(c, c, NULL, test7_messageArrived, NULL);
assert("Good rc from setCallbacks", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
MyLog(LOGA_DEBUG, "Reconnecting");
opts.context = c;
if (MQTTAsync_connect(c, &opts) != 0)
{
assert("Good rc from connect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
goto exit;
}
#if defined(WIN32)
Sleep(5000);
#else
usleep(5000000L);
#endif
rc = MQTTAsync_getPendingTokens(c, &tokens);
assert("getPendingTokens rc == 0", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
/* assert("should get no tokens back", tokens == NULL, "tokens was %p", tokens);
assert1("no of messages should be count", test7_messageCount == msg_count, "no of tokens %d count %d",
test7_messageCount, msg_count);
assertions fail against Mosquitto - needs testing */
dopts.onFailure = test7_onDisconnectFailure;
dopts.onSuccess = test7_onDisconnect;
dopts.timeout = 1000;
MQTTAsync_disconnect(c, &dopts);
while (!test_finished)
#if defined(WIN32)
Sleep(100);
#else
usleep(10000L);
#endif
MQTTAsync_destroy(&c);
exit:
MyLog(LOGA_INFO, "TEST7: test %s. %d tests run, %d failures.",
(failures == 0) ? "passed" : "failed", tests, failures);
write_test_result();
return failures;
}
/*********************************************************************
Test8: Incomplete commands and requests
*********************************************************************/
char* test8_topic = "C client test8";   /* topic used for test8 publish/subscribe */
int test8_messageCount = 0;             /* messages received by test8_messageArrived */
int test8_subscribed = 0;               /* set by test8_onSubscribe once the subscription completes */
int test8_publishFailures = 0;          /* publish failure callbacks seen (expected when commands are interrupted) */
/* Success callback for test8 publishes: just log the delivery token. */
void test8_onPublish(void* context, MQTTAsync_successData* response)
{
	MyLog(LOGA_DEBUG, "In publish onSuccess callback %p token %d", (MQTTAsync)context, response->token);
}
/*
 * Failure callback for test8 publishes. Publishes are deliberately
 * interrupted by a disconnect/destroy, so the failure code must be
 * MQTTASYNC_OPERATION_INCOMPLETE; each invocation is counted so the test
 * body can verify how many publishes were cut short.
 */
void test8_onPublishFailure(void* context, MQTTAsync_failureData* response)
{
	MyLog(LOGA_DEBUG, "In onPublish failure callback %p", (MQTTAsync)context);

	assert("Response code should be interrupted", response->code == MQTTASYNC_OPERATION_INCOMPLETE,
		   "rc was %d", response->code);

	++test8_publishFailures;
}
/* Disconnect failure callback: records a test failure and ends the test. */
void test8_onDisconnectFailure(void* context, MQTTAsync_failureData* response)
{
	MyLog(LOGA_DEBUG, "In onDisconnect failure callback %p", (MQTTAsync)context);

	assert("Successful disconnect", 0, "disconnect failed", 0);
	test_finished = 1;
}
/* Disconnect success callback: signal the main loop that the test can proceed. */
void test8_onDisconnect(void* context, MQTTAsync_successData* response)
{
	MyLog(LOGA_DEBUG, "In onDisconnect callback %p", (MQTTAsync)context);
	test_finished = 1;
}
/* Subscribe success callback: note the granted QoS and unblock the test body. */
void test8_onSubscribe(void* context, MQTTAsync_successData* response)
{
	MyLog(LOGA_DEBUG, "In subscribe onSuccess callback %p granted qos %d", (MQTTAsync)context, response->alt.qos);
	test8_subscribed = 1;
}
/*
 * Connect success callback: kick off the test8 subscription. If the
 * subscribe call itself cannot be issued, end the test immediately so the
 * main loop does not spin forever waiting for test8_subscribed.
 */
void test8_onConnect(void* context, MQTTAsync_successData* response)
{
	MQTTAsync client = (MQTTAsync)context;
	MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;
	int rc;

	MyLog(LOGA_DEBUG, "In connect onSuccess callback, context %p", context);

	ropts.onSuccess = test8_onSubscribe;
	ropts.context = client;
	rc = MQTTAsync_subscribe(client, test8_topic, 2, &ropts);
	assert("Good rc from subscribe", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		test_finished = 1;
}
/*
 * Message-arrived callback for test8. Counts received messages in the
 * test8_messageCount global and frees the message and topic storage, as
 * the client library requires. Always returns 1 (message handled).
 *
 * Fix: removed the unused local `c` and the unused static counter
 * `message_count`, both of which were dead state (and compiler warnings).
 */
int test8_messageArrived(void* context, char* topicName, int topicLen, MQTTAsync_message* message)
{
	MyLog(LOGA_DEBUG, "Test8: received message id %d", message->msgid);

	test8_messageCount++;

	MQTTAsync_freeMessage(&message);
	MQTTAsync_free(topicName);

	return 1;
}
/*
 * Test8: incomplete commands and requests.
 *
 * Publishes alternating QoS 1/2 messages and disconnects before the protocol
 * exchanges complete, checking that:
 *   - after a zero-timeout disconnect, each incomplete publish fails with
 *     MQTTASYNC_OPERATION_INCOMPLETE and no tokens remain pending; and
 *   - destroying the client with commands still outstanding also elicits the
 *     publish failure callbacks.
 *
 * Fixes: removed the two `rc = 0;` assignments that clobbered the return
 * code of MQTTAsync_connect() before it was asserted/checked (they made the
 * "Good rc from connect" checks no-ops); removed the unused local `subsqos`.
 *
 * Returns the accumulated number of assertion failures (0 == pass).
 */
int test8(struct Options options)
{
	MQTTAsync c;
	MQTTAsync_connectOptions opts = MQTTAsync_connectOptions_initializer;
	int rc = 0;
	MQTTAsync_message pubmsg = MQTTAsync_message_initializer;
	MQTTAsync_responseOptions ropts = MQTTAsync_responseOptions_initializer;
	MQTTAsync_disconnectOptions dopts = MQTTAsync_disconnectOptions_initializer;
	MQTTAsync_token* tokens = NULL;
	int msg_count = 6;
	int i = 0;

	MyLog(LOGA_INFO, "Starting test 8 - incomplete commands");
	fprintf(xml, "<testcase classname=\"test4\" name=\"incomplete commands\"");
	global_start_time = start_clock();
	test_finished = 0;

	rc = MQTTAsync_create(&c, options.connection, "async_test8",
			MQTTCLIENT_PERSISTENCE_DEFAULT, NULL);
	assert("good rc from create", rc == MQTTASYNC_SUCCESS, "rc was %d\n", rc);
	if (rc != MQTTASYNC_SUCCESS)
	{
		MQTTAsync_destroy(&c);
		goto exit;
	}

	rc = MQTTAsync_setCallbacks(c, c, NULL, test8_messageArrived, NULL);
	assert("Good rc from setCallbacks", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);

	opts.keepAliveInterval = 20;
	opts.username = "testuser";
	opts.password = "testpassword";
	opts.MQTTVersion = options.MQTTVersion;
	opts.onFailure = NULL;
	opts.context = c;

	MyLog(LOGA_DEBUG, "Connecting");
	opts.cleansession = 1;
	opts.onSuccess = test8_onConnect;
	rc = MQTTAsync_connect(c, &opts);
	assert("Good rc from connect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		goto exit;

	/* wait for test8_onConnect/test8_onSubscribe to complete */
	while (!test8_subscribed)
#if defined(WIN32)
		Sleep(100);
#else
		usleep(10000L);
#endif

	pubmsg.qos = 2;
	ropts.onSuccess = test8_onPublish;
	ropts.onFailure = test8_onPublishFailure;
	ropts.context = c;
	for (i = 0; i < msg_count; ++i)
	{
		pubmsg.payload = "a much longer message that we can shorten to the extent that we need to payload up to 11";
		pubmsg.payloadlen = 11;
		pubmsg.qos = (pubmsg.qos == 2) ? 1 : 2; /* alternate */
		pubmsg.retained = 0;
		rc = MQTTAsync_sendMessage(c, test8_topic, &pubmsg, &ropts);
		assert("Good rc from sendMessage", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	}

	/* disconnect immediately without completing the commands */
	dopts.timeout = 0;
	dopts.onSuccess = test8_onDisconnect;
	dopts.context = c;
	rc = MQTTAsync_disconnect(c, &dopts); /* now there should be incomplete commands */
	assert("Good rc from disconnect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);

	while (!test_finished)
#if defined(WIN32)
		Sleep(100);
#else
		usleep(10000L);
#endif
	test_finished = 0;

	/* the zero-timeout disconnect should have failed every incomplete
	   publish, leaving no pending tokens */
	rc = MQTTAsync_getPendingTokens(c, &tokens);
	assert("getPendingTokens rc == 0", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	assert("should get no tokens back", tokens == NULL, "tokens was %p", tokens);

	assert("test8_publishFailures > 0", test8_publishFailures > 0,
		   "test8_publishFailures = %d", test8_publishFailures);

	/* Now elicit failure callbacks on destroy */
	test8_subscribed = test8_publishFailures = 0;

	MyLog(LOGA_DEBUG, "Connecting");
	opts.cleansession = 0;
	opts.onSuccess = test8_onConnect;
	rc = MQTTAsync_connect(c, &opts);
	assert("Good rc from connect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	if (rc != MQTTASYNC_SUCCESS)
		goto exit;

	while (!test8_subscribed)
#if defined(WIN32)
		Sleep(100);
#else
		usleep(10000L);
#endif

	i = 0;
	pubmsg.qos = 2;
	ropts.onSuccess = test8_onPublish;
	ropts.onFailure = test8_onPublishFailure;
	ropts.context = c;
	for (i = 0; i < msg_count; ++i)
	{
		pubmsg.payload = "a much longer message that we can shorten to the extent that we need to payload up to 11";
		pubmsg.payloadlen = 11;
		pubmsg.qos = (pubmsg.qos == 2) ? 1 : 2; /* alternate */
		pubmsg.retained = 0;
		rc = MQTTAsync_sendMessage(c, test8_topic, &pubmsg, &ropts);
		assert("Good rc from sendMessage", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	}

	/* disconnect immediately without completing the commands */
	dopts.timeout = 0;
	dopts.onSuccess = test8_onDisconnect;
	dopts.context = c;
	rc = MQTTAsync_disconnect(c, &dopts); /* now there should be incomplete commands */
	assert("Good rc from disconnect", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);

	while (!test_finished)
#if defined(WIN32)
		Sleep(100);
#else
		usleep(10000L);
#endif
	test_finished = 0;

	/* this time the tokens are still pending; the failure callbacks should
	   only fire when the client is destroyed */
	rc = MQTTAsync_getPendingTokens(c, &tokens);
	assert("getPendingTokens rc == 0", rc == MQTTASYNC_SUCCESS, "rc was %d", rc);
	assert("should get some tokens back", tokens != NULL, "tokens was %p", tokens);
	MQTTAsync_free(tokens);

	assert("test8_publishFailures == 0", test8_publishFailures == 0,
		   "test8_publishFailures = %d", test8_publishFailures);

	MQTTAsync_destroy(&c); /* fires the failure callbacks for outstanding commands */

	assert("test8_publishFailures > 0", test8_publishFailures > 0,
		   "test8_publishFailures = %d", test8_publishFailures);

exit:
	MyLog(LOGA_INFO, "TEST8: test %s. %d tests run, %d failures.",
		  (failures == 0) ? "passed" : "failed", tests, failures);
	write_test_result();
	return failures;
}
/* Client-library trace callback: route trace output to stdout. */
void trace_callback(enum MQTTASYNC_TRACE_LEVELS level, char* message)
{
	fprintf(stdout, "Trace : %d, %s\n", level, message);
}
/*
 * Test driver. Runs either the single test selected via the command line
 * (options.test_no) or all tests, repeated options.iterations times, and
 * writes a JUnit-style XML report to TEST-test4.xml.
 * Returns the accumulated number of test failures (0 == pass).
 */
int main(int argc, char** argv)
{
	int rc = 0;
	/* test functions, indexed starting from 1; element 0 is a placeholder */
	int (*tests[])() = {NULL, test1, test2, test3, test4, test5, test6, test7, test8}; /* indexed starting from 1 */
	MQTTAsync_nameValue* info;
	int i;

	xml = fopen("TEST-test4.xml", "w");
	fprintf(xml, "<testsuite name=\"test4\" tests=\"%d\">\n", (int)(ARRAY_SIZE(tests)) - 1);

	getopts(argc, argv);

	MQTTAsync_setTraceCallback(trace_callback);

	/* log the client library's name/value version information */
	info = MQTTAsync_getVersionInfo();
	while (info->name)
	{
		MyLog(LOGA_INFO, "%s: %s", info->name, info->value);
		info++;
	}

	for (i = 0; i < options.iterations; ++i)
	{
		if (options.test_no == -1)
		{ /* run all the tests */
			for (options.test_no = 1; options.test_no < ARRAY_SIZE(tests); ++options.test_no)
			{
				failures = 0;
				MQTTAsync_setTraceLevel(MQTTASYNC_TRACE_ERROR);
				rc += tests[options.test_no](options); /* return number of failures.  0 = test succeeded */
			}
		}
		else
		{
			MQTTAsync_setTraceLevel(MQTTASYNC_TRACE_ERROR);
			rc = tests[options.test_no](options); /* run just the selected test */
		}
	}

	if (rc == 0)
		MyLog(LOGA_INFO, "verdict pass");
	else
		MyLog(LOGA_INFO, "verdict fail");

	fprintf(xml, "</testsuite>\n");
	fclose(xml);

	return rc;
}
|
// Public API: comparator helpers for Array.prototype.sort(). Relies on
// function-declaration hoisting, so the export may precede the definitions.
module.exports = { ascending, descending };
// Comparator producing ascending order: 1 when a > b, -1 when a < b,
// otherwise 0 (including incomparable values such as NaN).
function ascending(a, b) {
  if (a > b) return 1;
  if (a < b) return -1;
  return 0;
}
// Comparator producing descending order; the mirror of ascending().
function descending(a, b) {
  if (a > b) return -1;
  if (a < b) return 1;
  return 0;
}
|
/*
Copyright 2017 ODK Central Developers
See the NOTICE file at the top-level directory of this distribution and at
https://github.com/getodk/central-frontend/blob/master/NOTICE.
This file is part of ODK Central. It is subject to the license terms in
the LICENSE file found in the top-level directory of this distribution and at
https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central,
including this file, may be copied, modified, propagated, or distributed
except according to the terms contained in the LICENSE file.
*/
// This module sends GET requests and stores the resulting response data.
import Vue from 'vue';
import { mapState } from 'vuex';
import { pick } from 'ramda';
import Option from '../../util/option';
import reconcileData from './request/reconcile';
import { Presenter } from '../../presenters/base';
import { configForPossibleBackendRequest, isProblem, logAxiosError, requestAlertMessage } from '../../util/request';
import { getters as dataGetters, keys as allKeys, transforms } from './request/keys';
// Returns a copy of `oldData` in which only the properties named in `props`
// have been replaced by the corresponding properties of `newData`. Options
// are unwrapped, updated, and rewrapped; Presenters are updated through
// their with() API; plain objects are shallow-merged. Throws if there is no
// existing data to update.
const updateData = (oldData, newData, props) => {
  if (oldData == null) throw new Error('data does not exist');
  if (oldData instanceof Option) {
    if (oldData.isEmpty()) throw new Error('data is an empty Option');
    return Option.of(updateData(oldData.get(), newData, props));
  }
  const picked = pick(props, newData);
  return oldData instanceof Presenter
    ? oldData.with(picked)
    : { ...oldData, ...picked };
};
// Vuex store module tracking GET requests and their response data, keyed by
// the entries of allKeys.
export default {
  state: {
    // Using allKeys.reduce() in part so that `requests` has a reactive property
    // for each key.
    requests: allKeys.reduce(
      (acc, key) => {
        // An object used to manage requests for the key
        acc[key] = {
          // Information about the last request
          last: {
            promise: null,
            // 'loading', 'success', 'error', or 'canceled'.
            state: null
          },
          // Used to cancel requests.
          cancelId: 0
        };
        return acc;
      },
      {}
    ),
    /*
    `data` contains all response data, mapping each key to its data. `data` has
    a reactive property for each key.

    If the value of the property associated with a key is `null`, that means
    that either no response has been received for the key, or the associated
    data has been cleared. It does not mean that a response has been received
    for the key, and the data itself is simply null or nonexistent. To implement
    the latter case, transform the response and return an Option.
    */
    data: allKeys.reduce(
      (acc, key) => {
        acc[key] = null;
        return acc;
      },
      {}
    )
  },
  getters: {
    // `true` while a request for the key is in flight.
    loading: ({ requests }) => (key) => requests[key].last.state === 'loading',
    /*
    Given an Array of keys, initiallyLoading() returns `true` if:

      1. There is at least one key for which there is no data and for which a
         request is in progress. (This condition is not satisfied if there is
         already data for the key, and the data is simply being refreshed.)
      2. There is no key for which the last request for the key was
         unsuccessful.

    Otherwise it returns `false`.
    */
    initiallyLoading: ({ requests, data }) => (keys) => {
      let any = false;
      for (const key of keys) {
        const { state } = requests[key].last;
        if (state === 'error') return false;
        if (state === 'loading' && data[key] == null) any = true;
      }
      return any;
    },
    // `true` only if every one of `keys` currently has data.
    dataExists: ({ data }) => (keys) => {
      for (const key of keys) {
        if (data[key] == null) return false;
      }
      return true;
    },
    ...dataGetters
  },
  mutations: {
    /* eslint-disable no-param-reassign */
    // Records a newly sent request for the key and marks it as in progress.
    createRequest({ requests }, { key, promise }) {
      const lastRequest = requests[key].last;
      lastRequest.promise = promise;
      lastRequest.state = 'loading';
    },
    setRequestState({ requests }, { key, state }) {
      requests[key].last.state = state;
    },
    // Resets request tracking for all keys (the data itself is untouched).
    resetRequests({ requests }) {
      for (const key of allKeys) {
        const requestsForKey = requests[key];
        const { last } = requestsForKey;
        last.promise = null;
        last.state = null;
        requestsForKey.cancelId = 0;
      }
    },
    // Cancels the last request for the key. Incrementing cancelId marks any
    // in-flight response for the key as stale (see the get() action).
    cancelRequest({ requests }, key) {
      const requestsForKey = requests[key];
      requestsForKey.last.state = 'canceled';
      requestsForKey.cancelId += 1;
    },
    setData({ data }, { key, value }) {
      data[key] = value;
    },
    // Sets a single property of the data; Vue.set() is used so that a newly
    // added property is reactive.
    setDataProp({ data }, { key, prop, value }) {
      const target = data[key] instanceof Option ? data[key].get() : data[key];
      Vue.set(target, prop, value);
    },
    // Clears the data for one key, or for all keys if no key is specified.
    clearData({ data }, key = undefined) {
      if (key != null) {
        data[key] = null;
      } else {
        for (const k of allKeys)
          data[k] = null;
      }
    }
    /* eslint-enable no-param-reassign */
  },
  actions: {
    /*
    get() sends one or more GET requests and stores the response data. Specify
    an array of config objects with the following properties, one for each
    request:

    - key. Specifies where to store the response data. If the request is
      successful and is not canceled, then the response data will be
      transformed, and the result will be stored at state.data[key]. See
      allKeys for the list of possible values of `key`.

    Request URL and Headers
    -----------------------

    - url. The URL of the request.
    - headers (optional). The headers of the request.
    - extended (default: false). `true` if extended metadata is requested and
      `false` if not.

    Response Handling
    -----------------

    - fulfillProblem (optional). Usually, an error response means that the
      request was invalid or that something went wrong. However, in some
      cases, an error response should be treated as if it is successful
      (resulting in a fulfilled, not a rejected, promise). Use fulfillProblem
      to identify such responses. fulfillProblem is passed the Backend
      Problem. (Any error response that is not a Problem is automatically
      considered unsuccessful.) fulfillProblem should return `true` if the
      response should be considered successful and `false` if not.
    - alert (default: true). Specify `true` to show an alert for an
      unsuccessful response and to log it. Specify `false` not to display an
      unsuccessful response.
    - success (optional)

      Callback to run if the request is successful and is not canceled. get()
      also returns a promise, on which you can call then() and catch(). (See
      "Return Value" below for more information.) There are a couple of times
      when you may wish to use `success` rather than then():

      1. If get() is used to send multiple requests, a different `success`
         callback can be specified for each request. On the other hand, the
         promise will be fulfilled only after all requests succeed.
      2. A then() or catch() callback will be run after Vue has reacted to the
         changes to the store state. Often that is fine, but if you want to
         run a callback before Vue reacts, specify `success`. As a rule of
         thumb, if the callback changes local state that is only used in the
         DOM, use then(). If the callback changes the store state or changes
         local state that is used outside the DOM, avoid inconsistent state by
         specifying `success`. The callback will be run after the data is
         reconciled, but before the validateData watchers.

    Existing Data
    -------------

    - update (optional). By default, any existing data is completely replaced
      by the response data. To update only select properties of existing data,
      specify an array of property names for `update`.
    - clear (default: true). Specify `true` if any existing data for the key
      should be cleared before the request is sent. Specify `false` for a
      background refresh. Note that by default, the data is cleared for all
      keys whenever the route changes. (There are exceptions to this, however:
      see the preserveData meta field for more information.)
    - resend (default: true)

      By default, get() sends a request for every config object specified,
      even if there is existing data for the corresponding key. However, if
      `resend` is specified as `false`, a request will not be sent if there is
      existing data for the key or if another request is in progress for the
      same key. Note that the entire request process will be short-circuited,
      so any existing data will not be cleared even if `clear` is specified as
      `true`.

      One common example of specifying `false` for `resend` arises with tabbed
      navigation. Say a component associated with one tab sends a request for
      a particular key. In most cases, navigating from that tab to another
      tab, then back to the original tab will destroy and recreate the
      component. However, in that case, we usually do not need to send a new
      request for the data that the component needs.

    Canceled Requests
    -----------------

    A request can be canceled if it is still in progress. This happens for one
    of two reasons:

      1. After the request is sent, another request is sent for the same key.
      2. When the route changes, the new route has the option to cancel one or
         more requests. (See router.js.)

    If a request is canceled, then if/when a response is received for the
    request, it will not be used.

    Return Value
    ------------

    get() returns a promise. The promise will be rejected if any of the requests
    is unsuccessful or is canceled. Otherwise the promise should be fulfilled.
    If you call then() on the promise, note that the `state` of each request
    will be 'success' when the then() callback is run, not 'loading'. If you
    call catch() on the promise, your logic should not assume that any request
    resulted in an error. Before running the then() or catch() callback, Vue
    will react to the changes to the store state, for example, running watchers
    and updating the DOM.
    */
    get({ state, commit }, configs) {
      // Only one alert is shown even if multiple requests fail.
      let alerted = false;
      return Promise.all(configs.map(config => {
        const {
          key,
          // Request URL and headers
          url,
          headers = undefined,
          extended = false,
          // Response handling
          fulfillProblem = undefined,
          alert = true,
          success,
          // Existing data
          update = undefined,
          clear = update == null,
          resend = true
        } = config;
        if (clear && update != null)
          throw new Error('cannot clear data to be updated');
        /*
        We need to handle three cases:

          1. There is no data for the key, and we are not waiting on a request
             for it.

             Either there has been no request, or the last request was canceled.
             This should only be the case when Frontend first loads or after the
             route changes. In this case, we simply need to send a request.

          2. There is no data, but we are waiting on a request for it.

             We will return immediately if `resend` is `false`. Otherwise, we
             will cancel the last request and send a new request.

          3. There is data.

             We will return immediately if `resend` is `false`. Otherwise, we
             will refresh the data, canceling the last request if it is still in
             progress.
        */
        const { data } = state;
        const requestsForKey = state.requests[key];
        const lastRequest = requestsForKey.last;
        if (!resend && (data[key] != null || lastRequest.state === 'loading'))
          return Promise.resolve();
        if ((data[key] == null && lastRequest.state === 'loading') ||
          data[key] != null) {
          if (lastRequest.state === 'loading') commit('cancelRequest', key);
          if (data[key] != null && clear) commit('clearData', key);
        }

        // Snapshot cancelId: if it changes before the response arrives, this
        // request was canceled and the response must be ignored.
        const { cancelId } = requestsForKey;
        const baseConfig = { method: 'GET', url };
        if (extended)
          baseConfig.headers = { ...headers, 'X-Extended-Metadata': 'true' };
        else if (headers != null)
          baseConfig.headers = headers;
        const { session } = data;
        const token = session != null ? session.token : null;
        const axiosConfig = configForPossibleBackendRequest(baseConfig, token);
        const promise = Vue.prototype.$http.request(axiosConfig)
          .catch(error => { // eslint-disable-line no-loop-func
            if (requestsForKey.cancelId !== cancelId)
              throw new Error('request was canceled');

            if (fulfillProblem != null && error.response != null &&
              isProblem(error.response.data) &&
              fulfillProblem(error.response.data))
              return error.response;

            if (alert) {
              logAxiosError(error);
              if (!alerted) {
                const message = requestAlertMessage(error);
                commit('setAlert', { type: 'danger', message });
                alerted = true;
              }
            }

            commit('setRequestState', { key, state: 'error' });

            throw error;
          })
          .then(response => {
            if (requestsForKey.cancelId !== cancelId)
              throw new Error('request was canceled');
            commit('setRequestState', { key, state: 'success' });

            if (update == null) {
              const transform = transforms[key];
              const transformed = transform != null
                ? transform(response)
                : response.data;
              commit('setData', { key, value: transformed });
            } else {
              commit('setData', {
                key,
                value: updateData(data[key], response.data, update)
              });
            }
            reconcileData.reconcile(key, data, commit);

            if (success != null) success(data);
          });
        commit('createRequest', { key, promise });
        return promise;
      }));
    }
  }
};
/*
requestData() facilitates access to the response data, returning functions that
can be used for computed properties. (It probably would have been more accurate
to name it responseData().)
requestData() takes a single parameter, an array specifying the keys for the
data to which the component requires access. For each element of the array,
specify either a key or an object.
Examples:
// The component requires access to `project` and `form`.
requestData(['project', 'form'])
// The component also requires access to formDraft, which is an Option. Because
// getOption is specified as `true`, get() will be called on the Option. The
// resulting value, not the Option, will be passed to the component.
requestData(['project', 'form', { key: 'formDraft', getOption: true }])
*/
// Builds Vuex computed-property mappings for the requested keys. Each entry
// of `options` is either a key (string) or an object `{ key, getOption }`;
// when getOption is `true`, the stored Option is unwrapped with get() (or
// mapped to `null` when no data exists) before being handed to the component.
export const requestData = (options) => {
  const map = {};
  options.forEach((keyOptions) => {
    if (typeof keyOptions === 'string') {
      map[keyOptions] = (state) => state.request.data[keyOptions];
      return;
    }
    const { key, getOption = false } = keyOptions;
    map[key] = getOption
      ? (state) => {
        const value = state.request.data[key];
        return value != null ? value.get() : null;
      }
      : (state) => state.request.data[key];
  });
  return mapState(map);
};
|
import { StyleSheet } from "react-native";
import Colors from "../../constants/Colors";
import Fonts from "../../constants/Fonts";
// Styles for a settings screen: a padded container, a header row with a back
// button and a title, and body text.
export default StyleSheet.create({
  container: {
    paddingHorizontal: 25,
  },
  // Back-navigation control rendered in the title row.
  backButton: {
    color: Colors.contrastColor,
    fontSize: 20,
  },
  // Screen title; capitalized regardless of the source casing.
  settingTitle: {
    textTransform: "capitalize",
    color: Colors.contrastColor,
    fontSize: 25,
    lineHeight: 30,
    fontFamily: Fonts.base,
    marginLeft: 20,
  },
  // Row holding the back button and title.
  titleWrapper: {
    flexDirection: "row",
    marginTop: 60,
    alignItems: "center",
  },
  // Explanatory body text beneath the header.
  infoText: {
    color: Colors.contrastColor,
    fontSize: 20,
    marginTop: 35,
    fontFamily: Fonts.base,
  },
});
|
import * as React from 'react';
import wrapIcon from '../utils/wrapIcon';
// Generated icon component: "TextPositionSquare" (20px, regular weight).
// Renders the 20x20 SVG; `className` and `primaryFill` are supplied via
// iconProps by the wrapper.
const rawSvg = (iconProps) => {
  const { className, primaryFill } = iconProps;
  return React.createElement("svg", { width: 20, height: 20, viewBox: "0 0 20 20", xmlns: "http://www.w3.org/2000/svg", className: className },
    React.createElement("path", { d: "M3 3.5c0-.28.22-.5.5-.5h13a.5.5 0 010 1h-13a.5.5 0 01-.5-.5z", fill: primaryFill }),
    React.createElement("path", { d: "M3 15.5c0-.28.22-.5.5-.5h13a.5.5 0 010 1h-13a.5.5 0 01-.5-.5z", fill: primaryFill }),
    React.createElement("path", { d: "M3.5 6a.5.5 0 000 1h1a.5.5 0 000-1h-1z", fill: primaryFill }),
    React.createElement("path", { d: "M15 6.5c0-.28.22-.5.5-.5h1a.5.5 0 010 1h-1a.5.5 0 01-.5-.5z", fill: primaryFill }),
    React.createElement("path", { d: "M3.5 9a.5.5 0 000 1h1a.5.5 0 000-1h-1z", fill: primaryFill }),
    React.createElement("path", { d: "M15 9.5c0-.28.22-.5.5-.5h1a.5.5 0 010 1h-1a.5.5 0 01-.5-.5z", fill: primaryFill }),
    React.createElement("path", { d: "M3.5 12a.5.5 0 000 1h1a.5.5 0 000-1h-1z", fill: primaryFill }),
    React.createElement("path", { d: "M15 12.5c0-.28.22-.5.5-.5h1a.5.5 0 010 1h-1a.5.5 0 01-.5-.5z", fill: primaryFill }),
    React.createElement("path", { d: "M7.5 8.5a2.5 2.5 0 015 0v5a.5.5 0 001 0v-5a3.5 3.5 0 10-7 0v5a.5.5 0 001 0v-5z", fill: primaryFill }));
};
// NOTE(review): rawSvg is invoked with empty props here, so wrapIcon appears
// to receive a rendered element (and presumably injects className/fill
// itself) — confirm against wrapIcon's implementation.
const TextPositionSquare20Regular = wrapIcon(rawSvg({}), 'TextPositionSquare20Regular');
export default TextPositionSquare20Regular;
|
/**
* @license Apache-2.0
*
* Copyright (c) 2018 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// MODULES //
var isFunction = require( '@stdlib/assert/is-function' );
// MAIN //
/**
* Browser shim.
*
* A browser has no `stdin` stream, so this shim accepts the same signature
* as the Node implementation but always reports an error asynchronously.
*
* @private
* @param {(string|null)} [encoding] - string encoding (ignored)
* @param {Callback} clbk - callback to be invoked upon completion
* @throws {TypeError} callback argument must be a function
*
* @example
* function onRead( error ) {
*     if ( error ) {
*         throw error;
*     }
* }
*
* stdin( onRead );
*/
function stdin() {
	var done = arguments[ arguments.length-1 ];
	if ( !isFunction( done ) ) {
		throw new TypeError( 'invalid argument. Callback argument must be a function. Value: `' + done + '`.' );
	}
	// Always fail asynchronously so callers observe consistent timing:
	setTimeout( fail, 0 );

	/**
	* Invokes the callback with an "unsupported environment" error.
	*
	* @private
	*/
	function fail() {
		done( new Error( 'invalid operation. The environment does not support reading from `stdin`.' ) );
	}
}
// EXPORTS //
module.exports = stdin;
|
from test_pyckage_github.version import __version__
class TestVersion:
    """Smoke tests for the package version metadata."""

    def test_version(self):
        """`__version__` must be exposed as a plain string (as packaging tools expect)."""
        assert isinstance(__version__, str)
|
//
// Copyright (c) 2020, Paul R. Swan
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
// TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
#ifndef CVanguardBaseGame_h
#define CVanguardBaseGame_h
#include "CGame.h"
//
// Base game class for Vanguard hardware, layered on the generic CGame
// framework. Concrete game variants construct it with their ROM region.
//
class CVanguardBaseGame : public CGame
{
    public:

        //
        // CVanguardBaseGame
        //

        // Delay callback: context is opaque caller state, ms a duration in
        // milliseconds; returns a PERROR status.
        // NOTE(review): semantics inferred from the name/signature only --
        // confirm against the implementation file.
        static PERROR delayFunction(
            void *context,
            unsigned long ms
        );

    protected:

        // Constructed by subclasses with the ROM region describing the
        // game's ROM set.
        CVanguardBaseGame(
            const ROM_REGION *romRegion
        );

        ~CVanguardBaseGame(
        );
};
#endif
|
import copy
import logging
import sys
from http import HTTPStatus
import zlib
import json
from flask import make_response, jsonify, current_app, abort
from werkzeug.urls import url_unquote
from backend.czi_hosted.common.config.client_config import get_client_config, get_client_userinfo
from backend.common.constants import Axis, DiffExpMode, JSON_NaN_to_num_warning_msg
from backend.common.errors import (
FilterError,
JSONEncodingValueError,
PrepareError,
DisabledFeatureError,
ExceedsLimitError,
DatasetAccessError,
ColorFormatException,
AnnotationsError,
UnsupportedSummaryMethod,
)
from backend.common.genesets import summarizeQueryHash
from backend.common.fbs.matrix import decode_matrix_fbs
def abort_and_log(code, logmsg, loglevel=logging.DEBUG, include_exc_info=False):
    """
    Log `logmsg` at `loglevel`, then abort the request with HTTP status `code`.

    When `include_exc_info` is true, the exception currently being handled
    (via sys.exc_info()) is attached to the log record. The log message is
    deliberately kept out of the HTTP response body.
    """
    exc_info = sys.exc_info() if include_exc_info else False
    current_app.logger.log(loglevel, logmsg, exc_info=exc_info)
    # Do NOT send log message to HTTP response.
    return abort(code)
def _query_parameter_to_filter(args):
    """
    Convert an annotation value filter, if present in the query args,
    into the standard dict filter format used by internal code.

    Query param filters look like: <axis>:name=value, where value
    may be one of:
        - a range ``min,max``, where either end may be left open with an
          asterisk, e.g. ``10,*``
        - a single value (which may be repeated to filter on several values)

    Eg, ...?tissue=lung&obs:tissue=heart&obs:num_reads=1000,*

    Raises FilterError on any malformed or inconsistent filter specification.
    """
    filters = {
        "obs": {},
        "var": {},
    }

    # args has already been url-unquoted once. We assume double escaping
    # on name and value.
    try:
        for key, value in args.items(multi=True):
            axis, name = key.split(":")
            if axis not in ("obs", "var"):
                raise FilterError("unknown filter axis")
            name = url_unquote(name)

            current = filters[axis].setdefault(name, {"name": name})
            val_split = value.split(",")
            if len(val_split) == 1:
                # single-value filter; values may repeat, but not mix with a range
                if "min" in current or "max" in current:
                    raise FilterError("do not mix range and value filters")
                value = url_unquote(value)
                values = current.setdefault("values", [])
                values.append(value)
            elif len(val_split) == 2:
                if len(current) > 1:
                    raise FilterError("duplicate range specification")
                # named to avoid shadowing the min/max builtins
                range_min = url_unquote(val_split[0])
                range_max = url_unquote(val_split[1])
                if range_min != "*":
                    current["min"] = float(range_min)
                if range_max != "*":
                    current["max"] = float(range_max)
                if len(current) < 2:
                    raise FilterError("must specify at least min or max in range filter")
            else:
                raise FilterError("badly formated filter value")
    except ValueError as e:
        # e.g. a malformed key (no ":") or a non-numeric range bound
        raise FilterError(str(e)) from e

    result = {}
    for axis in ("obs", "var"):
        axis_filter = filters[axis]
        if len(axis_filter) > 0:
            result[axis] = {"annotation_value": [val for val in axis_filter.values()]}
    return result
def schema_get_helper(data_adaptor):
    """Gather the dataset schema, merged with user-annotation label columns.

    Returns a deep copy so callers may mutate the result without affecting
    the adaptor's cached schema.
    """
    schema = copy.deepcopy(data_adaptor.get_schema())

    # Append writable label columns to the obs schema when enabled.
    annotations = data_adaptor.dataset_config.user_annotations
    if annotations.user_annotations_enabled():
        label_schema = annotations.get_schema(data_adaptor)
        schema["annotations"]["obs"]["columns"].extend(label_schema)
    return schema
def schema_get(data_adaptor):
    """Serve the combined dataset + annotations schema as JSON."""
    return make_response(jsonify({"schema": schema_get_helper(data_adaptor)}), HTTPStatus.OK)
def config_get(app_config, data_adaptor):
    """Serve the client-facing application configuration as JSON."""
    client_config = get_client_config(app_config, data_adaptor)
    return make_response(jsonify(client_config), HTTPStatus.OK)
def userinfo_get(app_config, data_adaptor):
    """Serve the current user's info as JSON."""
    userinfo = get_client_userinfo(app_config, data_adaptor)
    return make_response(jsonify(userinfo), HTTPStatus.OK)
def annotations_obs_get(request, data_adaptor):
    """Return obs annotations (optionally a subset of columns) as a flatbuffer."""
    fields = request.args.getlist("annotation-name", None)
    # An empty field list means "all columns" for the limit check.
    requested = len(fields) or len(data_adaptor.get_obs_keys())
    if data_adaptor.server_config.exceeds_limit("column_request_max", requested):
        return abort(HTTPStatus.BAD_REQUEST)

    if request.accept_mimetypes.best_match(["application/octet-stream"]) != "application/octet-stream":
        return abort(HTTPStatus.NOT_ACCEPTABLE)

    try:
        annotations = data_adaptor.dataset_config.user_annotations
        labels = annotations.read_labels(data_adaptor) if annotations.user_annotations_enabled() else None
        fbs = data_adaptor.annotation_to_fbs_matrix(Axis.OBS, fields, labels)
        return make_response(fbs, HTTPStatus.OK, {"Content-Type": "application/octet-stream"})
    except KeyError as e:
        return abort_and_log(HTTPStatus.BAD_REQUEST, str(e), include_exc_info=True)
def annotations_put_fbs_helper(data_adaptor, fbs):
    """Decode a labels flatbuffer and persist it via the annotations backend.

    Raises DisabledFeatureError when writable annotations are turned off.
    """
    annotations = data_adaptor.dataset_config.user_annotations
    if not annotations.user_annotations_enabled():
        raise DisabledFeatureError("Writable annotations are not enabled")
    labels_df = decode_matrix_fbs(fbs)
    # Validate non-empty label frames against the dataset before writing.
    if not labels_df.empty:
        labels_df = data_adaptor.check_new_labels(labels_df)
    annotations.write_labels(labels_df, data_adaptor)
def inflate(data):
    """Decompress a zlib-compressed byte string and return the raw bytes."""
    inflated = zlib.decompress(data)
    return inflated
def annotations_obs_put(request, data_adaptor):
    """Persist obs annotation labels uploaded as a zlib-compressed flatbuffer."""
    annotations = data_adaptor.dataset_config.user_annotations
    if not annotations.user_annotations_enabled():
        return abort(HTTPStatus.NOT_IMPLEMENTED)

    anno_collection = request.args.get("annotation-collection-name", default=None)
    fbs = inflate(request.get_data())

    # Optional named collection; reject unsafe names before switching to it.
    if anno_collection is not None:
        if not annotations.is_safe_collection_name(anno_collection):
            return abort(HTTPStatus.BAD_REQUEST, "Bad annotation collection name")
        annotations.set_collection(anno_collection)

    try:
        annotations_put_fbs_helper(data_adaptor, fbs)
        return make_response(json.dumps({"status": "OK"}), HTTPStatus.OK, {"Content-Type": "application/json"})
    except (ValueError, DisabledFeatureError, KeyError) as e:
        return abort_and_log(HTTPStatus.BAD_REQUEST, str(e), include_exc_info=True)
def annotations_var_get(request, data_adaptor):
    """Return var annotations (optionally a subset of columns) as a flatbuffer."""
    fields = request.args.getlist("annotation-name", None)
    requested = len(fields) or len(data_adaptor.get_var_keys())
    if data_adaptor.server_config.exceeds_limit("column_request_max", requested):
        return abort(HTTPStatus.BAD_REQUEST)

    if request.accept_mimetypes.best_match(["application/octet-stream"]) != "application/octet-stream":
        return abort(HTTPStatus.NOT_ACCEPTABLE)

    try:
        annotations = data_adaptor.dataset_config.user_annotations
        labels = annotations.read_labels(data_adaptor) if annotations.user_annotations_enabled() else None
        fbs = data_adaptor.annotation_to_fbs_matrix(Axis.VAR, fields, labels)
        return make_response(fbs, HTTPStatus.OK, {"Content-Type": "application/octet-stream"})
    except KeyError as e:
        return abort_and_log(HTTPStatus.BAD_REQUEST, str(e), include_exc_info=True)
def data_var_put(request, data_adaptor):
    """Return var-axis data selected by a JSON-body filter, as a flatbuffer."""
    accepted = request.accept_mimetypes.best_match(["application/octet-stream"])
    if accepted != "application/octet-stream":
        return abort(HTTPStatus.NOT_ACCEPTABLE)

    body = request.get_json()
    filter = body["filter"] if body else None
    try:
        fbs = data_adaptor.data_frame_to_fbs_matrix(filter, axis=Axis.VAR)
        return make_response(fbs, HTTPStatus.OK, {"Content-Type": "application/octet-stream"})
    except (FilterError, ValueError, ExceedsLimitError) as e:
        return abort_and_log(HTTPStatus.BAD_REQUEST, str(e), include_exc_info=True)
def data_var_get(request, data_adaptor):
    """Return var-axis data selected by query-string filters, as a flatbuffer."""
    accepted = request.accept_mimetypes.best_match(["application/octet-stream"])
    if accepted != "application/octet-stream":
        return abort(HTTPStatus.NOT_ACCEPTABLE)

    try:
        filter = _query_parameter_to_filter(request.args)
        fbs = data_adaptor.data_frame_to_fbs_matrix(filter, axis=Axis.VAR)
        return make_response(fbs, HTTPStatus.OK, {"Content-Type": "application/octet-stream"})
    except (FilterError, ValueError, ExceedsLimitError) as e:
        return abort_and_log(HTTPStatus.BAD_REQUEST, str(e), include_exc_info=True)
def colors_get(data_adaptor):
    """Return custom color mappings as JSON, or {} when the feature is off."""
    if not data_adaptor.dataset_config.presentation__custom_colors:
        return make_response(jsonify({}), HTTPStatus.OK)
    try:
        # Malformed color metadata in the dataset surfaces as NOT_FOUND.
        return make_response(jsonify(data_adaptor.get_colors()), HTTPStatus.OK)
    except ColorFormatException as e:
        return abort_and_log(HTTPStatus.NOT_FOUND, str(e), include_exc_info=True)
def diffexp_obs_post(request, data_adaptor):
    # Differential expression between two cell sets defined by obs filters.
    # Expected JSON body: {"mode": ..., "set1": {"filter": ...},
    # "set2": {"filter": ...}, "count": N}.
    if not data_adaptor.dataset_config.diffexp__enable:
        return abort(HTTPStatus.NOT_IMPLEMENTED)
    args = request.get_json()
    # First stage: validate the request body; bad input -> 400.
    try:
        # TODO: implement varfilter mode
        mode = DiffExpMode(args["mode"])
        if mode == DiffExpMode.VAR_FILTER or "varFilter" in args:
            return abort_and_log(HTTPStatus.NOT_IMPLEMENTED, "varFilter not enabled")
        set1_filter = args.get("set1", {"filter": {}})["filter"]
        set2_filter = args.get("set2", {"filter": {}})["filter"]
        count = args.get("count", None)
        if set1_filter is None or set2_filter is None or count is None:
            return abort_and_log(HTTPStatus.BAD_REQUEST, "missing required parameter")
        # Only obs-axis filters are supported for the two sets.
        if Axis.VAR in set1_filter or Axis.VAR in set2_filter:
            return abort_and_log(HTTPStatus.BAD_REQUEST, "var axis filter not enabled")
    except (KeyError, TypeError) as e:
        # e.g. missing "mode", or a non-dict body
        return abort_and_log(HTTPStatus.BAD_REQUEST, str(e), include_exc_info=True)
    # Second stage: run the computation; errors here are also client-visible 400s,
    # except JSON encoding failures which propagate to the default handler.
    try:
        diffexp = data_adaptor.diffexp_topN(set1_filter, set2_filter, count)
        return make_response(diffexp, HTTPStatus.OK, {"Content-Type": "application/json"})
    except (ValueError, DisabledFeatureError, FilterError, ExceedsLimitError) as e:
        return abort_and_log(HTTPStatus.BAD_REQUEST, str(e), include_exc_info=True)
    except JSONEncodingValueError:
        # JSON encoding failure, usually due to bad data. Just let it ripple up
        # to default exception handler.
        current_app.logger.warning(JSON_NaN_to_num_warning_msg)
        raise
def layout_obs_get(request, data_adaptor):
    """Return embedding coordinates (optionally a subset of layouts) as a flatbuffer."""
    fields = request.args.getlist("layout-name", None)
    requested = len(fields) or len(data_adaptor.get_embedding_names())
    if data_adaptor.server_config.exceeds_limit("column_request_max", requested):
        return abort(HTTPStatus.BAD_REQUEST)

    if request.accept_mimetypes.best_match(["application/octet-stream"]) != "application/octet-stream":
        return abort(HTTPStatus.NOT_ACCEPTABLE)

    try:
        fbs = data_adaptor.layout_to_fbs_matrix(fields)
        return make_response(fbs, HTTPStatus.OK, {"Content-Type": "application/octet-stream"})
    except (KeyError, DatasetAccessError) as e:
        return abort_and_log(HTTPStatus.BAD_REQUEST, str(e), include_exc_info=True)
    except PrepareError:
        # The dataset could not produce this embedding; logged loudly.
        return abort_and_log(
            HTTPStatus.NOT_IMPLEMENTED,
            f"No embedding available {request.path}",
            loglevel=logging.ERROR,
            include_exc_info=True,
        )
def layout_obs_put(request, data_adaptor):
    """Compute a new embedding over the cells selected by the obs filter.

    JSON body: {"filter": ..., "method": "umap", "layer": "X", "npcs": 50}
    (all but "filter" optional).  Returns the updated layout schema as JSON.
    """
    args = request.get_json() or {}
    filter = args.get("filter")
    if not filter:
        return abort_and_log(HTTPStatus.BAD_REQUEST, "obs filter is required")

    # Fixed: the original read args["method"] / args["layer"] / args["npcs"]
    # unconditionally whenever args was truthy — which it always is past the
    # filter guard — so omitting any optional key raised an unhandled
    # KeyError (HTTP 500).  Per-key defaults restore the intended behavior.
    method = args.get("method", "umap")
    layer = args.get("layer", "X")
    npcs = args.get("npcs", 50)

    try:
        schema = data_adaptor.compute_embedding(method, filter, layer, npcs)
        return make_response(jsonify(schema), HTTPStatus.OK, {"Content-Type": "application/json"})
    except NotImplementedError as e:
        return abort_and_log(HTTPStatus.NOT_IMPLEMENTED, str(e))
    except (ValueError, DisabledFeatureError, FilterError) as e:
        return abort_and_log(HTTPStatus.BAD_REQUEST, str(e), include_exc_info=True)
def genesets_get(request, data_adaptor):
    """Return the user's gene sets, as JSON or CSV per the Accept header."""
    preferred = request.accept_mimetypes.best_match(["application/json", "text/csv"])
    if preferred not in ("application/json", "text/csv"):
        return abort(HTTPStatus.NOT_ACCEPTABLE)

    try:
        annotations = data_adaptor.dataset_config.user_annotations
        genesets, tid = annotations.read_gene_sets(data_adaptor)

        if preferred == "text/csv":
            # CSV is served as a file download.
            headers = {
                "Content-Type": "text/csv",
                "Content-Disposition": "attachment; filename=genesets.csv",
            }
            return make_response(annotations.gene_sets_to_csv(genesets), HTTPStatus.OK, headers)

        payload = {"genesets": annotations.gene_sets_to_response(genesets), "tid": tid}
        return make_response(jsonify(payload), HTTPStatus.OK)
    except (ValueError, KeyError, AnnotationsError) as e:
        return abort_and_log(HTTPStatus.BAD_REQUEST, str(e))
def summarize_var_helper(request, data_adaptor, key, raw_query):
    """Validate a summarize-var request and return the summary flatbuffer.

    `key`, when provided, must equal the hash of `raw_query`.
    """
    preferred = request.accept_mimetypes.best_match(["application/octet-stream"])
    if preferred != "application/octet-stream":
        return abort(HTTPStatus.NOT_ACCEPTABLE)

    summary_method = request.values.get("method", default="mean")
    query_hash = summarizeQueryHash(raw_query)
    if key and query_hash != key:
        return abort(HTTPStatus.BAD_REQUEST, description="query key did not match")

    # Strip the non-filter params so only the filter args remain.
    args_filter_only = request.values.copy()
    args_filter_only.poplist("method")
    args_filter_only.poplist("key")

    try:
        filter = _query_parameter_to_filter(args_filter_only)
        fbs = data_adaptor.summarize_var(summary_method, filter, query_hash)
        return make_response(fbs, HTTPStatus.OK, {"Content-Type": "application/octet-stream"})
    except ValueError as e:
        return abort(HTTPStatus.NOT_FOUND, description=str(e))
    except (UnsupportedSummaryMethod, FilterError) as e:
        return abort(HTTPStatus.BAD_REQUEST, description=str(e))
def summarize_var_get(request, data_adaptor):
    """GET variant of summarize-var; the raw query string is the hash input."""
    return summarize_var_helper(request, data_adaptor, None, request.query_string)
def summarize_var_post(request, data_adaptor):
    """POST variant of summarize-var, for queries too long to fit in a URL.

    The filter arrives form-encoded in the body; the optional `key` query
    param must equal the hash of the raw body.
    """
    if not request.content_type or "application/x-www-form-urlencoded" not in request.content_type:
        return abort(HTTPStatus.UNSUPPORTED_MEDIA_TYPE)
    # content_length may be None when the Content-Length header is absent;
    # coalesce to 0 so the sanity-check comparison cannot raise TypeError.
    # The cap is just a guard against memory exhaustion.
    if (request.content_length or 0) > 1_000_000:
        return abort(HTTPStatus.BAD_REQUEST)

    key = request.args.get("key", default=None)
    return summarize_var_helper(request, data_adaptor, key, request.get_data())
|
// Bootstrap: configure logging, connect to Cloud Foundry, then start the server.
// Fixed: every binding here was an implicit global (no const/let/var);
// all are now declared with `const`.
const O = require('output-manager');

// NOTE(review): env vars are strings, so DO_TRACE="false" is still truthy —
// confirm that only presence/absence of the variable is intended.
const __IS_TRACE = process.env.DO_TRACE || false;
const __IS_DEBUG = process.env.DO_DEBUG || false;
if (__IS_DEBUG || __IS_TRACE) {
  O.level(__IS_TRACE ? O.LogLevel.TRACE : O.LogLevel.DEBUG);
}

const CF = require('js-cfclient');
const CfClient = new CF.CFClient(new CF.CFConfig({
  protocol: process.env.API_PROTOCOL,
  host: process.env.API_HOST,
  username: process.env.API_USERNAME,
  password: process.env.API_PASSWORD,
  skipSslValidation: process.env.SKIP_SSL_VALIDATION
}));

// Start the HTTP server only once the CF connection is up; log failures via O.e.
CfClient.connect().then(() => {
  require('./server').start(process.env.PORT || 8888, CfClient, process.env.APP_NAMES || []);
}, O.e);
|
import React from 'react';
import { mount } from 'enzyme';
import { act } from 'react-dom/test-utils';
import ConfigProvider from '..';
import zhCN from '../../locale/zh_CN';
import Form from '../../form';
// Tests that ConfigProvider propagates form-level config (validateMessages,
// requiredMark) down to nested Form instances.
describe('ConfigProvider.Form', () => {
  // Validation feedback is flushed asynchronously, so these tests use fake
  // timers and drain them explicitly with jest.runAllTimers().
  beforeAll(() => {
    jest.useFakeTimers();
  });
  afterAll(() => {
    jest.useRealTimers();
  });
  describe('form validateMessages', () => {
    // Mounts a two-field form under ConfigProvider (zhCN locale) and returns
    // [wrapper, formRef] so tests can trigger validation imperatively.
    const wrapperComponent = ({ validateMessages }) => {
      const formRef = React.createRef();
      const wrapper = mount(
        <ConfigProvider locale={zhCN} form={{ validateMessages }}>
          <Form ref={formRef} initialValues={{ age: 18 }}>
            <Form.Item name="test" label="姓名" rules={[{ required: true }]}>
              <input />
            </Form.Item>
            <Form.Item name="age" label="年龄" rules={[{ type: 'number', len: 17 }]}>
              <input />
            </Form.Item>
          </Form>
        </ConfigProvider>,
      );
      return [wrapper, formRef];
    };
    it('set locale zhCN', async () => {
      const [wrapper, formRef] = wrapperComponent({});
      // Validation is expected to reject: "test" is required but empty.
      await act(async () => {
        try {
          await formRef.current.validateFields();
        } catch (e) {
          // Do nothing
        }
      });
      // Flush timers and pending microtasks so the explain node renders.
      await act(async () => {
        jest.runAllTimers();
        wrapper.update();
        await Promise.resolve();
      });
      expect(wrapper.find('.ezd-form-item-explain').first().text()).toEqual('请输入姓名');
    });
    it('set locale zhCN and set form validateMessages one item, other use default message', async () => {
      const [wrapper, formRef] = wrapperComponent({ validateMessages: { required: '必须' } });
      await act(async () => {
        try {
          await formRef.current.validateFields();
        } catch (e) {
          // Do nothing
        }
      });
      await act(async () => {
        jest.runAllTimers();
        wrapper.update();
        await Promise.resolve();
      });
      // The overridden "required" message wins; rules without an override
      // fall back to the zhCN locale defaults.
      expect(wrapper.find('.ezd-form-item-explain').first().text()).toEqual('必须');
      expect(wrapper.find('.ezd-form-item-explain').last().text()).toEqual('年龄必须等于17');
    });
  });
  describe('form requiredMark', () => {
    it('set requiredMark optional', async () => {
      const wrapper = mount(
        <ConfigProvider form={{ requiredMark: 'optional' }}>
          <Form initialValues={{ age: 18 }}>
            <Form.Item name="age" label="年龄" rules={[{ type: 'number', len: 17 }]}>
              <input />
            </Form.Item>
          </Form>
        </ConfigProvider>,
      );
      expect(wrapper).toMatchRenderedSnapshot();
    });
  });
});
|
/**
BSD 3-Clause License
This file is part of the Basalt project.
https://gitlab.com/VladyslavUsenko/basalt.git
Copyright (c) 2019, Vladyslav Usenko and Nikolaus Demmel.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <basalt/io/dataset_io.h>
#include <basalt/utils/filesystem.h>
#include <opencv2/highgui/highgui.hpp>
namespace basalt {
// VioDataset backed by a KITTI odometry sequence on disk.  Images are loaded
// lazily per timestamp; IMU fields exist to satisfy the interface but KITTI
// provides no accel/gyro data here.
class KittiVioDataset : public VioDataset {
  size_t num_cams;
  std::string path;

  std::vector<int64_t> image_timestamps;
  // timestamp -> image file name (e.g. "000042.png"), shared by all cameras
  std::unordered_map<int64_t, std::string> image_path;

  // vector of images for every timestamp
  // assumes vectors size is num_cams for every timestamp with null pointers for
  // missing frames
  // std::unordered_map<int64_t, std::vector<ImageData>> image_data;

  Eigen::aligned_vector<AccelData> accel_data;
  Eigen::aligned_vector<GyroData> gyro_data;

  std::vector<int64_t> gt_timestamps;  // ordered gt timestamps
  Eigen::aligned_vector<Sophus::SE3d>
      gt_pose_data;  // TODO: change to eigen aligned

  int64_t mocap_to_imu_offset_ns;

 public:
  ~KittiVioDataset() {}

  size_t get_num_cams() const { return num_cams; }

  std::vector<int64_t> &get_image_timestamps() { return image_timestamps; }

  const Eigen::aligned_vector<AccelData> &get_accel_data() const {
    return accel_data;
  }
  const Eigen::aligned_vector<GyroData> &get_gyro_data() const {
    return gyro_data;
  }
  const std::vector<int64_t> &get_gt_timestamps() const {
    return gt_timestamps;
  }
  const Eigen::aligned_vector<Sophus::SE3d> &get_gt_pose_data() const {
    return gt_pose_data;
  }
  int64_t get_mocap_to_imu_offset_ns() const { return mocap_to_imu_offset_ns; }

  // Load the per-camera images for timestamp t_ns from disk.  8-bit
  // grayscale input is widened into the upper byte of a 16-bit value; any
  // other pixel format aborts.  Missing files leave a null image slot.
  std::vector<ImageData> get_image_data(int64_t t_ns) {
    std::vector<ImageData> res(num_cams);

    // Hard-coded 2-camera KITTI layout; num_cams is set to 2 in
    // KittiIO::read, otherwise folder[cam] would be out of bounds.
    const std::vector<std::string> folder = {"/image_0/", "/image_1/"};

    // Renamed loop variables: the original reused `i` for both the camera
    // loop and the pixel loop, shadowing the outer index.
    for (size_t cam = 0; cam < num_cams; cam++) {
      std::string full_image_path = path + folder[cam] + image_path[t_ns];

      if (fs::exists(full_image_path)) {
        cv::Mat img = cv::imread(full_image_path, cv::IMREAD_UNCHANGED);

        if (img.type() == CV_8UC1) {
          res[cam].img.reset(new ManagedImage<uint16_t>(img.cols, img.rows));

          const uint8_t *data_in = img.ptr();
          uint16_t *data_out = res[cam].img->ptr;

          // Widen before multiplying so large images cannot overflow int.
          size_t full_size = size_t(img.cols) * size_t(img.rows);
          for (size_t px = 0; px < full_size; px++) {
            // Shift 8-bit intensity into the upper byte of the 16-bit pixel.
            data_out[px] = uint16_t(data_in[px]) << 8;
          }
        } else {
          std::cerr << "img.fmt.bpp " << img.type() << std::endl;
          std::abort();
        }
      }
    }

    return res;
  }

  EIGEN_MAKE_ALIGNED_OPERATOR_NEW
  friend class KittiIO;
};
// Reads a KITTI odometry sequence directory: times.txt (frame timestamps)
// and, when present, poses.txt (ground-truth camera poses, row-major 3x4).
class KittiIO : public DatasetIoInterface {
 public:
  KittiIO() {}

  void read(const std::string &path) {
    if (!fs::exists(path))
      std::cerr << "No dataset found in " << path << std::endl;

    data.reset(new KittiVioDataset);
    data->num_cams = 2;  // KITTI stereo: image_0 and image_1
    data->path = path;

    // Timestamps must be read first: read_gt_data_pose pairs each pose with
    // the image timestamp of the same row index.
    read_image_timestamps(path + "/times.txt");

    if (fs::exists(path + "/poses.txt")) {
      read_gt_data_pose(path + "/poses.txt");
    }
  }

  void reset() { data.reset(); }

  VioDatasetPtr get_data() { return data; }

 private:
  // Parse times.txt (one float seconds value per line) and derive the
  // sequential image file names 000000.png, 000001.png, ...
  void read_image_timestamps(const std::string &path) {
    std::ifstream f(path);
    std::string line;
    while (std::getline(f, line)) {
      // Skip blank lines too: indexing line[0] on an empty string was the
      // original behavior and only works by the grace of the null sentinel.
      if (line.empty() || line[0] == '#') continue;
      std::stringstream ss(line);
      double t_s;
      ss >> t_s;

      int64_t t_ns = int64_t(t_s * 1e9);

      std::stringstream ss1;
      ss1 << std::setfill('0') << std::setw(6) << data->image_timestamps.size()
          << ".png";

      data->image_timestamps.emplace_back(t_ns);
      data->image_path[t_ns] = ss1.str();
    }
  }

  // Parse poses.txt: each row is a 3x4 [R|t] matrix in row-major order,
  // paired with the image timestamp at the same row index.
  void read_gt_data_pose(const std::string &path) {
    data->gt_timestamps.clear();
    data->gt_pose_data.clear();

    size_t i = 0;
    std::ifstream f(path);
    std::string line;
    while (std::getline(f, line)) {
      if (line.empty() || line[0] == '#') continue;
      // Guard against poses.txt having more rows than times.txt; the
      // original indexed image_timestamps[i] unchecked (out of bounds).
      if (i >= data->image_timestamps.size()) break;

      std::stringstream ss(line);

      Eigen::Matrix3d rot;
      Eigen::Vector3d pos;

      ss >> rot(0, 0) >> rot(0, 1) >> rot(0, 2) >> pos[0] >> rot(1, 0) >>
          rot(1, 1) >> rot(1, 2) >> pos[1] >> rot(2, 0) >> rot(2, 1) >>
          rot(2, 2) >> pos[2];

      data->gt_timestamps.emplace_back(data->image_timestamps[i]);
      data->gt_pose_data.emplace_back(Eigen::Quaterniond(rot), pos);
      i++;
    }
  }

  std::shared_ptr<KittiVioDataset> data;
};
} // namespace basalt
|
// 4 vertices x 1 strip (x,y,z per vertex)
// Fixed: the last vertex was missing its z component (11 values instead of
// the 4*3 = 12 the comment implies), so a reader consuming 12 shorts would
// run past the end of the array.
KDint16 g_nVertices1 [ ] =
{
	1,-1,0, 1,1,0, -1,-1,0, -1,1,0
};
// 4vertices x 1strpis
KDint16 g_nVertices2 [ ] =
{
10,0,10, 10,0,-10, -10,0,10, -10,0,-10
};
// 20vertices x 20strips
KDint16 g_nVertices3 [ ] =
{
-10,0,-10, -9,0,-10, -10,0,-9, -9,0,-9, -10,0,-8, -9,0,-8, -10,0,-7, -9,0,-7, -10,0,-6, -9,0,-6, -10,0,-5, -9,0,-5, -10,0,-4, -9,0,-4, -10,0,-3, -9,0,-3, -10,0,-2, -9,0,-2, -10,0,-1, -9,0,-1, -10,0,0, -9,0,0, -10,0,1, -9,0,1, -10,0,2, -9,0,2, -10,0,3, -9,0,3, -10,0,4, -9,0,4, -10,0,5, -9,0,5, -10,0,6, -9,0,6, -10,0,7, -9,0,7, -10,0,8, -9,0,8, -10,0,9, -9,0,9, -9,0,-10, -8,0,-10, -9,0,-9, -8,0,-9, -9,0,-8, -8,0,-8, -9,0,-7, -8,0,-7, -9,0,-6, -8,0,-6, -9,0,-5, -8,0,-5, -9,0,-4, -8,0,-4, -9,0,-3, -8,0,-3, -9,0,-2, -8,0,-2, -9,0,-1, -8,0,-1, -9,0,0, -8,0,0, -9,0,1, -8,0,1, -9,0,2, -8,0,2, -9,0,3, -8,0,3, -9,0,4, -8,0,4, -9,0,5, -8,0,5, -9,0,6, -8,0,6, -9,0,7, -8,0,7, -9,0,8, -8,0,8, -9,0,9, -8,0,9, -8,0,-10, -7,0,-10, -8,0,-9, -7,0,-9, -8,0,-8, -7,0,-8, -8,0,-7, -7,0,-7, -8,0,-6, -7,0,-6, -8,0,-5, -7,0,-5, -8,0,-4, -7,0,-4, -8,0,-3, -7,0,-3, -8,0,-2, -7,0,-2, -8,0,-1, -7,0,-1, -8,0,0, -7,0,0, -8,0,1, -7,0,1, -8,0,2, -7,0,2, -8,0,3, -7,0,3, -8,0,4, -7,0,4, -8,0,5, -7,0,5, -8,0,6, -7,0,6, -8,0,7, -7,0,7, -8,0,8, -7,0,8, -8,0,9, -7,0,9, -7,0,-10, -6,0,-10, -7,0,-9, -6,0,-9, -7,0,-8, -6,0,-8, -7,0,-7, -6,0,-7, -7,0,-6, -6,0,-6, -7,0,-5, -6,0,-5, -7,0,-4, -6,0,-4, -7,0,-3, -6,0,-3, -7,0,-2, -6,0,-2, -7,0,-1, -6,0,-1, -7,0,0, -6,0,0, -7,0,1, -6,0,1, -7,0,2, -6,0,2, -7,0,3, -6,0,3, -7,0,4, -6,0,4, -7,0,5, -6,0,5, -7,0,6, -6,0,6, -7,0,7, -6,0,7, -7,0,8, -6,0,8, -7,0,9, -6,0,9, -6,0,-10, -5,0,-10, -6,0,-9, -5,0,-9, -6,0,-8, -5,0,-8, -6,0,-7, -5,0,-7, -6,0,-6, -5,0,-6, -6,0,-5, -5,0,-5, -6,0,-4, -5,0,-4, -6,0,-3, -5,0,-3, -6,0,-2, -5,0,-2, -6,0,-1, -5,0,-1, -6,0,0, -5,0,0, -6,0,1, -5,0,1, -6,0,2, -5,0,2, -6,0,3, -5,0,3, -6,0,4, -5,0,4, -6,0,5, -5,0,5, -6,0,6, -5,0,6, -6,0,7, -5,0,7, -6,0,8, -5,0,8, -6,0,9, -5,0,9, -5,0,-10, -4,0,-10, -5,0,-9, -4,0,-9, -5,0,-8, -4,0,-8, -5,0,-7, -4,0,-7, -5,0,-6, -4,0,-6, -5,0,-5, -4,0,-5, -5,0,-4, -4,0,-4, -5,0,-3, -4,0,-3, -5,0,-2, -4,0,-2, -5,0,-1, -4,0,-1, -5,0,0, -4,0,0, -5,0,1, -4,0,1, -5,0,2, -4,0,2, -5,0,3, -4,0,3, -5,0,4, -4,0,4, -5,0,5, 
-4,0,5, -5,0,6, -4,0,6, -5,0,7, -4,0,7, -5,0,8, -4,0,8, -5,0,9, -4,0,9, -4,0,-10, -3,0,-10, -4,0,-9, -3,0,-9, -4,0,-8, -3,0,-8, -4,0,-7, -3,0,-7, -4,0,-6, -3,0,-6, -4,0,-5, -3,0,-5, -4,0,-4, -3,0,-4, -4,0,-3, -3,0,-3, -4,0,-2, -3,0,-2, -4,0,-1, -3,0,-1, -4,0,0, -3,0,0, -4,0,1, -3,0,1, -4,0,2, -3,0,2, -4,0,3, -3,0,3, -4,0,4, -3,0,4, -4,0,5, -3,0,5, -4,0,6, -3,0,6, -4,0,7, -3,0,7, -4,0,8, -3,0,8, -4,0,9, -3,0,9, -3,0,-10, -2,0,-10, -3,0,-9, -2,0,-9, -3,0,-8, -2,0,-8, -3,0,-7, -2,0,-7, -3,0,-6, -2,0,-6, -3,0,-5, -2,0,-5, -3,0,-4, -2,0,-4, -3,0,-3, -2,0,-3, -3,0,-2, -2,0,-2, -3,0,-1, -2,0,-1, -3,0,0, -2,0,0, -3,0,1, -2,0,1, -3,0,2, -2,0,2, -3,0,3, -2,0,3, -3,0,4, -2,0,4, -3,0,5, -2,0,5, -3,0,6, -2,0,6, -3,0,7, -2,0,7, -3,0,8, -2,0,8, -3,0,9, -2,0,9, -2,0,-10, -1,0,-10, -2,0,-9, -1,0,-9, -2,0,-8, -1,0,-8, -2,0,-7, -1,0,-7, -2,0,-6, -1,0,-6, -2,0,-5, -1,0,-5, -2,0,-4, -1,0,-4, -2,0,-3, -1,0,-3, -2,0,-2, -1,0,-2, -2,0,-1, -1,0,-1, -2,0,0, -1,0,0, -2,0,1, -1,0,1, -2,0,2, -1,0,2, -2,0,3, -1,0,3, -2,0,4, -1,0,4, -2,0,5, -1,0,5, -2,0,6, -1,0,6, -2,0,7, -1,0,7, -2,0,8, -1,0,8, -2,0,9, -1,0,9, -1,0,-10, 0,0,-10, -1,0,-9, 0,0,-9, -1,0,-8, 0,0,-8, -1,0,-7, 0,0,-7, -1,0,-6, 0,0,-6, -1,0,-5, 0,0,-5, -1,0,-4, 0,0,-4, -1,0,-3, 0,0,-3, -1,0,-2, 0,0,-2, -1,0,-1, 0,0,-1, -1,0,0, 0,0,0, -1,0,1, 0,0,1, -1,0,2, 0,0,2, -1,0,3, 0,0,3, -1,0,4, 0,0,4, -1,0,5, 0,0,5, -1,0,6, 0,0,6, -1,0,7, 0,0,7, -1,0,8, 0,0,8, -1,0,9, 0,0,9, 0,0,-10, 1,0,-10, 0,0,-9, 1,0,-9, 0,0,-8, 1,0,-8, 0,0,-7, 1,0,-7, 0,0,-6, 1,0,-6, 0,0,-5, 1,0,-5, 0,0,-4, 1,0,-4, 0,0,-3, 1,0,-3, 0,0,-2, 1,0,-2, 0,0,-1, 1,0,-1, 0,0,0, 1,0,0, 0,0,1, 1,0,1, 0,0,2, 1,0,2, 0,0,3, 1,0,3, 0,0,4, 1,0,4, 0,0,5, 1,0,5, 0,0,6, 1,0,6, 0,0,7, 1,0,7, 0,0,8, 1,0,8, 0,0,9, 1,0,9, 1,0,-10, 2,0,-10, 1,0,-9, 2,0,-9, 1,0,-8, 2,0,-8, 1,0,-7, 2,0,-7, 1,0,-6, 2,0,-6, 1,0,-5, 2,0,-5, 1,0,-4, 2,0,-4, 1,0,-3, 2,0,-3, 1,0,-2, 2,0,-2, 1,0,-1, 2,0,-1, 1,0,0, 2,0,0, 1,0,1, 2,0,1, 1,0,2, 2,0,2, 1,0,3, 2,0,3, 1,0,4, 2,0,4, 1,0,5, 2,0,5, 1,0,6, 2,0,6, 1,0,7, 2,0,7, 
1,0,8, 2,0,8, 1,0,9, 2,0,9, 2,0,-10, 3,0,-10, 2,0,-9, 3,0,-9, 2,0,-8, 3,0,-8, 2,0,-7, 3,0,-7, 2,0,-6, 3,0,-6, 2,0,-5, 3,0,-5, 2,0,-4, 3,0,-4, 2,0,-3, 3,0,-3, 2,0,-2, 3,0,-2, 2,0,-1, 3,0,-1, 2,0,0, 3,0,0, 2,0,1, 3,0,1, 2,0,2, 3,0,2, 2,0,3, 3,0,3, 2,0,4, 3,0,4, 2,0,5, 3,0,5, 2,0,6, 3,0,6, 2,0,7, 3,0,7, 2,0,8, 3,0,8, 2,0,9, 3,0,9, 3,0,-10, 4,0,-10, 3,0,-9, 4,0,-9, 3,0,-8, 4,0,-8, 3,0,-7, 4,0,-7, 3,0,-6, 4,0,-6, 3,0,-5, 4,0,-5, 3,0,-4, 4,0,-4, 3,0,-3, 4,0,-3, 3,0,-2, 4,0,-2, 3,0,-1, 4,0,-1, 3,0,0, 4,0,0, 3,0,1, 4,0,1, 3,0,2, 4,0,2, 3,0,3, 4,0,3, 3,0,4, 4,0,4, 3,0,5, 4,0,5, 3,0,6, 4,0,6, 3,0,7, 4,0,7, 3,0,8, 4,0,8, 3,0,9, 4,0,9, 4,0,-10, 5,0,-10, 4,0,-9, 5,0,-9, 4,0,-8, 5,0,-8, 4,0,-7, 5,0,-7, 4,0,-6, 5,0,-6, 4,0,-5, 5,0,-5, 4,0,-4, 5,0,-4, 4,0,-3, 5,0,-3, 4,0,-2, 5,0,-2, 4,0,-1, 5,0,-1, 4,0,0, 5,0,0, 4,0,1, 5,0,1, 4,0,2, 5,0,2, 4,0,3, 5,0,3, 4,0,4, 5,0,4, 4,0,5, 5,0,5, 4,0,6, 5,0,6, 4,0,7, 5,0,7, 4,0,8, 5,0,8, 4,0,9, 5,0,9, 5,0,-10, 6,0,-10, 5,0,-9, 6,0,-9, 5,0,-8, 6,0,-8, 5,0,-7, 6,0,-7, 5,0,-6, 6,0,-6, 5,0,-5, 6,0,-5, 5,0,-4, 6,0,-4, 5,0,-3, 6,0,-3, 5,0,-2, 6,0,-2, 5,0,-1, 6,0,-1, 5,0,0, 6,0,0, 5,0,1, 6,0,1, 5,0,2, 6,0,2, 5,0,3, 6,0,3, 5,0,4, 6,0,4, 5,0,5, 6,0,5, 5,0,6, 6,0,6, 5,0,7, 6,0,7, 5,0,8, 6,0,8, 5,0,9, 6,0,9, 6,0,-10, 7,0,-10, 6,0,-9, 7,0,-9, 6,0,-8, 7,0,-8, 6,0,-7, 7,0,-7, 6,0,-6, 7,0,-6, 6,0,-5, 7,0,-5, 6,0,-4, 7,0,-4, 6,0,-3, 7,0,-3, 6,0,-2, 7,0,-2, 6,0,-1, 7,0,-1, 6,0,0, 7,0,0, 6,0,1, 7,0,1, 6,0,2, 7,0,2, 6,0,3, 7,0,3, 6,0,4, 7,0,4, 6,0,5, 7,0,5, 6,0,6, 7,0,6, 6,0,7, 7,0,7, 6,0,8, 7,0,8, 6,0,9, 7,0,9, 7,0,-10, 8,0,-10, 7,0,-9, 8,0,-9, 7,0,-8, 8,0,-8, 7,0,-7, 8,0,-7, 7,0,-6, 8,0,-6, 7,0,-5, 8,0,-5, 7,0,-4, 8,0,-4, 7,0,-3, 8,0,-3, 7,0,-2, 8,0,-2, 7,0,-1, 8,0,-1, 7,0,0, 8,0,0, 7,0,1, 8,0,1, 7,0,2, 8,0,2, 7,0,3, 8,0,3, 7,0,4, 8,0,4, 7,0,5, 8,0,5, 7,0,6, 8,0,6, 7,0,7, 8,0,7, 7,0,8, 8,0,8, 7,0,9, 8,0,9, 8,0,-10, 9,0,-10, 8,0,-9, 9,0,-9, 8,0,-8, 9,0,-8, 8,0,-7, 9,0,-7, 8,0,-6, 9,0,-6, 8,0,-5, 9,0,-5, 8,0,-4, 9,0,-4, 8,0,-3, 9,0,-3, 8,0,-2, 9,0,-2, 8,0,-1, 
9,0,-1, 8,0,0, 9,0,0, 8,0,1, 9,0,1, 8,0,2, 9,0,2, 8,0,3, 9,0,3, 8,0,4, 9,0,4, 8,0,5, 9,0,5, 8,0,6, 9,0,6, 8,0,7, 9,0,7, 8,0,8, 9,0,8, 8,0,9, 9,0,9, 9,0,-10, 10,0,-10, 9,0,-9, 10,0,-9, 9,0,-8, 10,0,-8, 9,0,-7, 10,0,-7, 9,0,-6, 10,0,-6, 9,0,-5, 10,0,-5, 9,0,-4, 10,0,-4, 9,0,-3, 10,0,-3, 9,0,-2, 10,0,-2, 9,0,-1, 10,0,-1, 9,0,0, 10,0,0, 9,0,1, 10,0,1, 9,0,2, 10,0,2, 9,0,3, 10,0,3, 9,0,4, 10,0,4, 9,0,5, 10,0,5, 9,0,6, 10,0,6, 9,0,7, 10,0,7, 9,0,8, 10,0,8, 9,0,9, 10,0,9
};
|
version = '8.1.0'
|
import numpy as np
import time
from djitellopy import tello
from cv2 import cv2
# Connect to the Tello drone and start its video stream.
me = tello.Tello()
me.connect()
# Print battery level so a low charge is obvious before flying.
print(me.get_battery())
me.streamon()
#me.takeoff()
#me.send_rc_control(0, 0, 0, 25)
# Give the drone/stream a moment to stabilize before processing frames.
time.sleep(2.2)
# Working frame size (width, height) in pixels.
w, h = 360, 240
# Face bounding-box area band: inside it the drone holds its distance.
fbRange = [6200, 6800]
# PD gains [kp, kd, unused] for the yaw controller.
pid = [0.4, 0.4, 0]
# Previous horizontal error, used for the derivative term.
pError = 0
def findFace(img):
    """Detect faces in a BGR frame and return the largest one.

    Returns (annotated_img, [[cx, cy], area]) where (cx, cy) is the center
    of the largest detected face and area its bounding-box area, or
    (annotated_img, [[0, 0], 0]) when no face is found.
    """
    # Load the Haar cascade once and cache it on the function; the original
    # re-read the XML file from disk on every frame.
    if not hasattr(findFace, "_cascade"):
        findFace._cascade = cv2.CascadeClassifier("Resources/haarcascade_frontalface_default.xml")
    imgGray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    faces = findFace._cascade.detectMultiScale(imgGray, 1.2, 8)

    myFaceListC = []
    myFaceListArea = []

    # fw/fh instead of w/h so the module-level frame size is not shadowed.
    for (x, y, fw, fh) in faces:
        cv2.rectangle(img, (x, y), (x + fw, y + fh), (0, 0, 255), 2)
        cx = x + fw // 2
        cy = y + fh // 2
        area = fw * fh
        cv2.circle(img, (cx, cy), 5, (0, 255, 0), cv2.FILLED)
        myFaceListC.append([cx, cy])
        myFaceListArea.append(area)

    if len(myFaceListArea) != 0:
        i = myFaceListArea.index(max(myFaceListArea))
        return img, [myFaceListC[i], myFaceListArea[i]]
    else:
        return img, [[0, 0], 0]
def trackFace(info, w, pid, pError):
    """Steer the drone so the detected face stays centered and at a set size.

    info: ([cx, cy], area) from findFace; w: frame width; pid: [kp, kd, _];
    pError: previous horizontal error.  Sends an rc command via the global
    `me` and returns the current error for the next iteration.
    """
    (x, y), area = info

    # PD control on the horizontal offset from frame center -> yaw speed.
    error = x - w // 2
    speed = int(np.clip(pid[0] * error + pid[1] * (error - pError), -100, 100))

    # Forward/backward: hold inside the area dead band, back off when too
    # close, approach when too far (area == 0 means no face -> stay put).
    if fbRange[0] < area < fbRange[1]:
        fb = 0
    elif area > fbRange[1]:
        fb = -20
    elif area != 0 and area < fbRange[0]:
        fb = 20
    else:
        fb = 0

    if x == 0:
        # No detection: zero out rotation and reset the error history.
        speed = 0
        error = 0

    #print(speed, fb)
    me.send_rc_control(0, fb, 0, speed)
    return error
#cap = cv2.VideoCapture(1)
# Main loop: grab a frame from the drone, detect the largest face, show it.
while True:
    #_, img = cap.read()
    cv2.waitKey(1)
    img = me.get_frame_read().frame
    img = cv2.resize(img, (w, h))
    img, info = findFace(img)
    # pError = trackFace( info, w, pid, pError)
    #print("Center", info[0], "Area", info[1])
    cv2.imshow("Output", img)
    # Fixed: cv2.waitkey does not exist (AttributeError at runtime); the
    # correct name is cv2.waitKey.
    if cv2.waitKey(1) & 0xFF == ord('q'):
        #me.land()
        break
|
/**
* bootstrap-table - An extended table to integration with some of the most widely used CSS frameworks. (Supports Bootstrap, Semantic UI, Bulma, Material Design, Foundation)
*
* @version v1.18.1
* @homepage https://bootstrap-table.com
* @author wenzhixin <wenzhixin2010@gmail.com> (http://wenzhixin.net.cn/)
* @license MIT
*/
!function(t,n){"object"==typeof exports&&"undefined"!=typeof module?n(require("jquery")):"function"==typeof define&&define.amd?define(["jquery"],n):n((t=t||self).jQuery)}(this,(function(t){"use strict";t=t&&Object.prototype.hasOwnProperty.call(t,"default")?t.default:t;var n="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{};function r(t,n){return t(n={exports:{}},n.exports),n.exports}var e=function(t){return t&&t.Math==Math&&t},o=e("object"==typeof globalThis&&globalThis)||e("object"==typeof window&&window)||e("object"==typeof self&&self)||e("object"==typeof n&&n)||Function("return this")(),i=function(t){try{return!!t()}catch(t){return!0}},u=!i((function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a})),c={}.propertyIsEnumerable,f=Object.getOwnPropertyDescriptor,a={f:f&&!c.call({1:2},1)?function(t){var n=f(this,t);return!!n&&n.enumerable}:c},l=function(t,n){return{enumerable:!(1&t),configurable:!(2&t),writable:!(4&t),value:n}},s={}.toString,p=function(t){return s.call(t).slice(8,-1)},g="".split,h=i((function(){return!Object("z").propertyIsEnumerable(0)}))?function(t){return"String"==p(t)?g.call(t,""):Object(t)}:Object,y=function(t){if(null==t)throw TypeError("Can't call method on "+t);return t},m=function(t){return h(y(t))},d=function(t){return"object"==typeof t?null!==t:"function"==typeof t},v=function(t,n){if(!d(t))return t;var r,e;if(n&&"function"==typeof(r=t.toString)&&!d(e=r.call(t)))return e;if("function"==typeof(r=t.valueOf)&&!d(e=r.call(t)))return e;if(!n&&"function"==typeof(r=t.toString)&&!d(e=r.call(t)))return e;throw TypeError("Can't convert object to primitive value")},b={}.hasOwnProperty,w=function(t,n){return b.call(t,n)},S=o.document,O=d(S)&&d(S.createElement),P=!u&&!i((function(){return 7!=Object.defineProperty((t="div",O?S.createElement(t):{}),"a",{get:function(){return 7}}).a;var 
t})),T=Object.getOwnPropertyDescriptor,j={f:u?T:function(t,n){if(t=m(t),n=v(n,!0),P)try{return T(t,n)}catch(t){}if(w(t,n))return l(!a.f.call(t,n),t[n])}},A=function(t){if(!d(t))throw TypeError(String(t)+" is not an object");return t},x=Object.defineProperty,E={f:u?x:function(t,n,r){if(A(t),n=v(n,!0),A(r),P)try{return x(t,n,r)}catch(t){}if("get"in r||"set"in r)throw TypeError("Accessors not supported");return"value"in r&&(t[n]=r.value),t}},M=u?function(t,n,r){return E.f(t,n,l(1,r))}:function(t,n,r){return t[n]=r,t},C=function(t,n){try{M(o,t,n)}catch(r){o[t]=n}return n},R="__core-js_shared__",F=o[R]||C(R,{}),q=Function.toString;"function"!=typeof F.inspectSource&&(F.inspectSource=function(t){return q.call(t)});var L,I,N,k,_=F.inspectSource,D=o.WeakMap,z="function"==typeof D&&/native code/.test(_(D)),B=r((function(t){(t.exports=function(t,n){return F[t]||(F[t]=void 0!==n?n:{})})("versions",[]).push({version:"3.6.0",mode:"global",copyright:"© 2019 Denis Pushkarev (zloirock.ru)"})})),G=0,W=Math.random(),H=function(t){return"Symbol("+String(void 0===t?"":t)+")_"+(++G+W).toString(36)},J=B("keys"),K={},Q=o.WeakMap;if(z){var U=new Q,V=U.get,Y=U.has,X=U.set;L=function(t,n){return X.call(U,t,n),n},I=function(t){return V.call(U,t)||{}},N=function(t){return Y.call(U,t)}}else{var Z=J[k="state"]||(J[k]=H(k));K[Z]=!0,L=function(t,n){return M(t,Z,n),n},I=function(t){return w(t,Z)?t[Z]:{}},N=function(t){return w(t,Z)}}var $,tt,nt={set:L,get:I,has:N,enforce:function(t){return N(t)?I(t):L(t,{})},getterFor:function(t){return function(n){var r;if(!d(n)||(r=I(n)).type!==t)throw TypeError("Incompatible receiver, "+t+" required");return r}}},rt=r((function(t){var n=nt.get,r=nt.enforce,e=String(String).split("String");(t.exports=function(t,n,i,u){var c=!!u&&!!u.unsafe,f=!!u&&!!u.enumerable,a=!!u&&!!u.noTargetGet;"function"==typeof i&&("string"!=typeof n||w(i,"name")||M(i,"name",n),r(i).source=e.join("string"==typeof n?n:"")),t!==o?(c?!a&&t[n]&&(f=!0):delete 
t[n],f?t[n]=i:M(t,n,i)):f?t[n]=i:C(n,i)})(Function.prototype,"toString",(function(){return"function"==typeof this&&n(this).source||_(this)}))})),et=o,ot=function(t){return"function"==typeof t?t:void 0},it=function(t,n){return arguments.length<2?ot(et[t])||ot(o[t]):et[t]&&et[t][n]||o[t]&&o[t][n]},ut=Math.ceil,ct=Math.floor,ft=function(t){return isNaN(t=+t)?0:(t>0?ct:ut)(t)},at=Math.min,lt=function(t){return t>0?at(ft(t),9007199254740991):0},st=Math.max,pt=Math.min,gt=function(t){return function(n,r,e){var o,i=m(n),u=lt(i.length),c=function(t,n){var r=ft(t);return r<0?st(r+n,0):pt(r,n)}(e,u);if(t&&r!=r){for(;u>c;)if((o=i[c++])!=o)return!0}else for(;u>c;c++)if((t||c in i)&&i[c]===r)return t||c||0;return!t&&-1}},ht={includes:gt(!0),indexOf:gt(!1)}.indexOf,yt=["constructor","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toLocaleString","toString","valueOf"].concat("length","prototype"),mt={f:Object.getOwnPropertyNames||function(t){return function(t,n){var r,e=m(t),o=0,i=[];for(r in e)!w(K,r)&&w(e,r)&&i.push(r);for(;n.length>o;)w(e,r=n[o++])&&(~ht(i,r)||i.push(r));return i}(t,yt)}},dt={f:Object.getOwnPropertySymbols},vt=it("Reflect","ownKeys")||function(t){var n=mt.f(A(t)),r=dt.f;return r?n.concat(r(t)):n},bt=function(t,n){for(var r=vt(n),e=E.f,o=j.f,i=0;i<r.length;i++){var u=r[i];w(t,u)||e(t,u,o(n,u))}},wt=/#|\.prototype\./,St=function(t,n){var r=Pt[Ot(t)];return r==jt||r!=Tt&&("function"==typeof n?i(n):!!n)},Ot=St.normalize=function(t){return String(t).replace(wt,".").toLowerCase()},Pt=St.data={},Tt=St.NATIVE="N",jt=St.POLYFILL="P",At=St,xt=j.f,Et=Array.isArray||function(t){return"Array"==p(t)},Mt=function(t){return Object(y(t))},Ct=function(t,n,r){var e=v(n);e in t?E.f(t,e,l(0,r)):t[e]=r},Rt=!!Object.getOwnPropertySymbols&&!i((function(){return!String(Symbol())})),Ft=Rt&&!Symbol.sham&&"symbol"==typeof Symbol(),qt=B("wks"),Lt=o.Symbol,It=Ft?Lt:H,Nt=function(t){return 
w(qt,t)||(Rt&&w(Lt,t)?qt[t]=Lt[t]:qt[t]=It("Symbol."+t)),qt[t]},kt=Nt("species"),_t=function(t,n){var r;return Et(t)&&("function"!=typeof(r=t.constructor)||r!==Array&&!Et(r.prototype)?d(r)&&null===(r=r[kt])&&(r=void 0):r=void 0),new(void 0===r?Array:r)(0===n?0:n)},Dt=it("navigator","userAgent")||"",zt=o.process,Bt=zt&&zt.versions,Gt=Bt&&Bt.v8;Gt?tt=($=Gt.split("."))[0]+$[1]:Dt&&(!($=Dt.match(/Edge\/(\d+)/))||$[1]>=74)&&($=Dt.match(/Chrome\/(\d+)/))&&(tt=$[1]);var Wt,Ht=tt&&+tt,Jt=Nt("species"),Kt=Nt("isConcatSpreadable"),Qt=9007199254740991,Ut="Maximum allowed index exceeded",Vt=Ht>=51||!i((function(){var t=[];return t[Kt]=!1,t.concat()[0]!==t})),Yt=(Wt="concat",Ht>=51||!i((function(){var t=[];return(t.constructor={})[Jt]=function(){return{foo:1}},1!==t[Wt](Boolean).foo}))),Xt=function(t){if(!d(t))return!1;var n=t[Kt];return void 0!==n?!!n:Et(t)};!function(t,n){var r,e,i,u,c,f=t.target,a=t.global,l=t.stat;if(r=a?o:l?o[f]||C(f,{}):(o[f]||{}).prototype)for(e in n){if(u=n[e],i=t.noTargetGet?(c=xt(r,e))&&c.value:r[e],!At(a?e:f+(l?".":"#")+e,t.forced)&&void 0!==i){if(typeof u==typeof i)continue;bt(u,i)}(t.sham||i&&i.sham)&&M(u,"sham",!0),rt(r,e,u,t)}}({target:"Array",proto:!0,forced:!Vt||!Yt},{concat:function(t){var n,r,e,o,i,u=Mt(this),c=_t(u,0),f=0;for(n=-1,e=arguments.length;n<e;n++)if(Xt(i=-1===n?u:arguments[n])){if(f+(o=lt(i.length))>Qt)throw TypeError(Ut);for(r=0;r<o;r++,f++)r in i&&Ct(c,f,i[r])}else{if(f>=Qt)throw TypeError(Ut);Ct(c,f++,i)}return c.length=f,c}}),t.fn.bootstrapTable.locales["fr-FR"]=t.fn.bootstrapTable.locales.fr={formatCopyRows:function(){return"Copier les lignes"},formatPrint:function(){return"Imprimer"},formatLoadingMessage:function(){return"Chargement en cours"},formatRecordsPerPage:function(t){return"".concat(t," lignes par page")},formatShowingRows:function(t,n,r,e){return void 0!==e&&e>0&&e>r?"Affiche de ".concat(t," à ").concat(n," sur ").concat(r," lignes (filtrés à partir de ").concat(e," lignes)"):"Affiche de ".concat(t," à 
").concat(n," sur ").concat(r," lignes")},formatSRPaginationPreText:function(){return"page précédente"},formatSRPaginationPageText:function(t){return"vers la page ".concat(t)},formatSRPaginationNextText:function(){return"page suivante"},formatDetailPagination:function(t){return"Affiche ".concat(t," lignes")},formatClearSearch:function(){return"Effacer la recherche"},formatSearch:function(){return"Recherche"},formatNoMatches:function(){return"Aucun résultat"},formatPaginationSwitch:function(){return"Masquer/Afficher la pagination"},formatPaginationSwitchDown:function(){return"Afficher la pagination"},formatPaginationSwitchUp:function(){return"Masquer la pagination"},formatRefresh:function(){return"Actualiser"},formatToggle:function(){return"Basculer"},formatToggleOn:function(){return"Afficher la vue carte"},formatToggleOff:function(){return"Masquer la vue carte"},formatColumns:function(){return"Colonnes"},formatColumnsToggleAll:function(){return"Tout basculer"},formatFullscreen:function(){return"Plein écran"},formatAllRows:function(){return"Tout"},formatAutoRefresh:function(){return"Actualisation automatique"},formatExport:function(){return"Exporter les données"},formatJumpTo:function(){return"ALLER"},formatAdvancedSearch:function(){return"Recherche avancée"},formatAdvancedCloseButton:function(){return"Fermer"},formatFilterControlSwitch:function(){return"Masquer/Afficher les contrôles"},formatFilterControlSwitchHide:function(){return"Masquer les contrôles"},formatFilterControlSwitchShow:function(){return"Afficher les contrôles"}},t.extend(t.fn.bootstrapTable.defaults,t.fn.bootstrapTable.locales["fr-FR"])}));
|
from pagrant.vendors.docker.utils.utils import (
compare_version, convert_port_bindings, mkbuildcontext, ping, tar
) # flake8: noqa
|
$(document).ready(function() {
    // Client-side pager for the #data table: show `rowsShown` rows per
    // page and append one numbered nav link per page after the table.
    $('#data').after('<div id="nav"></div>');
    var rowsShown = 6;
    var rowsTotal = $('#data tbody tr').length;
    // Round up explicitly so a trailing partial page still gets a link
    // (the original relied on the loop condition coercing a fraction).
    var numPages = Math.ceil(rowsTotal / rowsShown);
    // `var i` — the original omitted it and leaked `i` as a global.
    for (var i = 0; i < numPages; i++) {
        var pageNum = i + 1;
        // rel carries the zero-based page index used for slicing below.
        $('#nav').append('<a href="#" rel="' + i + '">' + pageNum + '</a> ');
    }
    // Start on page one: hide everything, reveal the first slice.
    $('#data tbody tr').hide();
    $('#data tbody tr').slice(0, rowsShown).show();
    $('#nav a:first').addClass('active');
    $('#nav a').bind('click', function() {
        $('#nav a').removeClass('active');
        $(this).addClass('active');
        // attr() returns a string; parse it so the arithmetic is numeric.
        var currPage = parseInt($(this).attr('rel'), 10);
        var startItem = currPage * rowsShown;
        var endItem = startItem + rowsShown;
        // Fade the selected slice of rows in; all others stay hidden.
        $('#data tbody tr').css('opacity', '0.0').hide().slice(startItem, endItem).
            css('display', 'table-row').animate({
                opacity: 1
            }, 300);
    });
});
|
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.urlresolvers import resolve
import sys, os
from distutils import dir_util, file_util
class Command(BaseCommand):
    # No options beyond BaseCommand's built-ins.
    option_list = BaseCommand.option_list + (
    )
    help = 'Install Aadhaar fixtures'

    def findpath(self, path):
        """Resolve `path` relative to the directory two levels above this file."""
        parent_dir = os.path.dirname(os.path.dirname(__file__))
        return os.path.abspath(os.path.join(parent_dir, path))

    def handle(self, **options):
        """Copy the app's bundled fixtures/ tree into the current project."""
        working_directory = os.getcwd()
        module_path = self.findpath("..")
        # update=1: only overwrite destination files older than the source.
        dir_util.copy_tree(module_path + "/fixtures",
                           working_directory + "/fixtures", update=1)
        # print() with parentheses is valid in both Python 2 (parenthesized
        # expression) and Python 3; the original bare `print` statement is a
        # SyntaxError under Python 3.
        print("Installed configuration files required for aadhaar authentication")
|
/* Filter menu operator messages */
if (kendo.ui.FilterCell) {
// Russian labels for the FilterCell (inline row filter) comparison
// operators, deep-merged ($.extend(true, ...)) over Kendo's defaults.
kendo.ui.FilterCell.prototype.options.operators =
$.extend(true, kendo.ui.FilterCell.prototype.options.operators,{
"date": {
"eq": "равна",
"gte": "после или равна",
"gt": "после",
"lte": "до или равна",
"lt": "до",
"neq": "не равна"
},
"number": {
"eq": "равно",
"gte": "больше или равно",
"gt": "больше",
"lte": "меньше или равно",
"lt": "меньше",
"neq": "не равно"
},
"string": {
"endswith": "оканчивается на",
"eq": "равно",
"neq": "не равно",
// NOTE(review): "начинающимися на" / "содержащими" use unusual case forms
// for "starts with" / "contains" — confirm with a native speaker.
"startswith": "начинающимися на",
"contains": "содержащими",
"doesnotcontain": "не содержит"
},
"enums": {
"eq": "равно",
"neq": "не равно"
}
});
}
/* Filter menu operator messages */
if (kendo.ui.FilterMenu) {
// Russian labels for the FilterMenu (popup filter) comparison operators,
// deep-merged over Kendo's defaults; mirrors the FilterCell set above.
kendo.ui.FilterMenu.prototype.options.operators =
$.extend(true, kendo.ui.FilterMenu.prototype.options.operators,{
"date": {
"eq": "равна",
"gte": "после или равна",
"gt": "после",
"lte": "до или равна",
"lt": "до",
"neq": "не равна"
},
"number": {
"eq": "равно",
"gte": "больше или равно",
"gt": "больше",
"lte": "меньше или равно",
"lt": "меньше",
"neq": "не равно"
},
"string": {
"endswith": "оканчивается на",
"eq": "равно",
"neq": "не равно",
// NOTE(review): same unusual case forms as in the FilterCell operators —
// verify before changing either copy.
"startswith": "начинающимися на",
"contains": "содержащими",
"doesnotcontain": "не содержит"
},
"enums": {
"eq": "равно",
"neq": "не равно"
}
});
}
/* ColumnMenu messages */
if (kendo.ui.ColumnMenu) {
// Russian labels for the grid column menu entries.
kendo.ui.ColumnMenu.prototype.options.messages =
$.extend(true, kendo.ui.ColumnMenu.prototype.options.messages,{
"columns": "Колонны",
"sortAscending": "Сортировка по возрастанию",
"sortDescending": "Сортировка по убыванию",
"settings": "Параметры столбцов",
// NOTE(review): "Cделанный" appears to start with a Latin "C" instead of
// the Cyrillic "С", and is an odd rendering of "Done" (usually "Готово");
// likewise "Запирать"/"Отпереть" for lock/unlock read as machine
// translation — confirm with a native speaker before changing the strings.
"done": "Cделанный",
"lock": "Запирать",
"unlock": "Отпереть",
"filter": "Фильтровать"
});
}
/* RecurrenceEditor messages */
if (kendo.ui.RecurrenceEditor) {
// RecurrenceEditor messages: every value below is still the English
// default — this widget has not been translated to Russian yet.
kendo.ui.RecurrenceEditor.prototype.options.messages =
$.extend(true, kendo.ui.RecurrenceEditor.prototype.options.messages,{
"daily": {
"interval": "days(s)",
"repeatEvery": "Repeat every:"
},
"end": {
"after": "After",
"occurrence": "occurrence(s)",
"label": "End:",
"never": "Never",
"on": "On",
"mobileLabel": "Ends"
},
"frequencies": {
"daily": "Daily",
"monthly": "Monthly",
"never": "Never",
"weekly": "Weekly",
"yearly": "Yearly"
},
"monthly": {
"day": "Day",
"interval": "month(s)",
"repeatEvery": "Repeat every:",
"repeatOn": "Repeat on:"
},
"offsetPositions": {
"first": "first",
"fourth": "fourth",
"last": "last",
"second": "second",
"third": "third"
},
"weekly": {
"repeatEvery": "Repeat every:",
"repeatOn": "Repeat on:",
"interval": "week(s)"
},
"yearly": {
"of": "of",
"repeatEvery": "Repeat every:",
"repeatOn": "Repeat on:",
"interval": "year(s)"
},
"weekdays": {
"day": "day",
"weekday": "weekday",
"weekend": "weekend day"
}
});
}
/* Grid messages */
if (kendo.ui.Grid) {
// Russian labels for Grid command buttons and the delete confirmation.
kendo.ui.Grid.prototype.options.messages =
$.extend(true, kendo.ui.Grid.prototype.options.messages,{
"commands": {
"create": "Добавить",
"destroy": "Удалить",
"canceledit": "Отмена",
"update": "Обновить",
"edit": "Изменить",
"select": "Выбрать",
"cancel": "Отменить изменения",
"save": "Сохранить изменения"
},
"editable": {
"confirmation": "Вы уверены, что хотите удалить эту запись?",
"cancelDelete": "Отмена",
"confirmDelete": "Удалить"
}
});
}
/* Pager messages */
if (kendo.ui.Pager) {
// Russian labels for the grid pager; {0}/{1}/{2} are format placeholders
// filled in by Kendo at render time.
kendo.ui.Pager.prototype.options.messages =
$.extend(true, kendo.ui.Pager.prototype.options.messages,{
"page": "Страница",
"display": "Отображены записи {0} - {1} из {2}",
"of": "из {0}",
"empty": "Нет записей для отображения",
"refresh": "Обновить",
"first": "Вернуться на первую страницу",
"itemsPerPage": "элементов на странице",
"last": "К последней странице",
"next": "Перейдите на следующую страницу",
"previous": "Перейти на предыдущую страницу",
"morePages": "Больше страниц"
});
}
/* FilterCell messages */
if (kendo.ui.FilterCell) {
// Russian labels for the FilterCell UI chrome (buttons, boolean values).
kendo.ui.FilterCell.prototype.options.messages =
$.extend(true, kendo.ui.FilterCell.prototype.options.messages,{
"filter": "фильтровать",
"clear": "очистить фильтр",
"isFalse": "ложь",
"isTrue": "истина",
"operator": "Оператор"
});
}
/* FilterMenu messages */
if (kendo.ui.FilterMenu) {
// Russian labels for the FilterMenu popup chrome (buttons, connectors,
// boolean values, field captions).
kendo.ui.FilterMenu.prototype.options.messages =
$.extend(true, kendo.ui.FilterMenu.prototype.options.messages,{
"filter": "фильтровать",
"and": "И",
"clear": "очистить фильтр",
"info": "Строки со значениями",
"selectValue": "-выберите-",
"isFalse": "ложь",
"isTrue": "истина",
"or": "или",
"cancel": "Отмена",
"operator": "Оператор",
"value": "Значение"
});
}
/* Groupable messages */
if (kendo.ui.Groupable) {
// Russian hint shown in the empty group-by drop area above the grid.
kendo.ui.Groupable.prototype.options.messages =
$.extend(true, kendo.ui.Groupable.prototype.options.messages,{
"empty": "Переместите сюда заголовок колонки, чтобы сгрупировать записи из этой колонки"
});
}
/* Editor messages */
if (kendo.ui.Editor) {
// Russian labels for the Editor toolbar and dialogs, deep-merged over
// Kendo's defaults.  Entries from "addColumnLeft" down are still the
// untranslated English defaults.
kendo.ui.Editor.prototype.options.messages =
$.extend(true, kendo.ui.Editor.prototype.options.messages,{
"bold": "Полужирный",
"createLink": "Вставить гиперссылку",
"fontName": "Шрифт",
"fontNameInherit": "(шрифт как в документе)",
"fontSize": "Выбрать размер шрифта",
"fontSizeInherit": "(размер как в документе)",
// NOTE(review): "Текст изображения" ("image text") looks like a
// mistranslation for formatBlock (paragraph format) — confirm before changing.
"formatBlock": "Текст изображения",
"indent": "Увеличить отступ",
"insertHtml": "Вставить HTML",
"insertImage": "Изображение",
"insertOrderedList": "Нумерованный список",
// Fixed: the original read "Маркированныйсписок" with the space missing.
"insertUnorderedList": "Маркированный список",
"italic": "Курсив",
"justifyCenter": "По центру",
"justifyFull": "По ширине",
"justifyLeft": "Влево",
"justifyRight": "Вправо",
"outdent": "Уменьшить отступ",
"strikethrough": "Зачеркнутый",
"styles": "Стиль",
"subscript": "Под строкой",
"superscript": "Над строкой",
"underline": "Подчеркнутый",
"unlink": "Удалить гиперссылку",
"dialogButtonSeparator": "или",
"dialogCancel": "Отмена",
"dialogInsert": "Вставить",
"imageAltText": "Alternate text",
"imageWebAddress": "Веб адрес",
"linkOpenInNewWindow": "Открыть в новом окне",
"linkText": "Текст",
"linkToolTip": "Всплывающая подсказка",
"linkWebAddress": "Веб адрес",
"search": "Поиск",
"createTable": "Вставить таблицу",
"addColumnLeft": "Add column on the left",
"addColumnRight": "Add column on the right",
"addRowAbove": "Add row above",
"addRowBelow": "Add row below",
"deleteColumn": "Delete column",
"deleteRow": "Delete row",
"backColor": "Background color",
"deleteFile": "Are you sure you want to delete \"{0}\"?",
"directoryNotFound": "A directory with this name was not found.",
"dropFilesHere": "drop files here to upload",
"emptyFolder": "Empty Folder",
"foreColor": "Color",
"invalidFileType": "The selected file \"{0}\" is not valid. Supported file types are {1}.",
"orderBy": "Arrange by:",
"orderByName": "Name",
"orderBySize": "Size",
// Fixed: the original message began with a stray apostrophe ("'A file...").
"overwriteFile": "A file with name \"{0}\" already exists in the current directory. Do you want to overwrite it?",
"uploadFile": "Upload",
"formatting": "Format",
"viewHtml": "View HTML",
"dialogUpdate": "Update",
"insertFile": "Insert file"
});
}
/* Upload messages */
if (kendo.ui.Upload) {
// Russian labels for the Upload widget; the two "headerStatus*" entries
// at the bottom are still the untranslated English defaults.
kendo.ui.Upload.prototype.options.localization =
$.extend(true, kendo.ui.Upload.prototype.options.localization,{
"cancel": "Отменить загрузку",
"dropFilesHere": "перетащите сюда файлы для загрузки",
"remove": "Удалить",
"retry": "Повторить",
"select": "Выбрать...",
"statusFailed": "загрузка прервана",
"statusUploaded": "загружено",
"statusUploading": "загружается",
"uploadSelectedFiles": "Загрузить выбранные файлы",
"headerStatusUploaded": "Done",
"headerStatusUploading": "Uploading..."
});
}
/* Scheduler messages */
if (kendo.ui.Scheduler) {
// Scheduler messages: mostly still the English defaults — only "cancel"
// has been translated so far.
kendo.ui.Scheduler.prototype.options.messages =
$.extend(true, kendo.ui.Scheduler.prototype.options.messages,{
"allDay": "all day",
"cancel": "Отмена",
"confirmation": "Are you sure you want to delete this event?",
"date": "Date",
"destroy": "Delete",
"editor": {
"allDayEvent": "All day event",
"description": "Description",
"editorTitle": "Event",
"end": "End",
"endTimezone": "End timezone",
"repeat": "Repeat",
"separateTimezones": "Use separate start and end time zones",
"start": "Start",
"startTimezone": "Start timezone",
"timezone": " ",
"timezoneEditorButton": "Time zone",
"timezoneEditorTitle": "Timezones",
"title": "Title",
"noTimezone": "No timezone"
},
"event": "Event",
"recurrenceMessages": {
"deleteRecurring": "Do you want to delete only this event occurrence or the whole series?",
"deleteWindowOccurrence": "Delete current occurrence",
"deleteWindowSeries": "Delete the series",
"deleteWindowTitle": "Delete Recurring Item",
"editRecurring": "Do you want to edit only this event occurrence or the whole series?",
"editWindowOccurrence": "Edit current occurrence",
"editWindowSeries": "Edit the series",
"editWindowTitle": "Edit Recurring Item"
},
"save": "Save",
"time": "Time",
"today": "Today",
"views": {
"agenda": "Agenda",
"day": "Day",
"month": "Month",
"week": "Week",
"workWeek": "Work Week"
},
"deleteWindowTitle": "Delete event",
"showFullDay": "Show full day",
"showWorkDay": "Show business hours"
});
}
/* Validator messages */
if (kendo.ui.Validator) {
// Russian validation messages; {0} is the field name, {1} the limit value.
kendo.ui.Validator.prototype.options.messages =
$.extend(true, kendo.ui.Validator.prototype.options.messages,{
"required": "{0} обязателен",
"pattern": "{0} не верен",
"min": "{0} должен быть больше или равен {1}",
"max": "{0} должен быть меньше или равен {1}",
"step": "{0} не верен",
"email": "{0} не корректный email",
"url": "{0} не корректный URL",
"date": "{0} не корректная дата"
});
}
|
var fs = require('fs');
var path = require('path');
var newTestFile = 'hello2_spec.js';
var newTestBody = 'describe("hello2", function() {\n' +
' it("should say hello 2", function() {\n' +
' expect(hello("2")).toBe("hello 2");\n' +
' });\n' +
'});';
module.exports = {
framework: 'jasmine',
// executing js function as a hook
// creating new js file before testing starts
before_tests: function(config, data, callback) {
fs.writeFile(path.join(__dirname, newTestFile), newTestBody, function(err) {
callback(err);
});
},
// and using regular string as a shell command
// test the new file exists
after_tests: 'test -f ' + newTestFile,
// by default it sends all js files
// so `testem.js` won't conflict with framework resource
// explicitly specify test files
"src_files": [
"hello*.js"
]
}
|