prompt large_stringlengths 70 991k | completion large_stringlengths 0 1.02k |
|---|---|
<|file_name|>ISensorListener.java<|end_file_name|><|fim▁begin|>package com.touchableheroes.drafts.app.sensors;
/**
* Created by asiebert on 06.12.14.
*/
public class ISensorListener {
<|fim▁hole|><|fim▁end|> | } |
<|file_name|>hyperbolictangent.rs<|end_file_name|><|fim▁begin|>use std::f64;
use activation::Activation;
#[derive(Copy, Clone)]
pub struct HyperbolicTangent;
impl HyperbolicTangent {
pub fn new() -> HyperbolicTangent {
return HyperbolicTangent;
}
}
impl Activation for HyperbolicTangent {
/// Calculates the tanh of input `x`
fn calc(&self, x: Vec<f64>) -> Vec<f64> {
x.iter().map(|n| n.tanh()).collect::<Vec<_>>()
}
/// Calculates the Derivative tanh of input `x`
fn derivative(&self, x: Vec<f64>) -> Vec<f64> {
x.iter()
.map(|n| {
let tanh_factor = n.tanh();
1f64 - (tanh_factor * tanh_factor)
})
.collect::<Vec<_>>()
}
}
#[cfg(test)]
mod tests {
use super::Activation;
use super::HyperbolicTangent;
#[test]
fn tanh_test() {
let activation = HyperbolicTangent::new();
assert_approx_eq!(activation.calc(vec![3f64])[0], 0.995054754f64);<|fim▁hole|> #[test]
fn tanh_derivative_test() {
let activation = HyperbolicTangent::new();
assert_approx_eq!(activation.derivative(vec![3f64])[0], 0.0098660372f64);
}
}<|fim▁end|> | }
|
<|file_name|>server.js<|end_file_name|><|fim▁begin|>import Express from 'express';
import compression from 'compression';
import mongoose from 'mongoose';
import bodyParser from 'body-parser';
import path from 'path';
import IntlWrapper from '../client/modules/Intl/IntlWrapper';
// Webpack Requirements
import webpack from 'webpack';
import config from '../webpack.config.dev';
import webpackDevMiddleware from 'webpack-dev-middleware';
import webpackHotMiddleware from 'webpack-hot-middleware';
// Initialize the Express App
const app = new Express();
// Run Webpack dev server in development mode
if (process.env.NODE_ENV === 'development') {
const compiler = webpack(config);
app.use(webpackDevMiddleware(compiler, { noInfo: true, publicPath: config.output.publicPath }));
app.use(webpackHotMiddleware(compiler));
}
// React And Redux Setup
import { configureStore } from '../client/store';
import { Provider } from 'react-redux';
import React from 'react';
import { renderToString } from 'react-dom/server';
import { match, RouterContext } from 'react-router';
import Helmet from 'react-helmet';
// Import required modules
import routes from '../client/routes';
import { fetchComponentData } from './util/fetchData';
import posts from './routes/post.routes';
import medicalRights from './routes/medicalrights.routes';
import populateDB from './routes/populateDB.routes';
import dummyData from './dummyData';
import serverConfig from './config';
// Set native promises as mongoose promise
mongoose.Promise = global.Promise;<|fim▁hole|> console.error('Please make sure Mongodb is installed and running!'); // eslint-disable-line no-console
throw error;
}
// feed some dummy data in DB.
//dummyData();
});
// Apply body Parser and server public assets and routes
app.use(compression());
app.use(bodyParser.json({ limit: '20mb' }));
app.use(bodyParser.urlencoded({ limit: '20mb', extended: false }));
app.use(Express.static(path.resolve(__dirname, '../dist')));
app.use('/api', posts);
app.use('/api', medicalRights);
app.use('/api', populateDB);
// Render Initial HTML
const renderFullPage = (html, initialState) => {
const head = Helmet.rewind();
// Import Manifests
const assetsManifest = process.env.webpackAssets && JSON.parse(process.env.webpackAssets);
const chunkManifest = process.env.webpackChunkAssets && JSON.parse(process.env.webpackChunkAssets);
return `
<!doctype html>
<html>
<head>
${head.base.toString()}
${head.title.toString()}
${head.meta.toString()}
${head.link.toString()}
${head.script.toString()}
${process.env.NODE_ENV === 'production' ? `<link rel='stylesheet' href='${assetsManifest['/app.css']}' />` : ''}
<link href='https://fonts.googleapis.com/css?family=Lato:400,300,700' rel='stylesheet' type='text/css'/>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/latest/css/bootstrap.min.css" type='text/css'>
<link rel="shortcut icon" href="http://res.cloudinary.com/hashnode/image/upload/v1455629445/static_imgs/mern/mern-favicon-circle-fill.png" type="image/png" />
<style>
/* local page styles */
html h1 {
font-size: 26px;
margin-left: 10px;
}
html h2 {
font-size: 22px;
margin-left: 10px;
}
html h3 {
font-size: 14px;
margin-left: 10px;
}
html h4 {
font-size: 16px;
}
.progtrckr {
text-align: center;
padding-bottom: 16px;
// border-bottom: solid 1px;
}
.progtrckr li {
margin-bottom: 10px;
}
.val-err-tooltip {
background-color: red;
padding: 3px 5px 3px 10px;
font-size: 14px;
color: #fff;
}
.step {
// background-color: #ccc;
border:1px solid #e5e5e5;
min-height: 437px;
padding: 10px;
max-width: 815px;
}
html .row, html .form-horizontal .form-group {
margin: 0;
}
.footer-buttons {
margin-top: 10px;
margin-bottom: 50px;
}
html .step3 label, html .step4 label {
font-size: 20px;
text-align: left;
}
html .form-horizontal .control-label {
text-align: left;
}
.review .txt {
font-size: 20px;
text-align: left;
margin: 0;
padding: 0;
}
html body .saving {
background-color: #5cb85c;
width: 90%;
padding: 5px;
font-size: 16px;
}
code {
position: relative;
left: 12px;
line-height: 25px;
}
.eg-jump-lnk {
margin-top: 50px;
font-style: italic;
}
.lib-version {
font-size: 12px;
background-color: rgba(255, 255, 0, 0.38);
position: absolute;
right: 10px;
top: 10px;
padding: 5px;
}
html .content {
margin-left: 10px;
}
span.red {
color: #d9534f;
}
span.green {
color: #3c763d;
}
span.bold {
font-weight: bold;
}
html .hoc-alert {
margin-top: 20px;
}
html .form-block-holder {
margin-top: 20px !important;
}
ol.progtrckr {
margin: 0;
padding-bottom: 2.2rem;
list-style-type: none;
}
ol.progtrckr li {
display: inline-block;
text-align: center;
line-height: 4.5rem;
padding: 0 0.7rem;
cursor: pointer;
}
ol.progtrckr li span {
padding: 0 1.5rem;
}
@media (max-width: 650px) {
.progtrckr li span {
display: none;
}
}
.progtrckr em {
display: none;
font-weight: 700;
padding-left: 1rem;
}
@media (max-width: 650px) {
.progtrckr em {
display: inline;
}
border-bottom: solid 1px;
}
@media (max-width: 650px) {
.step {
max-height=320px;
min-height=437px;
min-width=300px;
}
}
}
ol.progtrckr li.progtrckr-todo {
color: silver;
border-bottom: 4px solid silver;
}
ol.progtrckr li.progtrckr-doing {
color: black;
border-bottom: 4px solid #33C3F0;
}
ol.progtrckr li.progtrckr-done {
color: black;
border-bottom: 4px solid #33C3F0;
}
ol.progtrckr li:after {
content: "\\00a0\\00a0";
}
ol.progtrckr li:before {
position: relative;
bottom: -3.7rem;
float: left;
left: 50%;
}
ol.progtrckr li.progtrckr-todo:before {
content: "\\039F";
color: silver;
background-color: white;
width: 1.2em;
line-height: 1.4em;
}
ol.progtrckr li.progtrckr-todo:hover:before {
color: #0FA0CE;
}
ol.progtrckr li.progtrckr-doing:before {
content: "\\2022";
color: white;
background-color: #33C3F0;
width: 1.2em;
line-height: 1.2em;
border-radius: 1.2em;
}
ol.progtrckr li.progtrckr-doing:hover:before {
color: #0FA0CE;
}
ol.progtrckr li.progtrckr-done:before {
content: "\\2713";
color: white;
background-color: #33C3F0;
width: 1.2em;
line-height: 1.2em;
border-radius: 1.2em;
}
ol.progtrckr li.progtrckr-done:hover:before {
color: #0FA0CE;
}
</style>
</head>
<body>
<div id="root">${html}</div>
<script>
window.__INITIAL_STATE__ = ${JSON.stringify(initialState)};
${process.env.NODE_ENV === 'production' ?
`//<![CDATA[
window.webpackManifest = ${JSON.stringify(chunkManifest)};
//]]>` : ''}
</script>
<script src='${process.env.NODE_ENV === 'production' ? assetsManifest['/vendor.js'] : '/vendor.js'}'></script>
<script src='${process.env.NODE_ENV === 'production' ? assetsManifest['/app.js'] : '/app.js'}'></script>
</body>
</html>
`;
};
const renderError = err => {
const softTab = '    ';
const errTrace = process.env.NODE_ENV !== 'production' ?
`:<br><br><pre style="color:red">${softTab}${err.stack.replace(/\n/g, `<br>${softTab}`)}</pre>` : '';
return renderFullPage(`Server Error${errTrace}`, {});
};
// Server Side Rendering based on routes matched by React-router.
app.use((req, res, next) => {
match({ routes, location: req.url }, (err, redirectLocation, renderProps) => {
if (err) {
return res.status(500).end(renderError(err));
}
if (redirectLocation) {
return res.redirect(302, redirectLocation.pathname + redirectLocation.search);
}
if (!renderProps) {
return next();
}
const store = configureStore();
return fetchComponentData(store, renderProps.components, renderProps.params)
.then(() => {
const initialView = renderToString(
<Provider store={store}>
<IntlWrapper>
<RouterContext {...renderProps} />
</IntlWrapper>
</Provider>
);
const finalState = store.getState();
res
.set('Content-Type', 'text/html')
.status(200)
.end(renderFullPage(initialView, finalState));
})
.catch((error) => next(error));
});
});
// start app
app.listen(serverConfig.port, (error) => {
if (!error) {
console.log(`MERN is running on port: ${serverConfig.port}! Build something amazing!`); // eslint-disable-line
}
});
export default app;<|fim▁end|> |
// MongoDB Connection
mongoose.connect(serverConfig.mongoURL, (error) => {
if (error) { |
<|file_name|>MetaThemeMenuItemUI.java<|end_file_name|><|fim▁begin|>/*
* This file is part of MetaTheme.
* Copyright (c) 2004 Martin Dvorak <jezek2@advel.cz>
*
* MetaTheme is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* MetaTheme is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with MetaTheme; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package metatheme;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.KeyEvent;
import javax.swing.*;
import javax.swing.plaf.*;
import javax.swing.plaf.basic.BasicMenuItemUI;
import javax.swing.plaf.basic.BasicGraphicsUtils;
public class MetaThemeMenuItemUI extends BasicMenuItemUI implements MT {
private ThemeEngine engine;
private JMenuItem mi;
private int type;
public MetaThemeMenuItemUI(JMenuItem i) {
super();
engine = ThemeEngine.get();
mi = i;
type = MT_MENU_ITEM;
if (mi instanceof JCheckBoxMenuItem) {
type = MT_MENU_ITEM_CHECK;
}
else if (mi instanceof JRadioButtonMenuItem) {
type = MT_MENU_ITEM_RADIO;
}
else if (mi instanceof JMenu) {
type = MT_MENU_ITEM_ARROW;
}
}
public static ComponentUI createUI(JComponent c) {
return new MetaThemeMenuItemUI((JMenuItem)c);
}
protected void paintMenuItem(Graphics g, JComponent c,
Icon checkIcon, Icon arrowIcon,
Color background, Color foreground,
int defaultTextIconGap) {
JMenuItem menuItem = (JMenuItem)c;
ButtonModel model = menuItem.getModel();
int x,y;
int maxpmw = 16;
Font font = menuItem.getFont();
FontMetrics fm = menuItem.getFontMetrics(font);
int state = Utils.getState(menuItem);
paintBackground(g, menuItem, null);
Icon icon = menuItem.getIcon();
if (icon != null) {
if (icon.getIconWidth() > maxpmw) {
maxpmw = icon.getIconWidth();<|fim▁hole|> }
if ((state & MT_DISABLED) != 0) {
icon = (Icon)menuItem.getDisabledIcon();
}
else if (model.isPressed() && model.isArmed()) {
icon = (Icon)menuItem.getPressedIcon();
if (icon == null) {
icon = (Icon)menuItem.getIcon();
}
} else {
icon = (Icon)menuItem.getIcon();
}
}
if (type == MT_MENU_ITEM_CHECK || type == MT_MENU_ITEM_RADIO) {
if ((state & MT_ACTIVE) != 0) {
x = engine.metricSize[MT_MENU_BORDER].width;
y = engine.metricSize[MT_MENU_BORDER].height;
engine.drawWidget(g, type, state, x, y, maxpmw, menuItem.getHeight() - 2*y, c);
}
}
else {
if (icon != null) {
y = (menuItem.getHeight() - icon.getIconHeight() - 1) / 2;
icon.paintIcon(menuItem, g, engine.metricSize[MT_MENU_ITEM_BORDER].width, y);
}
}
String text = menuItem.getText();
if (text != null) {
g.setFont(font);
paintText(g, menuItem, new Rectangle(0, 0, menuItem.getWidth(), menuItem.getHeight()), text);
}
KeyStroke accelerator = menuItem.getAccelerator();
String acceleratorText = "";
if (accelerator != null) {
int modifiers = accelerator.getModifiers();
if (modifiers > 0) {
acceleratorText = KeyEvent.getKeyModifiersText(modifiers);
acceleratorText += /*acceleratorDelimiter*/"-";
}
int keyCode = accelerator.getKeyCode();
if (keyCode != 0) {
acceleratorText += KeyEvent.getKeyText(keyCode);
} else {
acceleratorText += accelerator.getKeyChar();
}
}
if (acceleratorText != null && !acceleratorText.equals("")) {
x = menuItem.getWidth() - engine.metricSize[MT_MENU_ITEM_BORDER].width - 3;
y = (menuItem.getHeight() - fm.getHeight() + 1) / 2;
x -= SwingUtilities.computeStringWidth(fm, acceleratorText);
g.setFont(font);
engine.drawString(g, MT_MENU_ITEM, state, x, y, acceleratorText, engine.palette[Utils.getTextColor(state, MT_FOREGROUND, true)], -1);
}
if (type == MT_MENU_ITEM_ARROW) {
x = menuItem.getWidth() - 1 - engine.metricSize[MT_MENU_ITEM_BORDER].width - 8;
y = engine.metricSize[MT_MENU_ITEM_BORDER].height;
engine.drawWidget(g, MT_MENU_ITEM_ARROW, state, x, y, 8, menuItem.getHeight() - 2*y, c);
}
}
protected void paintBackground(Graphics g, JMenuItem menuItem, Color bgColor) {
Dimension size = menuItem.getSize();
int state = Utils.getState(menuItem);
Color oldColor = g.getColor();
engine.drawWidget(g, MT_MENU_ITEM, state, 0, 0, size.width, size.height, menuItem);
g.setColor(oldColor);
}
protected void paintText(Graphics g, JMenuItem menuItem, Rectangle textRect, String text) {
FontMetrics fm = g.getFontMetrics();
int mnemIndex = menuItem.getDisplayedMnemonicIndex();
int state = Utils.getState(menuItem);
Rectangle tr = new Rectangle(textRect);
int maxpmw = 16;
Icon icon = menuItem.getIcon();
if (icon != null && icon.getIconWidth() > maxpmw) maxpmw = icon.getIconWidth();
tr.x += engine.metricSize[MT_MENU_ITEM_BORDER].width + maxpmw + 3;
tr.y += (tr.height - fm.getHeight() + 1) / 2;
int sc = MT_FOREGROUND;
if ((state & MT_SELECTED) != 0) sc = MT_SELECTED_FOREGROUND;
engine.drawString(g, MT_MENU_ITEM, state, tr.x, tr.y, text, engine.palette[Utils.getTextColor(state, MT_FOREGROUND, true)], mnemIndex);
}
protected Dimension getPreferredMenuItemSize(JComponent c, Icon checkIcon, Icon arrowIcon, int defaultTextIconGap) {
Dimension d = super.getPreferredMenuItemSize(c, checkIcon, arrowIcon, defaultTextIconGap);
if (d.height <= 18) d.height -= 3;
d.width += engine.metricSize[MT_MENU_ITEM_BORDER].width * 2;
d.height += engine.metricSize[MT_MENU_ITEM_BORDER].height * 2;
if (d.height < 18) d.height = 18;
return d;
}
}<|fim▁end|> | |
<|file_name|>test_03b_subcmd_primer3.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""test_03b_subcmd_primer3.py
Test primer3 subcommand for pdp script. These tests require primer3 v2+.
This test suite is intended to be run from the repository root using:
pytest -v
(c) The James Hutton Institute 2017-2019
Author: Leighton Pritchard
Contact:
leighton.pritchard@hutton.ac.uk
Leighton Pritchard,
Information and Computing Sciences,
James Hutton Institute,
Errol Road,<|fim▁hole|>Dundee,
DD2 5DA,
Scotland,
UK
The MIT License
Copyright (c) 2017-2019 The James Hutton Institute
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import logging
import os
import shutil
from argparse import Namespace
import pytest
from diagnostic_primers.scripts import subcommands
from tools import PDPTestCase, get_primer3_version, modify_namespace
# Defined as global so it can be seen by the TestPrimer3Subcommand() class
# and setUpClass() classmethod.
OUTDIR = os.path.join("tests", "test_output", "pdp_primer3")
# Available primer3 version as global so that pytest.skipif() can see it
PRIMER3_VERSION = get_primer3_version()
class TestPrimer3Subcommand(PDPTestCase):
"""Class defining tests of the pdp primer3 subcommand."""
@classmethod
def setUpClass(TestPrimer3Subcommand):
# Clean up old output directory
if os.path.isdir(OUTDIR):
shutil.rmtree(OUTDIR)
def setUp(self):
"""Set parameters for tests."""
self.confdir = os.path.join("tests", "test_input", "pdp_primer3")
self.outdir = OUTDIR
self.targetdir = os.path.join("tests", "test_targets", "pdp_primer3")
self.p3_exe = "primer3_core"
self.scheduler = "multiprocessing"
self.workers = None
# null logger instance that does nothing
self.logger = logging.getLogger("TestPrimer3Subcommand logger")
self.logger.addHandler(logging.NullHandler())
# path to thermodynamic parameters (needed for Travis/testing)
self.therm_param_path = os.path.join(
"tests", "test_input", "primer3", "primer3_config"
)
# base Namespace
self.base_namespace = Namespace(
primer3_dir=self.outdir,
primer3_exe=self.p3_exe,
primer3_force=True,
scheduler=self.scheduler,
workers=4,
verbose=True,
p3_hybridprobe=False,
p3_filter=False,
disable_tqdm=True,
p3_param_path=self.therm_param_path,
p3_numreturn=10,
p3_osize=20,
p3_minsize=18,
p3_maxsize=22,
p3_wt_lt=2,
p3_wt_gt=2,
p3_opttm=59,
p3_mintm=58,
p3_maxtm=60,
p3_ogcpercent=55,
p3_mingc=30,
p3_maxgc=80,
p3_psizeopt=100,
p3_psizemin=50,
p3_psizemax=150,
p3_maxpolyx=3,
p3_osizeopt=20,
p3_ominsize=13,
p3_omaxsize=30,
p3_otmopt=69,
p3_otmmin=68,
p3_otmmax=70,
p3_ogcopt=55,
p3_ogcmin=30,
p3_ogcmax=80,
recovery=False,
)
@pytest.mark.skipif(PRIMER3_VERSION[0] < 2, reason="requires primer3 v2+")
def test_primer3_01_run(self):
"""primer3 subcommand recapitulates primer design for small input set.
pdp primer3 -v \
--outdir=tests/test_output/pdp_primer3/subset \
tests/test_input/pdp_primer3/subsetconf.json \
tests/test_output/pdp_primer3/subsetep3conf.json
"""
subcommands.subcmd_primer3(
modify_namespace(
self.base_namespace,
{
"infilename": os.path.join(self.confdir, "subsetconf.json"),
"outfilename": os.path.join(self.outdir, "subsetep3conf.json"),
"primer3_dir": os.path.join(self.outdir, "subset"),
},
),
self.logger,
)
# Check file contents
self.assertDirsEqual(
os.path.join(self.outdir, "subset"), os.path.join(self.targetdir, "subset")
)
@pytest.mark.skipif(PRIMER3_VERSION[0] < 2, reason="requires primer3 v2+")
def test_primer3_02_force(self):
"""primer3 subcommand executes and overwrites existing output.
This is the same test as test_primer3_01_run:
pdp primer3 -v -f \
--outdir=tests/test_output/pdp_primer3/subset \
tests/test_input/pdp_primer3/subsetconf.json \
tests/test_output/pdp_primer3/subsetep3conf.json
"""
self.test_primer3_01_run()
@pytest.mark.skipif(PRIMER3_VERSION[0] < 2, reason="requires primer3 v2+")
def test_primer3_03_noforce(self):
"""Script exits when not forcing primer3 output overwrite of existing output.
pdp primer3 -v \
--outdir=tests/test_output/pdp_primer3/subset \
tests/test_input/pdp_primer3/subsetconf.json \
tests/test_output/pdp_primer3/subsetep3conf.json
"""
with pytest.raises(SystemExit):
subcommands.subcmd_primer3(
modify_namespace(
self.base_namespace,
{
"infilename": os.path.join(self.confdir, "subsetconf.json"),
"outfilename": os.path.join(self.outdir, "subsetep3conf.json"),
"primer3_dir": os.path.join(self.outdir, "subset"),
"primer3_force": False,
},
),
self.logger,
)
@pytest.mark.skipif(PRIMER3_VERSION[0] < 2, reason="requires primer3 v2+")
def test_invalid_conf_file(self):
"""Script exits when primer3 config file has wrong suffix.
pdp primer3 -v \
--outdir=tests/test_output/pdp_primer3/subset \
tests/test_input/pdp_primer3/testprodigalconf.nojson \
tests/test_output/pdp_primer3/ep3conf.json
"""
with pytest.raises(SystemExit):
subcommands.subcmd_primer3(
modify_namespace(
self.base_namespace,
{
"infilename": os.path.join(
self.confdir, "testprodigalconf.nojson"
),
"outfilename": os.path.join(self.outdir, "ep3conf.json"),
},
),
self.logger,
)
@pytest.mark.skipif(PRIMER3_VERSION[0] < 2, reason="requires primer3 v2+")
def test_tsv_conf_file(self):
"""Error raised when .conf file provided for primer3.
pdp primer3 -v \
--outdir=tests/test_output/pdp_primer3/subset \
tests/test_input/pdp_primer3/testin.conf \
tests/test_output/pdp_primer3/ep3conf.json
"""
with pytest.raises(ValueError):
subcommands.subcmd_primer3(
modify_namespace(
self.base_namespace,
{
"infilename": os.path.join(self.confdir, "testin.conf"),
"outfilename": os.path.join(self.outdir, "ep3conf.json"),
},
),
self.logger,
)<|fim▁end|> | Invergowrie, |
<|file_name|>merge_sort.py<|end_file_name|><|fim▁begin|>def merge(a, b):
"""
inuput: two sorted lists
output: a merged sorted list
for example:
merge([2,3], [1,4])
--> [1,2,3,4]
"""
merged = []
while a or b:
if a and b:
if a[0] < b[0]:
merged.append(a.pop(0))
else:
merged.append(b.pop(0))
else:
merged += a + b
break
return merged
<|fim▁hole|>def merge_sort(one_list):
# divide
if len(one_list) == 1:
return one_list
middle = int(len(one_list)/2)
left = merge_sort(one_list[:middle])
right = merge_sort(one_list[middle:])
# conquer
return merge(left, right)<|fim▁end|> | |
<|file_name|>SocialIcon.js<|end_file_name|><|fim▁begin|>import React, { Component, PropTypes } from 'react';
import { Image } from 'react-bootstrap';
require('./styles.scss');
class SocialBar extends Component {
constructor(props) {
super(props);
}<|fim▁hole|> const { icon, url } = this.props;
return (
<a href={url} target="_blank">
<Image
src={icon}
className="profile-header-social-icon"
/>
</a>
);
}
}
SocialBar.propTypes = {
children: PropTypes.any,
icon: PropTypes.any.isRequired,
url: PropTypes.string.isRequired,
};
export default SocialBar;<|fim▁end|> |
render() { |
<|file_name|>mp3.py<|end_file_name|><|fim▁begin|># PyMM - Python MP3 Manager
# Copyright (C) 2000 Pierre Hjalm <pierre.hjalm@dis.uu.se>
#
# Modified by Alexander Kanavin <ak@sensi.org>
# Removed ID tags support and added VBR support
# Used http://home.swipnet.se/grd/mp3info/ for information
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
""" mp3.py
Reads information from an mp3 file.
This is a python port of code taken from the mpg123 input module of xmms.
"""
import struct
def header(buf):
return struct.unpack(">I",buf)[0]
def head_check(head):
if ((head & 0xffe00000L) != 0xffe00000L):
return 0
if (not ((head >> 17) & 3)):
return 0
if (((head >> 12) & 0xf) == 0xf):
return 0
if ( not ((head >> 12) & 0xf)):
return 0
if (((head >> 10) & 0x3) == 0x3):
return 0
if (((head >> 19) & 1) == 1 and ((head >> 17) & 3) == 3 and ((head >> 16) & 1) == 1):
return 0
if ((head & 0xffff0000L) == 0xfffe0000L):
return 0
return 1
def filesize(file):
""" Returns the size of file sans any ID3 tag
"""
f=open(file)
f.seek(0,2)
size=f.tell()
try:
f.seek(-128,2)
except:
f.close()
return 0
buf=f.read(3)
f.close()
if buf=="TAG":
size=size-128
if size<0:
return 0
else:
return size
table=[[
[0, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448],
[0, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384],
[0, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320]],
[
[0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256],
[0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160],
[0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160]]]
def decode_header(head):
""" Decode the mp3 header and put the information in a frame structure
"""
freqs=[44100, 48000, 32000, 22050, 24000, 16000, 11025, 12000, 8000]
fr={}
if head & (1 << 20):
if head & (1 << 19):
fr["lsf"]=0
else:
fr["lsf"]=1
fr["mpeg25"] = 0
else:
fr["lsf"] = 1
fr["mpeg25"] = 1
fr["lay"] = 4 - ((head >> 17) & 3)
if fr["mpeg25"]:
fr["sampling_frequency"] = freqs[6 + ((head >> 10) & 0x3)]
else:
fr["sampling_frequency"] = freqs[((head >> 10) & 0x3) + (fr["lsf"] * 3)]
fr["error_protection"] = ((head >> 16) & 0x1) ^ 0x1
fr["bitrate_index"] = ((head >> 12) & 0xf)
fr["bitrate"]=table[fr["lsf"]][fr["lay"]-1][fr["bitrate_index"]]
fr["padding"]=((head>>9) & 0x1)
fr["channel_mode"]=((head>>6) & 0x3)
if fr["lay"]==1:
fr["framesize"]=table[fr["lsf"]][0][fr["bitrate_index"]]*12000
fr["framesize"]=fr["framesize"]/fr["sampling_frequency"]
fr["framesize"]=((fr["framesize"]+fr["padding"])<<2)-4
elif fr["lay"]==2:
fr["framesize"]=table[fr["lsf"]][1][fr["bitrate_index"]]*144000
fr["framesize"]=fr["framesize"]/fr["sampling_frequency"]
fr["framesize"]=fr["framesize"]+fr["padding"]-1
elif fr["lay"]==3:
fr["framesize"]=table[fr["lsf"]][2][fr["bitrate_index"]]*144000
fr["framesize"]=fr["framesize"]/fr["sampling_frequency"]<<fr["lsf"]
fr["framesize"]=fr["framesize"]+fr["padding"]-4
pass
else:
return 0
return fr
def decode_vbr(buf):
vbr = {}
if buf[:4] != "Xing":
return 0
frames_flag = ord(buf[7]) & 1
if not frames_flag:
return 0
vbr["frames"] = header(buf[8:])
return vbr
def decode_synch_integer(buf):
return (ord(buf[0])<<21)+(ord(buf[1])<<14)+(ord(buf[2])<<7)+ord(buf[3])
def detect_mp3(filename):
""" Determines whether this is an mp3 file and if so reads information
from it.
"""
try:
f=open(filename,"rb")
except:
return 0
try:
tmp=f.read(4)
except:
f.close()
return 0
if tmp[:3] == 'ID3':
try:
tmp = f.read(6)
f.seek(decode_synch_integer(tmp[2:])+10)
tmp=f.read(4)
except:
f.close()
return 0
try:
head=header(tmp)
except:
return 0
while not head_check(head):
# This is a real time waster, but an mp3 stream can start anywhere
# in a file so we have to search the entire file which can take a
# while for large non-mp3 files.
try:
buf=f.read(1024)
except:
f.close()
return 0
if buf=="":
f.close()
return 0
for i in range(0,len(buf)-1):
head=long(head)<<8;
head=head|ord(buf[i])
if head_check(head):
f.seek(i+1-len(buf),1)
break
mhead=decode_header(head)
if mhead:
# Decode VBR header if there's any.
pos = f.tell()
mhead["vbr"] = 0
if not mhead["lsf"]:
if mhead["channel_mode"] == 3:
vbrpos = 17
else:
vbrpos = 32
else:
if mhead["channel_mode"] == 3:
vbrpos = 9
else:
vbrpos = 17
try:
f.seek(vbrpos,1)
vbr = decode_vbr(f.read(12))
mhead["vbrframes"] = vbr["frames"]
if mhead["vbrframes"] >0:
mhead["vbr"] = 1
except:
pass
# We found something which looks like a MPEG-header
# We check the next frame too, to be sure
if f.seek(pos+mhead["framesize"]):
f.close()
return 0
try:
tmp=f.read(4)
except:
f.close()
return 0
if len(tmp)!=4:
f.close()
return 0
htmp=header(tmp)
if not (head_check(htmp) and decode_header(htmp)):<|fim▁hole|>
# If we have found a valid mp3 add some more info the head data.
if mhead:
mhead["filesize"]=filesize(filename)
if not mhead["vbr"]:
if mhead["bitrate"] and mhead["filesize"]:
mhead["time"]=int(float(mhead["filesize"])/(mhead["bitrate"]*1000)*8)
else:
mhead["time"]=0
else:
if mhead["filesize"] and mhead["sampling_frequency"]:
medframesize = float(mhead["filesize"])/float(mhead["vbrframes"])
if mhead["lsf"]:
coef = 12
else:
coef = 144
vbrrate = medframesize*mhead["sampling_frequency"]/(1000*coef)
mhead["time"]=int(float(mhead["filesize"])/(vbrrate*1000)*8)
mhead["vbrrate"] = int(vbrrate)
return mhead
else:
return 0
if __name__=="__main__":
import sys
mp3info=detect_mp3(sys.argv[1])
if mp3info:
print mp3info
else:
print "Not an mp3 file."<|fim▁end|> | f.close()
return 0
f.close() |
<|file_name|>instagram.js<|end_file_name|><|fim▁begin|>var ig = {};
// !!! USE YOUR OWN TOKEN
ig.token = '43619676.1677ed0.5ca7163640fc4a7f89ca21dc02475134';
ig.init = function() {
$('.instagram').each(function(i) {
var args = {};
args.container = $(this);
args.userid = args.container.data('userid');<|fim▁hole|> args.html = '';
// PASS ARGS TO QUERY
ig.query(args);
});
}
ig.query = function(args) {
$.getJSON(args.feedurl, {}, function(data) {
// PASS QUERY DATA TO BUILDER
ig.build(data, args);
});
}
ig.build = function(data, args) {
$.each(data.data,function (i,item) {
console.log(item);
if (item.caption) var caption = item.caption.text;
var thumb = item.images.low_resolution.url;
var img = item.images.standard_resolution.url;
//get 1280 size photo [hack until avail in api]
var hires = img.replace('s640x640', '1080x1080');
args.html += '<a class="image" style="background-image: url('+thumb+');" data-img="'+hires+'">';
if (caption) args.html += '<span class="caption">'+caption+'</span>';
args.html += '</a>';
// PASS TO OUTPUT
ig.output(args);
});
}
ig.output = function(args) {
args.container.html(args.html);
}
ig.view = {
viewer: $('.igviewer'),
image: $('.igviewer img'),
open: function(img) {
ig.view.viewer.removeClass('hidden');
ig.view.image.attr('src', img);
},
close: function() {
ig.view.viewer.addClass('hidden');
ig.view.image.attr('src', '');
}
}
ig.init();
//Listeners
$('.instagram').on('click', '.image', function(){
var img = this.dataset.img;
ig.view.open(img);
});
$('.igviewer').on('click', function(){
ig.view.close();
});<|fim▁end|> | args.limit = args.container.data('limit');
args.feedurl = 'https://api.instagram.com/v1/users/'+args.userid+'/media/recent/?access_token='+ig.token+'&count='+args.limit+'&callback=?'; |
<|file_name|>SliderOffsets.ts<|end_file_name|><|fim▁begin|>import { ClientRect } from '@ephox/dom-globals';
import { AlloyComponent } from '../../api/component/ComponentApi';
const top = 'top',
right = 'right',
bottom = 'bottom',
left = 'left',
width = 'width',
height = 'height';
// Screen offsets from bounding client rect
const getBounds = (component: AlloyComponent): ClientRect => component.element().dom().getBoundingClientRect();
const getBoundsProperty = (bounds: ClientRect, property: string): number => bounds[property];
const getMinXBounds = (component: AlloyComponent): number => {
const bounds = getBounds(component);
return getBoundsProperty(bounds, left);<|fim▁hole|> return getBoundsProperty(bounds, right);
};
const getMinYBounds = (component: AlloyComponent): number => {
const bounds = getBounds(component);
return getBoundsProperty(bounds, top);
};
const getMaxYBounds = (component: AlloyComponent): number => {
const bounds = getBounds(component);
return getBoundsProperty(bounds, bottom);
};
const getXScreenRange = (component: AlloyComponent): number => {
const bounds = getBounds(component);
return getBoundsProperty(bounds, width);
};
const getYScreenRange = (component: AlloyComponent): number => {
const bounds = getBounds(component);
return getBoundsProperty(bounds, height);
};
const getCenterOffsetOf = (componentMinEdge: number, componentMaxEdge: number, spectrumMinEdge: number): number =>
(componentMinEdge + componentMaxEdge) / 2 - spectrumMinEdge;
const getXCenterOffSetOf = (component: AlloyComponent, spectrum: AlloyComponent): number => {
const componentBounds = getBounds(component);
const spectrumBounds = getBounds(spectrum);
const componentMinEdge = getBoundsProperty(componentBounds, left);
const componentMaxEdge = getBoundsProperty(componentBounds, right);
const spectrumMinEdge = getBoundsProperty(spectrumBounds, left);
return getCenterOffsetOf(componentMinEdge, componentMaxEdge, spectrumMinEdge);
};
const getYCenterOffSetOf = (component: AlloyComponent, spectrum: AlloyComponent): number => {
const componentBounds = getBounds(component);
const spectrumBounds = getBounds(spectrum);
const componentMinEdge = getBoundsProperty(componentBounds, top);
const componentMaxEdge = getBoundsProperty(componentBounds, bottom);
const spectrumMinEdge = getBoundsProperty(spectrumBounds, top);
return getCenterOffsetOf(componentMinEdge, componentMaxEdge, spectrumMinEdge);
};
export {
getMinXBounds,
getMaxXBounds,
getMinYBounds,
getMaxYBounds,
getXScreenRange,
getYScreenRange,
getXCenterOffSetOf,
getYCenterOffSetOf
};<|fim▁end|> | };
const getMaxXBounds = (component: AlloyComponent): number => {
const bounds = getBounds(component); |
<|file_name|>distance-service.js<|end_file_name|><|fim▁begin|>var Service = require("./../service.js");
function DistanceService(deviceId, serviceId) {
var distanceService = {}
, _characteristics = {
'distance' : 1
, 'unit' : 2
}
, _requests = {
'read-distance' : function () {
return {
packetType : 'read'
, packetData :[_characteristics['distance']]
}
}
, 'read-unit' : function () {
return {
packetType : 'read'
, packetData :[_characteristics['unit']]
}
}
, 'read-distance-with-unit' : function () {
return {
packetType : 'read'
, packetData :[_characteristics['distance'], _characteristics['unit']]
}
}
, 'set-unit' : function (unit) {
return {
packetType : 'write'
, packetData :[
{id : _characteristics['unit'], data : new Buffer(unit)}
]
}
}
}
, _responses = {};
_responses[_characteristics['distance']] = function (distanceBufer) {
var distance = distanceBufer.readUInt8(0, true);
return {
response : 'distance'
, data : "" + (distance === undefined ? 0 : distance)
};
};
_responses[_characteristics['unit']] = function (unitBuffer) {
return {
response : 'unit'
, data : unitBuffer.toString()
};
};
distanceService.__proto__ = Service(
deviceId
, serviceId
, _requests
, _responses
);
return distanceService;
}
module.exports = DistanceService;
(function(){
var assert = require("assert");
var ResponsePacket = require("../device-packets/response-packet");
var serviceId = 3
, distanceService = DistanceService("my-device-id", serviceId);
(function(){
console.log("Should process read distance json message.");
assert.deepEqual(
distanceService.processRequest(JSON.stringify(
{request: 'read-distance'}
)),
new Buffer([1, 1, 0, 0, 0, serviceId, 1, 1])
);
})();
(function(){
console.log("Should process read unit json message.");
assert.deepEqual(
distanceService.processRequest(JSON.stringify(
{request: 'read-unit'}
)),
new Buffer([1, 1, 0, 0, 0, serviceId, 1, 2])
);
})();<|fim▁hole|> (function(){
console.log("Should process read distance with unit json message.");
assert.deepEqual(
distanceService.processRequest(JSON.stringify(
{request: 'read-distance-with-unit'}
)),
new Buffer([1, 1, 0, 0, 0, serviceId, 2, 1, 2])
);
})();
(function(){
console.log("Should process set unit json message.");
var unit = "cm"
, unitBuffer = new Buffer(unit);
assert.deepEqual(
distanceService.processRequest(JSON.stringify(
{request: 'set-unit', data: unit}
)),
Buffer.concat([
new Buffer([1, 2, 0, 0, 0, serviceId, 1, 2, unitBuffer.length])
, unitBuffer
])
);
})();
(function(){
console.log("Should process distance response packet.");
assert.deepEqual(
distanceService.processResponse(ResponsePacket(new Buffer([
1, 4, 0, 0, 0, serviceId, 1, 1, 1, 23
]))),
[{response: 'distance', data: 23}]
);
})();
(function(){
console.log("Should process unit response packet.");
var unit = "cm"
, unitBuffer = new Buffer(unit);
assert.deepEqual(
distanceService.processResponse(ResponsePacket(Buffer.concat([
new Buffer([1, 4, 0, 0, 0, serviceId, 1, 2, unitBuffer.length])
, unitBuffer
]))),
[{response: 'unit', data: 'cm'}]
);
})();
(function(){
console.log("Should process distance with unit response packet.");
var unit = "cm"
, unitBuffer = new Buffer(unit);
assert.deepEqual(
distanceService.processResponse(ResponsePacket(Buffer.concat([
new Buffer([1, 4, 0, 0, 0, serviceId, 2, 1, 1, 54, 2, unitBuffer.length])
, unitBuffer
]))),
[
{response: 'distance', data: '54'}
, {response: 'unit', data: 'cm'}
]
);
})();
})(this);<|fim▁end|> | |
<|file_name|>as_ref.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::convert::AsRef;
// pub trait FixedSizeArray<T> {
// /// Converts the array to immutable slice
// fn as_slice(&self) -> &[T];
// /// Converts the array to mutable slice
// fn as_mut_slice(&mut self) -> &mut [T];
// }
// macro_rules! array_impls {
// ($($N:expr)+) => {
// $(
// #[unstable(feature = "core")]
// impl<T> FixedSizeArray<T> for [T; $N] {
// #[inline]
// fn as_slice(&self) -> &[T] {
// &self[..]
// }
// #[inline]
// fn as_mut_slice(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsRef<[T]> for [T; $N] {
// #[inline]
// fn as_ref(&self) -> &[T] {
// &self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsMut<[T]> for [T; $N] {
// #[inline]
// fn as_mut(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Copy> Clone for [T; $N] {
// fn clone(&self) -> [T; $N] {
// *self
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: Hash> Hash for [T; $N] {
// fn hash<H: hash::Hasher>(&self, state: &mut H) {
// Hash::hash(&self[..], state)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: fmt::Debug> fmt::Debug for [T; $N] {
// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// fmt::Debug::fmt(&&self[..], f)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a [T; $N] {
// type Item = &'a T;
// type IntoIter = Iter<'a, T>;
//
// fn into_iter(self) -> Iter<'a, T> {
// self.iter()
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a mut [T; $N] {
// type Item = &'a mut T;<|fim▁hole|> // self.iter_mut()
// }
// }
//
// // NOTE: some less important impls are omitted to reduce code bloat
// __impl_slice_eq1! { [A; $N], [B; $N] }
// __impl_slice_eq2! { [A; $N], [B] }
// __impl_slice_eq2! { [A; $N], &'b [B] }
// __impl_slice_eq2! { [A; $N], &'b mut [B] }
// // __impl_slice_eq2! { [A; $N], &'b [B; $N] }
// // __impl_slice_eq2! { [A; $N], &'b mut [B; $N] }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Eq> Eq for [T; $N] { }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:PartialOrd> PartialOrd for [T; $N] {
// #[inline]
// fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {
// PartialOrd::partial_cmp(&&self[..], &&other[..])
// }
// #[inline]
// fn lt(&self, other: &[T; $N]) -> bool {
// PartialOrd::lt(&&self[..], &&other[..])
// }
// #[inline]
// fn le(&self, other: &[T; $N]) -> bool {
// PartialOrd::le(&&self[..], &&other[..])
// }
// #[inline]
// fn ge(&self, other: &[T; $N]) -> bool {
// PartialOrd::ge(&&self[..], &&other[..])
// }
// #[inline]
// fn gt(&self, other: &[T; $N]) -> bool {
// PartialOrd::gt(&&self[..], &&other[..])
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Ord> Ord for [T; $N] {
// #[inline]
// fn cmp(&self, other: &[T; $N]) -> Ordering {
// Ord::cmp(&&self[..], &&other[..])
// }
// }
// )+
// }
// }
// array_impls! {
// 0 1 2 3 4 5 6 7 8 9
// 10 11 12 13 14 15 16 17 18 19
// 20 21 22 23 24 25 26 27 28 29
// 30 31 32
// }
type T = i32;
#[test]
fn as_ref_test1() {
let array: [T; 18] = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17
];
let as_ref: &[T] = array.as_ref();
for i in 0..18 {
assert_eq!(array[i], as_ref[i]);
}
}
}<|fim▁end|> | // type IntoIter = IterMut<'a, T>;
//
// fn into_iter(self) -> IterMut<'a, T> { |
<|file_name|>SchemaFormContentFileService.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2017 OBiBa. All rights reserved.
*
* This program and the accompanying materials
* are made available under the terms of the GNU Public License v3.0.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.obiba.mica.core.service;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;
import javax.inject.Inject;
import javax.validation.constraints.NotNull;
import net.minidev.json.JSONArray;
import org.obiba.mica.core.domain.SchemaFormContentAware;
import org.obiba.mica.file.FileStoreService;
import org.springframework.stereotype.Service;
import org.springframework.util.Assert;
import com.google.common.collect.Sets;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.Option;
import com.jayway.jsonpath.PathNotFoundException;
import com.jayway.jsonpath.internal.JsonReader;
import static com.jayway.jsonpath.Configuration.defaultConfiguration;
@Service
public class SchemaFormContentFileService {
@Inject
private FileStoreService fileStoreService;
public void save(@NotNull SchemaFormContentAware newEntity, SchemaFormContentAware oldEntity, String entityPath) {
Assert.notNull(newEntity, "New content cannot be null");
Object json = defaultConfiguration().jsonProvider().parse(newEntity.getContent());
DocumentContext newContext = JsonPath.using(defaultConfiguration().addOptions(Option.AS_PATH_LIST)).parse(json);
Map<String, JSONArray> newPaths = getPathFilesMap(newContext, json);
if (newPaths == null) return; // content does not have any file field
if (oldEntity != null) {
Object oldJson = defaultConfiguration().jsonProvider().parse(oldEntity.getContent());
DocumentContext oldContext = JsonPath.using(defaultConfiguration().addOptions(Option.AS_PATH_LIST)).parse(oldJson);
Map<String, JSONArray> oldPaths = getPathFilesMap(oldContext, oldJson);
if (oldPaths != null) {
saveAndDelete(oldPaths, newPaths, entityPath);<|fim▁hole|> } else {
// schema and definition now have files
newPaths.values().forEach(v -> saveFiles(v, entityPath));
}
} else {
newPaths.values().forEach(v -> saveFiles(v, entityPath));
}
cleanup(newPaths, newContext);
newEntity.setContent(newContext.jsonString());
}
public void deleteFiles(SchemaFormContentAware entity) {
Object json = defaultConfiguration().jsonProvider().parse(entity.getContent());
DocumentContext context = JsonPath.using(defaultConfiguration().addOptions(Option.AS_PATH_LIST)).parse(json);
DocumentContext reader =
new JsonReader(defaultConfiguration().addOptions(Option.REQUIRE_PROPERTIES)).parse(json);
try {
((JSONArray)context.read("$..obibaFiles")).stream()
.map(p -> (JSONArray) reader.read(p.toString()))
.flatMap(Collection::stream)
.forEach(file -> fileStoreService.delete(((LinkedHashMap)file).get("id").toString()));
} catch(PathNotFoundException e) {
}
}
/**
* Removes the fields with empty obibaFiles from content.
*
* @param newPaths
* @param newContext
*/
private void cleanup(Map<String, JSONArray> newPaths, DocumentContext newContext) {
newPaths.keySet().forEach(p -> {
if (newPaths.get(p).isEmpty()) {
newContext.delete(p.replace("['obibaFiles']", ""));
}
});
}
private void saveAndDelete(Map<String, JSONArray> oldPaths, Map<String, JSONArray> newPaths, String entityPath) {
newPaths.keySet().forEach(p -> {
if (oldPaths.containsKey(p)) {
saveAndDeleteFiles(oldPaths.get(p), newPaths.get(p), entityPath);
} else {
saveFiles(newPaths.get(p), entityPath);
}
});
}
private Map<String, JSONArray> getPathFilesMap(DocumentContext context, Object json) {
DocumentContext reader =
new JsonReader(defaultConfiguration().addOptions(Option.REQUIRE_PROPERTIES)).parse(json);
JSONArray paths = null;
try {
paths = context.read("$..obibaFiles");
} catch(PathNotFoundException e) {
return null;
}
return paths.stream().collect(Collectors.toMap(Object::toString, p -> (JSONArray) reader.read(p.toString())));
}
private Iterable<Object> saveAndDeleteFiles(JSONArray oldFiles, JSONArray newFiles, String entityPath) {
cleanFileJsonArrays(oldFiles, newFiles);
Iterable<Object> toDelete = Sets.difference(Sets.newHashSet(oldFiles), Sets.newHashSet(newFiles));
Iterable<Object> toSave = Sets.difference(Sets.newHashSet(newFiles), Sets.newHashSet(oldFiles));
toDelete.forEach(file -> fileStoreService.delete(((LinkedHashMap)file).get("id").toString()));
saveFiles(toSave, entityPath);
return toDelete;
}
private void cleanFileJsonArrays(JSONArray... arrays) {
if (arrays != null) {
Arrays.stream(arrays).forEach(s -> s.forEach(a -> {
if (a instanceof LinkedHashMap) {
LinkedHashMap<String, String> jsonMap = (LinkedHashMap<String, String>) a;
jsonMap.keySet().stream().filter(k -> k.contains("$")).collect(Collectors.toList()).forEach(jsonMap::remove);
}
}));
}
}
private void saveFiles(Iterable files, String entityPath) {
if(files != null) files.forEach(file -> {
LinkedHashMap map = (LinkedHashMap)file;
map.put("path", entityPath);
fileStoreService.save(map.get("id").toString());
});
}
}<|fim▁end|> | |
<|file_name|>action.py<|end_file_name|><|fim▁begin|>"""Provides a class for managing BIG-IP L7 Rule Action resources."""
# coding=utf-8
#
# Copyright (c) 2017-2021 F5 Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from f5_cccl.resource import Resource
LOGGER = logging.getLogger(__name__)
class Action(Resource):
"""L7 Rule Action class."""
# The property names class attribute defines the names of the
# properties that we wish to compare.
properties = dict(
expression=None,
forward=False,
location=None,
pool=None,
redirect=False,
request=True,
reset=False,
setVariable=False,
tcl=False,
tmName=None,
httpHost=False,
httpUri=False,
path=None,
replace=False,
value=None,
shutdown=True,
select=True,
)
def __init__(self, name, data):
"""Initialize the Action object.
Actions do not have explicit partition attributes, the are
implied by the partition of the rule to which they belong.
"""
super(Action, self).__init__(name, partition=None)
# Actions are Only supported on requests.
self._data['request'] = True
# Is this a forwarding action?
if data.get('forward', False):
self._data['forward'] = True
# Yes, there are two supported forwarding actions:
# forward to pool and reset, these are mutually
# exclusive options.
pool = data.get('pool', None)
reset = data.get('reset', False)
# This allows you to specify an empty node. This is
# what Container Connector does.
select = data.get('select', False)
# This was added in 13.1.0
shutdown = data.get('shutdown', False)
if pool:
self._data['pool'] = pool
elif reset:
self._data['reset'] = reset
elif select:
self._data['select'] = select
elif shutdown:<|fim▁hole|> else:
raise ValueError(
"Unsupported forward action, must be one of reset, "
"forward to pool, select, or shutdown.")
# Is this a redirect action?
elif data.get('redirect', False):
self._data['redirect'] = True
# Yes, set the location and httpReply attribute
self._data['location'] = data.get('location', None)
self._data['httpReply'] = data.get('httpReply', True)
# Is this a setVariable action?
elif data.get('setVariable', False):
self._data['setVariable'] = True
# Set the variable name and the value
self._data['tmName'] = data.get('tmName', None)
self._data['expression'] = data.get('expression', None)
self._data['tcl'] = True
# Is this a replace URI host action?
elif data.get('replace', False) and data.get('httpHost', False):
self._data['replace'] = True
self._data['httpHost'] = True
self._data['value'] = data.get('value', None)
# Is this a replace URI path action?
elif data.get('replace', False) and data.get('httpUri', False) and \
data.get('path', False):
self._data['replace'] = True
self._data['httpUri'] = True
self._data['path'] = data.get('path', None)
self._data['value'] = data.get('value', None)
# Is this a replace URI action?
elif data.get('replace', False) and data.get('httpUri', False):
self._data['replace'] = True
self._data['httpUri'] = True
self._data['value'] = data.get('value', None)
else:
# Only forward, redirect and setVariable are supported.
raise ValueError("Unsupported action, must be one of forward, "
"redirect, setVariable, replace, or reset.")
def __eq__(self, other):
"""Check the equality of the two objects.
Do a straight data to data comparison.
"""
if not isinstance(other, Action):
return False
return super(Action, self).__eq__(other)
def __str__(self):
return str(self._data)
def _uri_path(self, bigip):
"""Return the URI path of an action object.
Not implemented because the current implementation does
not manage Actions individually."""
raise NotImplementedError<|fim▁end|> | self._data['shutdown'] = shutdown |
<|file_name|>simulation_CS3.py<|end_file_name|><|fim▁begin|>import modules.pumpingsystem as ps
import pandas as pd
import numpy as np
# Pump schedule as per SCADA. rows = pumps, columns 1:=Peak, 2:=Standard, 3:Off-peak
pump_schedule_41 = np.array([[72, 42, 50],
[95, 78, 86],
[110, 110, 110],
[120, 120, 120],
[150, 150, 150]])
pump_schedule_31 = np.array([[77, 45, 45],
[92, 70, 60],
[110, 110, 110],
[120, 120, 120]])
pump_schedule_20 = np.array([[72, 47, 55],
[82, 70, 70],
[91, 87, 92],
[110, 110, 110]])
pump_schedule_IPC = np.array([[80, 45, 45],
[85, 70, 60],
[90, 82, 82],
[110, 110, 110],
[150, 150, 150]])
dummy_pump_schedule_surface = np.array([[150, 150, 150]])
# Inflows into dams
dam_inflow_profiles = pd.read_csv('input/CS3_dam_inflow_profiles.csv.gz')
inflow_41 = np.reshape(dam_inflow_profiles['41L Inflow'].values, (24, 2))
inflow_31 = np.reshape(dam_inflow_profiles['31L Inflow'].values, (24, 2))
inflow_20 = np.reshape(dam_inflow_profiles['20L Inflow'].values, (24, 2))
inflow_IPC = np.reshape(dam_inflow_profiles['IPC Inflow'].values, (24, 2))
inflow_surface = np.reshape(dam_inflow_profiles['Surface Inflow'].values, (24, 2))
# Read actual data for initial conditions and validation
actual_values = pd.read_csv('input/CS3_data_for_validation.csv.gz')
actual_status_41 = actual_values['41L Status'].values
actual_status_31 = actual_values['31L Status'].values
actual_status_20 = actual_values['20L Status'].values
actual_status_IPC = actual_values['IPC Status'].values
initial_level_41 = actual_values['41L Level'][0]
initial_level_31 = actual_values['31L Level'][0]
initial_level_20 = actual_values['20L Level'][0]
initial_level_IPC = actual_values['IPC Level'][0]
initial_level_surface = actual_values['Surface Level'][0]
# Create pump system
pump_system = ps.PumpSystem('CS3')
pump_system.add_level(ps.PumpingLevel("41L", 3000000, initial_level_41,
216.8, 3508.4, pump_schedule_41, actual_status_41[0],<|fim▁hole|> n_mode_top_offset=5))
pump_system.add_level(ps.PumpingLevel("31L", 3000000, initial_level_31,
146.8, 3283.6, pump_schedule_31, actual_status_31[0],
inflow_31, fed_to_level="20L", pump_statuses_for_validation=actual_status_31,
n_mode_max_pumps=2, n_mode_max_level=80, n_mode_control_range=20,
n_mode_top_offset=5, n_mode_bottom_offset=5))
pump_system.add_level(ps.PumpingLevel("20L", 3000000, initial_level_20,
171.8, 3821.0, pump_schedule_20, actual_status_20[0],
inflow_20, fed_to_level="IPC", pump_statuses_for_validation=actual_status_20,
n_mode_max_pumps=2, n_mode_control_range=20, n_mode_top_offset=7,
n_mode_bottom_offset=5))
pump_system.add_level(ps.PumpingLevel("IPC", 3000000, initial_level_IPC,
147.4, 3572.8, pump_schedule_IPC, actual_status_IPC[0],
inflow_IPC, fed_to_level="Surface",
pump_statuses_for_validation=actual_status_IPC,
n_mode_max_pumps=2, n_mode_max_level=80, n_mode_control_range=10,
n_mode_top_offset=5, n_mode_bottom_offset=3))
pump_system.add_level(ps.PumpingLevel("Surface", 5000000, initial_level_surface,
0, 0, dummy_pump_schedule_surface, 0, inflow_surface,
pump_statuses_for_validation=actual_status_IPC,
n_mode_max_pumps=0)) # the status data doesn't matter
# Perform simulations
pump_system.perform_simulation(mode='validation', save=True)
pump_system.perform_simulation(mode='1-factor', save=True)
pump_system.perform_simulation(mode='2-factor', save=True)
pump_system.perform_simulation(mode='n-factor', save=True)<|fim▁end|> | inflow_41, fed_to_level="31L", pump_statuses_for_validation=actual_status_41,
n_mode_max_pumps=2, n_mode_max_level=80, n_mode_control_range=30, |
<|file_name|>gallery_generator.py<|end_file_name|><|fim▁begin|>"""
Sphinx plugin to run example scripts and create a gallery page.
Lightly modified from the mpld3 project.
"""
from __future__ import division
import os
import os.path as op
import re
import glob
import token
import tokenize
import shutil
from seaborn.external import six
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib import image
if six.PY3:
# Python 3 has no execfile
def execfile(filename, globals=None, locals=None):
with open(filename, "rb") as fp:
six.exec_(compile(fp.read(), filename, 'exec'), globals, locals)
RST_TEMPLATE = """
.. _{sphinx_tag}:
{docstring}
.. image:: {img_file}
**Python source code:** :download:`[download source: {fname}]<{fname}>`
.. raw:: html
<div class="col-md-9">
.. literalinclude:: {fname}
:lines: {end_line}-
.. raw:: html
</div>
"""
INDEX_TEMPLATE = """
.. raw:: html
<style type="text/css">
.figure {{
position: relative;
float: left;
margin: 10px;
width: 180px;
height: 200px;
}}
.figure img {{
position: absolute;
display: inline;
left: 0;
width: 170px;
height: 170px;
opacity:1.0;
filter:alpha(opacity=100); /* For IE8 and earlier */
}}
.figure:hover img {{
-webkit-filter: blur(3px);
-moz-filter: blur(3px);
-o-filter: blur(3px);
-ms-filter: blur(3px);
filter: blur(3px);
opacity:1.0;
filter:alpha(opacity=100); /* For IE8 and earlier */
}}
.figure span {{
position: absolute;
display: inline;
left: 0;
width: 170px;
height: 170px;
background: #000;
color: #fff;
visibility: hidden;
opacity: 0;
z-index: 100;
}}
.figure p {{
position: absolute;
top: 45%;
width: 170px;
font-size: 110%;
}}
.figure:hover span {{
visibility: visible;
opacity: .4;
}}
.caption {{
position: absolue;
width: 180px;
top: 170px;
text-align: center !important;
}}
</style>
.. _{sphinx_tag}:
Example gallery
===============
{toctree}
{contents}
.. raw:: html
<div style="clear: both"></div>
"""
def create_thumbnail(infile, thumbfile,
width=275, height=275,
cx=0.5, cy=0.5, border=4):
baseout, extout = op.splitext(thumbfile)
im = image.imread(infile)
rows, cols = im.shape[:2]
x0 = int(cx * cols - .5 * width)
y0 = int(cy * rows - .5 * height)
xslice = slice(x0, x0 + width)
yslice = slice(y0, y0 + height)
thumb = im[yslice, xslice]
thumb[:border, :, :3] = thumb[-border:, :, :3] = 0
thumb[:, :border, :3] = thumb[:, -border:, :3] = 0
dpi = 100
fig = plt.figure(figsize=(width / dpi, height / dpi), dpi=dpi)
ax = fig.add_axes([0, 0, 1, 1], aspect='auto',
frameon=False, xticks=[], yticks=[])
ax.imshow(thumb, aspect='auto', resample=True,
interpolation='bilinear')
fig.savefig(thumbfile, dpi=dpi)
return fig
def indent(s, N=4):
"""indent a string"""
return s.replace('\n', '\n' + N * ' ')
class ExampleGenerator(object):
"""Tools for generating an example page from a file"""
def __init__(self, filename, target_dir):
self.filename = filename
self.target_dir = target_dir
self.thumbloc = .5, .5
self.extract_docstring()
with open(filename, "r") as fid:
self.filetext = fid.read()
outfilename = op.join(target_dir, self.rstfilename)
# Only actually run it if the output RST file doesn't
# exist or it was modified less recently than the example
if (not op.exists(outfilename)
or (op.getmtime(outfilename) < op.getmtime(filename))):
self.exec_file()
else:
print("skipping {0}".format(self.filename))
@property
def dirname(self):
return op.split(self.filename)[0]
@property
def fname(self):
return op.split(self.filename)[1]
@property
def modulename(self):
return op.splitext(self.fname)[0]
@property
def pyfilename(self):
return self.modulename + '.py'
@property
def rstfilename(self):
return self.modulename + ".rst"
@property
def htmlfilename(self):
return self.modulename + '.html'
@property
def pngfilename(self):
pngfile = self.modulename + '.png'
return "_images/" + pngfile
@property
def thumbfilename(self):
pngfile = self.modulename + '_thumb.png'
return pngfile
@property
def sphinxtag(self):
return self.modulename
@property
def pagetitle(self):
return self.docstring.strip().split('\n')[0].strip()
@property
def plotfunc(self):
match = re.search(r"sns\.(.+plot)\(", self.filetext)
if match:
return match.group(1)
match = re.search(r"sns\.(.+map)\(", self.filetext)
if match:
return match.group(1)
match = re.search(r"sns\.(.+Grid)\(", self.filetext)
if match:
return match.group(1)
return ""
def extract_docstring(self):
""" Extract a module-level docstring
"""
lines = open(self.filename).readlines()
start_row = 0
if lines[0].startswith('#!'):
lines.pop(0)
start_row = 1
docstring = ''
first_par = ''
line_iter = lines.__iter__()
tokens = tokenize.generate_tokens(lambda: next(line_iter))
for tok_type, tok_content, _, (erow, _), _ in tokens:
tok_type = token.tok_name[tok_type]
if tok_type in ('NEWLINE', 'COMMENT', 'NL', 'INDENT', 'DEDENT'):
continue
elif tok_type == 'STRING':
docstring = eval(tok_content)
# If the docstring is formatted with several paragraphs,
# extract the first one:
paragraphs = '\n'.join(line.rstrip()
for line in docstring.split('\n')
).split('\n\n')
if len(paragraphs) > 0:
first_par = paragraphs[0]
break
thumbloc = None
for i, line in enumerate(docstring.split("\n")):
m = re.match(r"^_thumb: (\.\d+),\s*(\.\d+)", line)
if m:
thumbloc = float(m.group(1)), float(m.group(2))
break
if thumbloc is not None:
self.thumbloc = thumbloc
docstring = "\n".join([l for l in docstring.split("\n")
if not l.startswith("_thumb")])
self.docstring = docstring
self.short_desc = first_par
self.end_line = erow + 1 + start_row
def exec_file(self):
print("running {0}".format(self.filename))
plt.close('all')
my_globals = {'pl': plt,
'plt': plt}
execfile(self.filename, my_globals)
fig = plt.gcf()
fig.canvas.draw()
pngfile = op.join(self.target_dir, self.pngfilename)
thumbfile = op.join("example_thumbs", self.thumbfilename)
self.html = "<img src=../%s>" % self.pngfilename
fig.savefig(pngfile, dpi=75, bbox_inches="tight")
cx, cy = self.thumbloc
create_thumbnail(pngfile, thumbfile, cx=cx, cy=cy)
def toctree_entry(self):
return " ./%s\n\n" % op.splitext(self.htmlfilename)[0]
def contents_entry(self):
return (".. raw:: html\n\n"
" <div class='figure align-center'>\n"
" <a href=./{0}>\n"
" <img src=../_static/{1}>\n"
" <span class='figure-label'>\n"
" <p>{2}</p>\n"
" </span>\n"
" </a>\n"
" </div>\n\n"
"\n\n"
"".format(self.htmlfilename,
self.thumbfilename,
self.plotfunc))
def main(app):
static_dir = op.join(app.builder.srcdir, '_static')
target_dir = op.join(app.builder.srcdir, 'examples')
image_dir = op.join(app.builder.srcdir, 'examples/_images')
thumb_dir = op.join(app.builder.srcdir, "example_thumbs")
source_dir = op.abspath(op.join(app.builder.srcdir,
'..', 'examples'))<|fim▁hole|> if not op.exists(static_dir):
os.makedirs(static_dir)
if not op.exists(target_dir):
os.makedirs(target_dir)
if not op.exists(image_dir):
os.makedirs(image_dir)
if not op.exists(thumb_dir):
os.makedirs(thumb_dir)
if not op.exists(source_dir):
os.makedirs(source_dir)
banner_data = []
toctree = ("\n\n"
".. toctree::\n"
" :hidden:\n\n")
contents = "\n\n"
# Write individual example files
for filename in glob.glob(op.join(source_dir, "*.py")):
ex = ExampleGenerator(filename, target_dir)
banner_data.append({"title": ex.pagetitle,
"url": op.join('examples', ex.htmlfilename),
"thumb": op.join(ex.thumbfilename)})
shutil.copyfile(filename, op.join(target_dir, ex.pyfilename))
output = RST_TEMPLATE.format(sphinx_tag=ex.sphinxtag,
docstring=ex.docstring,
end_line=ex.end_line,
fname=ex.pyfilename,
img_file=ex.pngfilename)
with open(op.join(target_dir, ex.rstfilename), 'w') as f:
f.write(output)
toctree += ex.toctree_entry()
contents += ex.contents_entry()
if len(banner_data) < 10:
banner_data = (4 * banner_data)[:10]
# write index file
index_file = op.join(target_dir, 'index.rst')
with open(index_file, 'w') as index:
index.write(INDEX_TEMPLATE.format(sphinx_tag="example_gallery",
toctree=toctree,
contents=contents))
def setup(app):
app.connect('builder-inited', main)<|fim▁end|> | |
<|file_name|>globe_plugin.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
globe_plugin.cpp
Globe Plugin
a QGIS plugin
--------------------------------------
Date : 08-Jul-2010
Copyright : (C) 2010 by Sourcepole
Email : info at sourcepole.ch
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#include "globe_plugin.h"
#include "qgsglobeplugindialog.h"
#include "qgsglobefeatureidentify.h"
#include "qgsglobefrustumhighlight.h"
#include "qgsglobetilesource.h"
#include "qgsglobevectorlayerproperties.h"
#include "qgsglobewidget.h"
#include "featuresource/qgsglobefeatureoptions.h"
#include <qgisinterface.h>
#include <qgslogger.h>
#include <qgsapplication.h>
#include <qgsmapcanvas.h>
#include <qgsvectorlayer.h>
#include <qgsfeature.h>
#include <qgsgeometry.h>
#include <qgspoint.h>
#include <qgsdistancearea.h>
#include <symbology/qgsrenderer.h>
#include <symbology/qgssymbol.h>
#include <qgspallabeling.h>
#include <qgssettings.h>
#include <qgsvectorlayerlabeling.h>
#include <qgsproject.h>
#include <QAction>
#include <QDir>
#include <QDockWidget>
#include <QStringList>
#include <osg/Light>
#include <osgDB/ReadFile>
#include <osgDB/Registry>
#include <osgGA/StateSetManipulator>
#include <osgGA/GUIEventHandler>
#include <osgViewer/Viewer>
#include <osgViewer/ViewerEventHandlers>
#include <osgEarthQt/ViewerWidget>
#include <osgEarth/ElevationQuery>
#include <osgEarth/Notify>
#include <osgEarth/Map>
#include <osgEarth/MapNode>
#include <osgEarth/Registry>
#if OSGEARTH_VERSION_GREATER_OR_EQUAL(2, 8, 0)
#include <osgEarth/TerrainEngineNode>
#endif
#include <osgEarth/TileSource>
#include <osgEarth/Version>
#include <osgEarthDrivers/engine_mp/MPTerrainEngineOptions>
#include <osgEarthUtil/Controls>
#include <osgEarthUtil/EarthManipulator>
#if OSGEARTH_VERSION_LESS_THAN( 2, 6, 0 )
#include <osgEarthUtil/SkyNode>
#else
#include <osgEarthUtil/Sky>
#endif
#include <osgEarthUtil/AutoClipPlaneHandler>
#include <osgEarthDrivers/gdal/GDALOptions>
#include <osgEarthDrivers/tms/TMSOptions>
#include <osgEarthDrivers/wms/WMSOptions>
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 2, 0 )
#include <osgEarthDrivers/cache_filesystem/FileSystemCache>
#endif
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 5, 0 )
#include <osgEarthUtil/VerticalScale>
#endif
#include <osgEarthDrivers/model_feature_geom/FeatureGeomModelOptions>
#include <osgEarthUtil/FeatureQueryTool>
#include <osgEarthFeatures/FeatureDisplayLayout>
// Step applied to the EarthManipulator per navigation-control interaction
// (pan/rotate/zoom), in manipulator units.
#define MOVE_OFFSET 0.05

// Plugin metadata consumed by the QGIS plugin manager.
static const QString sName = QObject::tr( "Globe" );
static const QString sDescription = QObject::tr( "Overlay data on a 3D globe" );
static const QString sCategory = QObject::tr( "Plugins" );
static const QString sPluginVersion = QObject::tr( "Version 1.0" );
static const QgisPlugin::PluginType sPluginType = QgisPlugin::UI;
static const QString sIcon = ":/globe/icon.svg";
static const QString sExperimental = QString( "false" );
// Base class for handlers attached to the on-screen navigation controls.
// Subclasses override onMouseDown() (invoked while the control is pressed)
// and/or onClick() (invoked on a click) to drive the EarthManipulator.
class NavigationControlHandler : public osgEarth::Util::Controls::ControlEventHandler
{
  public:
    // Invoked repeatedly while the mouse button is held down over the control
    virtual void onMouseDown() { }
    // Invoked once when the control is clicked
    virtual void onClick( const osgGA::GUIEventAdapter & /*ea*/, osgGA::GUIActionAdapter & /*aa*/ ) {}
};
class ZoomControlHandler : public NavigationControlHandler
{
public:
ZoomControlHandler( osgEarth::Util::EarthManipulator *manip, double dx, double dy )
: _manip( manip ), _dx( dx ), _dy( dy ) { }
void onMouseDown() override
{
_manip->zoom( _dx, _dy );
}
private:
osg::observer_ptr<osgEarth::Util::EarthManipulator> _manip;
double _dx;
double _dy;
};
//! Navigation control handler that flies the camera back to its home viewpoint.
class HomeControlHandler : public NavigationControlHandler
{
  public:
    HomeControlHandler( osgEarth::Util::EarthManipulator *manip )
      : mManip( manip )
    {
    }

    void onClick( const osgGA::GUIEventAdapter &ea, osgGA::GUIActionAdapter &aa ) override
    {
      // Return to the home viewpoint configured on the manipulator
      mManip->home( ea, aa );
    }

  private:
    osg::observer_ptr<osgEarth::Util::EarthManipulator> mManip;
};
//! Navigation control handler that synchronizes the globe view with the
//! current 2D map canvas extent.
class SyncExtentControlHandler : public NavigationControlHandler
{
  public:
    SyncExtentControlHandler( GlobePlugin *globe )
      : mGlobe( globe )
    {
    }

    void onClick( const osgGA::GUIEventAdapter & /*ea*/, osgGA::GUIActionAdapter & /*aa*/ ) override
    {
      // Delegate to the plugin, which computes and applies the matching viewpoint
      mGlobe->syncExtent();
    }

  private:
    GlobePlugin *mGlobe = nullptr;
};
class PanControlHandler : public NavigationControlHandler
{
public:
PanControlHandler( osgEarth::Util::EarthManipulator *manip, double dx, double dy ) : _manip( manip ), _dx( dx ), _dy( dy ) { }
void onMouseDown() override
{
_manip->pan( _dx, _dy );
}
private:
osg::observer_ptr<osgEarth::Util::EarthManipulator> _manip;
double _dx;
double _dy;
};
class RotateControlHandler : public NavigationControlHandler
{
public:
RotateControlHandler( osgEarth::Util::EarthManipulator *manip, double dx, double dy ) : _manip( manip ), _dx( dx ), _dy( dy ) { }
void onMouseDown() override
{
if ( 0 == _dx && 0 == _dy )
_manip->setRotation( osg::Quat() );
else
_manip->rotate( _dx, _dy );
}
private:
osg::observer_ptr<osgEarth::Util::EarthManipulator> _manip;
double _dx;
double _dy;
};
// An event handler that reports the geographic coordinates under the mouse cursor
class QueryCoordinatesHandler : public osgGA::GUIEventHandler
{
  public:
    QueryCoordinatesHandler( GlobePlugin *globe ) : mGlobe( globe ) { }

    //! On every MOVE event, intersect the cursor ray with the scene and forward
    //! the closest hit point (converted to geodetic coordinates) to the plugin.
    //! Fix: added the missing `override` for consistency with the sibling
    //! KeyboardControlHandler::handle() declaration.
    bool handle( const osgGA::GUIEventAdapter &ea, osgGA::GUIActionAdapter &aa ) override
    {
      if ( ea.getEventType() == osgGA::GUIEventAdapter::MOVE )
      {
        osgViewer::View *view = static_cast<osgViewer::View *>( aa.asView() );
        osgUtil::LineSegmentIntersector::Intersections hits;
        if ( view->computeIntersections( ea.getX(), ea.getY(), hits ) )
        {
          osgEarth::GeoPoint isectPoint;
          // Convert the first (closest) world-space hit into geodetic lon/lat/elev
          isectPoint.fromWorld( mGlobe->mapNode()->getMapSRS()->getGeodeticSRS(), hits.begin()->getWorldIntersectPoint() );
          mGlobe->showCurrentCoordinates( isectPoint );
        }
      }
      // Never consume the event so other handlers still receive it
      return false;
    }

  private:
    GlobePlugin *mGlobe = nullptr;
};
// Forwards keyboard events to the EarthManipulator.
// The handle() implementation is defined elsewhere in this file.
class KeyboardControlHandler : public osgGA::GUIEventHandler
{
  public:
    KeyboardControlHandler( osgEarth::Util::EarthManipulator *manip ) : _manip( manip ) { }
    // Return value follows the osgGA convention (true = event consumed);
    // the implementation is outside this chunk.
    bool handle( const osgGA::GUIEventAdapter &ea, osgGA::GUIActionAdapter &aa ) override;
  private:
    osg::observer_ptr<osgEarth::Util::EarthManipulator> _manip;
};
// ImageControl specialization used for the on-screen navigation buttons.
// The handle() implementation is defined elsewhere in this file.
class NavigationControl : public osgEarth::Util::Controls::ImageControl
{
  public:
    NavigationControl( osg::Image *image = 0 ) : ImageControl( image ), mMousePressed( false ) {}
  protected:
    bool handle( const osgGA::GUIEventAdapter &ea, osgGA::GUIActionAdapter &aa, osgEarth::Util::Controls::ControlContext &cx ) override;
  private:
    // Presumably tracks press-and-hold state for repeated onMouseDown()
    // dispatch — TODO confirm against the handle() body outside this chunk.
    bool mMousePressed;
};
//! Constructor: registers plugin metadata with the base class and zero-initializes
//! the lazily-created UI members (created in initGui()/run()).
//! Consistency fix: pointer members are initialized with nullptr instead of 0,
//! matching the nullptr usage elsewhere in this file.
GlobePlugin::GlobePlugin( QgisInterface *qgisInterface )
  : QgisPlugin( sName, sDescription, sCategory, sPluginVersion, sPluginType )
  , mQGisIface( qgisInterface )
  , mViewerWidget( nullptr )
  , mDockWidget( nullptr )
  , mSettingsDialog( nullptr )
  , mSelectedLat( 0. )
  , mSelectedLon( 0. )
  , mSelectedElevation( 0. )
  , mLayerPropertiesFactory( nullptr )
{
#ifdef Q_OS_MACX
  // update path to osg plugins on Mac OS X
  if ( !getenv( "OSG_LIBRARY_PATH" ) )
  {
    // OSG_PLUGINS_PATH value set by CMake option
    QString ogsPlugins( OSG_PLUGINS_PATH );
    QString bundlePlugins = QgsApplication::pluginPath() + "/../osgPlugins";
    if ( QFile::exists( bundlePlugins ) )
    {
      // add internal osg plugin path if bundled osg
      ogsPlugins = bundlePlugins;
    }
    if ( QFile::exists( ogsPlugins ) )
    {
      osgDB::Registry::instance()->setLibraryFilePathList( QDir::cleanPath( ogsPlugins ).toStdString() );
    }
  }
#endif
}
GlobePlugin::~GlobePlugin() {}
//! Creates the plugin's persistent GUI objects (settings dialog, toolbar/menu
//! action, layer-properties factory) and wires up their signals. Called once
//! by the QGIS plugin framework when the plugin is loaded.
void GlobePlugin::initGui()
{
  // Settings dialog is created once and reused; changes are pushed via applySettings()
  mSettingsDialog = new QgsGlobePluginDialog( mQGisIface->mainWindow(), QgsGuiUtils::ModalDialogFlags );
  connect( mSettingsDialog, SIGNAL( settingsApplied() ), this, SLOT( applySettings() ) );
  // Checkable toolbar/menu action toggling the globe dock widget
  mActionToggleGlobe = new QAction( QIcon( ":/globe/globe.png" ), tr( "Launch Globe" ), this );
  mActionToggleGlobe->setCheckable( true );
  mQGisIface->addToolBarIcon( mActionToggleGlobe );
  mQGisIface->addPluginToMenu( tr( "&Globe" ), mActionToggleGlobe );
  // Registers the globe tab in the layer properties dialog
  mLayerPropertiesFactory = new QgsGlobeLayerPropertiesFactory( this );
  mQGisIface->registerMapLayerConfigWidgetFactory( mLayerPropertiesFactory );
  connect( mActionToggleGlobe, SIGNAL( triggered( bool ) ), this, SLOT( setGlobeEnabled( bool ) ) );
  connect( mLayerPropertiesFactory, SIGNAL( layerSettingsChanged( QgsMapLayer * ) ), this, SLOT( layerChanged( QgsMapLayer * ) ) );
  // Forward globe mouse coordinates to the 2D canvas coordinate display
  connect( this, SIGNAL( xyCoordinates( const QgsPointXY & ) ), mQGisIface->mapCanvas(), SIGNAL( xyCoordinates( const QgsPointXY & ) ) );
  connect( mQGisIface->mainWindow(), SIGNAL( projectRead() ), this, SLOT( projectRead() ) );
}
//! Launches the globe: builds the OSG viewer, the osgEarth map/map node, the
//! QGIS draped-layer tile source, the dock widget hosting the GL view, and the
//! UI controls. No-op when the viewer widget already exists. The construction
//! order below is deliberate (viewer -> manipulator -> scene graph -> widget).
void GlobePlugin::run()
{
  // Already running — the viewer widget exists
  if ( mViewerWidget != 0 )
  {
    return;
  }
#ifdef GLOBE_SHOW_TILE_STATS
  QgsGlobeTileStatistics *tileStats = new QgsGlobeTileStatistics();
  connect( tileStats, SIGNAL( changed( int, int ) ), this, SLOT( updateTileStats( int, int ) ) );
#endif
  QgsSettings settings;
  //  osgEarth::setNotifyLevel( osg::DEBUG_INFO );
  mOsgViewer = new osgViewer::Viewer();
  mOsgViewer->setThreadingModel( osgViewer::Viewer::SingleThreaded );
  // Only re-render when something changed, instead of continuously
  mOsgViewer->setRunFrameScheme( osgViewer::Viewer::ON_DEMAND );
  // Set camera manipulator with default home position
  osgEarth::Util::EarthManipulator *manip = new osgEarth::Util::EarthManipulator();
  mOsgViewer->setCameraManipulator( manip );
  // Home viewpoint: looking straight down at (0, 0) from 20'000 km
  osgEarth::Util::Viewpoint viewpoint;
  viewpoint.focalPoint() = osgEarth::GeoPoint( osgEarth::SpatialReference::get( "wgs84" ), 0., 0., 0. );
  viewpoint.heading() = 0.;
  viewpoint.pitch() = -90.;
  viewpoint.range() = 2e7;
  manip->setHomeViewpoint( viewpoint, 1. );
  manip->home( 0 );
  setupProxy();
  // Tile stats label
  mStatsLabel = new osgEarth::Util::Controls::LabelControl( "", 10 );
  mStatsLabel->setPosition( 0, 0 );
  osgEarth::Util::Controls::ControlCanvas::get( mOsgViewer )->addControl( mStatsLabel.get() );
  // Dock widget hosting the globe view and layer selection
  mDockWidget = new QgsGlobeWidget( mQGisIface, mQGisIface->mainWindow() );
  connect( mDockWidget, SIGNAL( destroyed( QObject * ) ), this, SLOT( reset() ) );
  connect( mDockWidget, SIGNAL( layersChanged() ), this, SLOT( updateLayers() ) );
  connect( mDockWidget, SIGNAL( showSettings() ), this, SLOT( showSettings() ) );
  connect( mDockWidget, SIGNAL( refresh() ), this, SLOT( rebuildQGISLayer() ) );
  connect( mDockWidget, SIGNAL( syncExtent() ), this, SLOT( syncExtent() ) );
  mQGisIface->addDockWidget( Qt::RightDockWidgetArea, mDockWidget );
  // Optionally load the whole scene from an earth file given via environment
  if ( getenv( "GLOBE_MAPXML" ) )
  {
    char *mapxml = getenv( "GLOBE_MAPXML" );
    QgsDebugMsg( mapxml );
    osg::Node *node = osgDB::readNodeFile( mapxml );
    if ( !node )
    {
      QgsDebugMsg( "Failed to load earth file " );
      return;
    }
    mMapNode = osgEarth::MapNode::findMapNode( node );
    mRootNode = new osg::Group();
    mRootNode->addChild( node );
  }
  else
  {
    // Build the map programmatically, with a file-system tile cache
    QString cacheDirectory = settings.value( "cache/directory" ).toString();
    if ( cacheDirectory.isEmpty() )
      cacheDirectory = QgsApplication::qgisSettingsDirPath() + "cache";
    osgEarth::Drivers::FileSystemCacheOptions cacheOptions;
    cacheOptions.rootPath() = cacheDirectory.toStdString();
    osgEarth::MapOptions mapOptions;
    mapOptions.cache() = cacheOptions;
    // NOTE(review): mapOptions is prepared but not passed to the Map — the
    // cache configuration above appears unused; confirm whether this is intended
    osgEarth::Map *map = new osgEarth::Map( /*mapOptions*/ );
    // The MapNode will render the Map object in the scene graph.
    osgEarth::MapNodeOptions mapNodeOptions;
    mMapNode = new osgEarth::MapNode( map, mapNodeOptions );
    mRootNode = new osg::Group();
    mRootNode->addChild( mMapNode );
    osgEarth::Registry::instance()->unRefImageDataAfterApply() = false;
    // Add draped layer
    osgEarth::TileSourceOptions opts;
    opts.L2CacheSize() = 0;
#if OSGEARTH_VERSION_LESS_THAN( 2, 9, 0 )
    opts.tileSize() = 128;
#endif
    mTileSource = new QgsGlobeTileSource( mQGisIface->mapCanvas(), opts );
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 9, 0 )
    mTileSource->open();
#endif
    osgEarth::ImageLayerOptions options( "QGIS" );
    options.driver()->L2CacheSize() = 0;
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 9, 0 )
    options.tileSize() = 128;
#endif
    // QGIS renders tiles itself; never let osgEarth cache them
    options.cachePolicy() = osgEarth::CachePolicy::USAGE_NO_CACHE;
    mQgisMapLayer = new osgEarth::ImageLayer( options, mTileSource );
    map->addImageLayer( mQgisMapLayer );
    // Create the frustum highlight callback
    mFrustumHighlightCallback = new QgsGlobeFrustumHighlightCallback(
      mOsgViewer, mMapNode->getTerrain(), mQGisIface->mapCanvas(), QColor( 0, 0, 0, 50 ) );
  }
  mRootNode->addChild( osgEarth::Util::Controls::ControlCanvas::get( mOsgViewer ) );
  mOsgViewer->setSceneData( mRootNode );
  mOsgViewer->addEventHandler( new QueryCoordinatesHandler( this ) );
  mOsgViewer->addEventHandler( new KeyboardControlHandler( manip ) );
  mOsgViewer->addEventHandler( new osgViewer::StatsHandler() );
  mOsgViewer->addEventHandler( new osgViewer::WindowSizeHandler() );
  mOsgViewer->addEventHandler( new osgGA::StateSetManipulator( mOsgViewer->getCamera()->getOrCreateStateSet() ) );
  mOsgViewer->getCamera()->addCullCallback( new osgEarth::Util::AutoClipPlaneCullCallback( mMapNode ) );
  // osgEarth benefits from pre-compilation of GL objects in the pager. In newer versions of
  // OSG, this activates OSG's IncrementalCompileOpeartion in order to avoid frame breaks.
  mOsgViewer->getDatabasePager()->setDoPreCompile( true );
  mViewerWidget = new osgEarth::QtGui::ViewerWidget( mOsgViewer );
  QGLFormat glf = QGLFormat::defaultFormat();
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 9, 0 )
  glf.setVersion( 3, 3 );
  glf.setProfile( QGLFormat::CoreProfile );
#endif
  // Optional MSAA, controlled via plugin settings
  if ( settings.value( "/Plugin-Globe/anti-aliasing", true ).toBool() &&
       settings.value( "/Plugin-Globe/anti-aliasing-level", "" ).toInt() > 0 )
  {
    glf.setSampleBuffers( true );
    glf.setSamples( settings.value( "/Plugin-Globe/anti-aliasing-level", "" ).toInt() );
  }
  mViewerWidget->setFormat( glf );
  mDockWidget->setWidget( mViewerWidget );
  mViewerWidget->setParent( mDockWidget );
  // Feature identification: clicking a feature on the globe identifies it on the 2D canvas
  mFeatureQueryToolIdentifyCb = new QgsGlobeFeatureIdentifyCallback( mQGisIface->mapCanvas() );
  mFeatureQueryTool = new osgEarth::Util::FeatureQueryTool();
  mFeatureQueryTool->addChild( mMapNode );
  mFeatureQueryTool->setDefaultCallback( mFeatureQueryToolIdentifyCb.get() );
  setupControls();
  // FIXME: Workaround for OpenGL errors, in some manner related to the SkyNode,
  // which appear when launching the globe a second time:
  // Delay applySettings one event loop iteration, i.e. one update call of the GL canvas
  QTimer *timer = new QTimer();
  QTimer *timer2 = new QTimer();
  connect( timer, SIGNAL( timeout() ), timer, SLOT( deleteLater() ) );
  connect( timer2, SIGNAL( timeout() ), timer2, SLOT( deleteLater() ) );
  connect( timer, SIGNAL( timeout() ), this, SLOT( applySettings() ) );
  connect( timer2, SIGNAL( timeout() ), this, SLOT( updateLayers() ) );
  timer->start( 0 );
  timer2->start( 100 );
}
//! Opens the modal plugin settings dialog (created in initGui()).
void GlobePlugin::showSettings()
{
  mSettingsDialog->exec();
}
//! Reacts to a project being loaded: hides the globe, then re-reads and applies
//! the project-level globe settings.
void GlobePlugin::projectRead()
{
  setGlobeEnabled( false ); // Hide globe when new projects loaded, on some systems it is very slow loading a new project with globe enabled
  mSettingsDialog->readProjectSettings();
  applyProjectSettings();
}
//! Applies global (non-project) settings to the running globe: scroll
//! sensitivity, scroll direction, frustum highlighting and feature
//! identification. No-op when the globe is not running.
//! Fix: guard against a null manipulator before dereferencing the result of
//! the dynamic_cast (previously an unchecked dereference).
void GlobePlugin::applySettings()
{
  if ( !mOsgViewer )
  {
    return;
  }
  osgEarth::Util::EarthManipulator *manip = dynamic_cast<osgEarth::Util::EarthManipulator *>( mOsgViewer->getCameraManipulator() );
  if ( !manip )
  {
    // run() installs an EarthManipulator; bail out defensively if it is missing
    return;
  }
  osgEarth::Util::EarthManipulator::Settings *settings = manip->getSettings();
  settings->setScrollSensitivity( mSettingsDialog->getScrollSensitivity() );
  // Bind the scroll wheel to zoom, honoring the "invert" preference
  if ( !mSettingsDialog->getInvertScrollWheel() )
  {
    settings->bindScroll( osgEarth::Util::EarthManipulator::ACTION_ZOOM_IN, osgGA::GUIEventAdapter::SCROLL_UP );
    settings->bindScroll( osgEarth::Util::EarthManipulator::ACTION_ZOOM_OUT, osgGA::GUIEventAdapter::SCROLL_DOWN );
  }
  else
  {
    settings->bindScroll( osgEarth::Util::EarthManipulator::ACTION_ZOOM_OUT, osgGA::GUIEventAdapter::SCROLL_UP );
    settings->bindScroll( osgEarth::Util::EarthManipulator::ACTION_ZOOM_IN, osgGA::GUIEventAdapter::SCROLL_DOWN );
  }
  // Advanced settings
  enableFrustumHighlight( mSettingsDialog->getFrustumHighlighting() );
  enableFeatureIdentification( mSettingsDialog->getFeatureIdenification() );
  applyProjectSettings();
}
//! Applies project-level settings to the running globe: imagery layers,
//! elevation layers, vertical scale and sky. Only runs when the globe is up
//! and the scene was not loaded from an earth file (GLOBE_MAPXML).
void GlobePlugin::applyProjectSettings()
{
  if ( mOsgViewer && !getenv( "GLOBE_MAPXML" ) )
  {
    // Imagery settings
    QList<QgsGlobePluginDialog::LayerDataSource> imageryDataSources = mSettingsDialog->getImageryDataSources();
    if ( imageryDataSources != mImagerySources )
    {
      mImagerySources = imageryDataSources;
      QgsDebugMsg( "imageryLayersChanged: Globe Running, executing" );
      osg::ref_ptr<osgEarth::Map> map = mMapNode->getMap();
      // Remove image layers
      osgEarth::ImageLayerVector list;
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 9, 0 )
      map->getLayers( list );
#else
      map->getImageLayers( list );
#endif
      // Keep the QGIS draped layer; remove everything else
      for ( osgEarth::ImageLayerVector::iterator i = list.begin(); i != list.end(); ++i )
      {
        if ( *i != mQgisMapLayer )
          map->removeImageLayer( *i );
      }
      if ( !list.empty() )
      {
        // Drop pending tile requests referring to removed layers
        mOsgViewer->getDatabasePager()->clear();
      }
      // Add image layers
      for ( const QgsGlobePluginDialog::LayerDataSource &datasource : mImagerySources )
      {
        osgEarth::ImageLayer *layer = 0;
        if ( "Raster" == datasource.type )
        {
          osgEarth::Drivers::GDALOptions options;
          options.url() = datasource.uri.toStdString();
          layer = new osgEarth::ImageLayer( datasource.uri.toStdString(), options );
        }
        else if ( "TMS" == datasource.type )
        {
          osgEarth::Drivers::TMSOptions options;
          options.url() = datasource.uri.toStdString();
          layer = new osgEarth::ImageLayer( datasource.uri.toStdString(), options );
        }
        else if ( "WMS" == datasource.type )
        {
          osgEarth::Drivers::WMSOptions options;
          options.url() = datasource.uri.toStdString();
          layer = new osgEarth::ImageLayer( datasource.uri.toStdString(), options );
        }
        // NOTE(review): an unrecognized datasource.type leaves layer null and a
        // null layer is inserted — verify the dialog restricts types to the set above
        map->insertImageLayer( layer, 0 );
      }
    }
    // Elevation settings
    QList<QgsGlobePluginDialog::LayerDataSource> elevationDataSources = mSettingsDialog->getElevationDataSources();
    if ( elevationDataSources != mElevationSources )
    {
      mElevationSources = elevationDataSources;
      QgsDebugMsg( "elevationLayersChanged: Globe Running, executing" );
      osg::ref_ptr<osgEarth::Map> map = mMapNode->getMap();
      // Remove elevation layers
      osgEarth::ElevationLayerVector list;
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 9, 0 )
      map->getLayers( list );
#else
      map->getElevationLayers( list );
#endif
      for ( osgEarth::ElevationLayerVector::iterator i = list.begin(); i != list.end(); ++i )
      {
        map->removeElevationLayer( *i );
      }
      if ( !list.empty() )
      {
        mOsgViewer->getDatabasePager()->clear();
      }
      // Add elevation layers
      for ( const QgsGlobePluginDialog::LayerDataSource &datasource : mElevationSources )
      {
        osgEarth::ElevationLayer *layer = 0;
        if ( "Raster" == datasource.type )
        {
          osgEarth::Drivers::GDALOptions options;
          options.interpolation() = osgEarth::Drivers::INTERP_NEAREST;
          options.url() = datasource.uri.toStdString();
          layer = new osgEarth::ElevationLayer( datasource.uri.toStdString(), options );
        }
        else if ( "TMS" == datasource.type )
        {
          osgEarth::Drivers::TMSOptions options;
          options.url() = datasource.uri.toStdString();
          layer = new osgEarth::ElevationLayer( datasource.uri.toStdString(), options );
        }
        // NOTE(review): same null-layer concern as for imagery sources above
        map->addElevationLayer( layer );
      }
    }
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 5, 0 )
    // Vertical scale: replace the terrain effect only when the value changed
    double verticalScaleValue = mSettingsDialog->getVerticalScale();
    if ( !mVerticalScale.get() || mVerticalScale->getScale() != verticalScaleValue )
    {
      mMapNode->getTerrainEngine()->removeEffect( mVerticalScale );
      mVerticalScale = new osgEarth::Util::VerticalScale();
      mVerticalScale->setScale( verticalScaleValue );
      mMapNode->getTerrainEngine()->addEffect( mVerticalScale );
    }
#endif
    // Sky settings
    if ( mSettingsDialog->getSkyEnabled() )
    {
      // Create if not yet done
      if ( !mSkyNode.get() )
      {
        mSkyNode = osgEarth::Util::SkyNode::create( mMapNode );
        mSkyNode->attach( mOsgViewer );
        mRootNode->addChild( mSkyNode );
        // Insert sky between root and map
        mSkyNode->addChild( mMapNode );
        mRootNode->removeChild( mMapNode );
      }
      mSkyNode->setLighting( mSettingsDialog->getSkyAutoAmbience() ? osg::StateAttribute::ON : osg::StateAttribute::OFF );
      double ambient = mSettingsDialog->getSkyMinAmbient();
      mSkyNode->getSunLight()->setAmbient( osg::Vec4( ambient, ambient, ambient, 1 ) );
      // Hour is expressed as a fraction (minutes folded into the hour)
      QDateTime dateTime = mSettingsDialog->getSkyDateTime();
      mSkyNode->setDateTime( osgEarth::DateTime(
                               dateTime.date().year(),
                               dateTime.date().month(),
                               dateTime.date().day(),
                               dateTime.time().hour() + dateTime.time().minute() / 60.0 ) );
    }
    else if ( mSkyNode != 0 )
    {
      // Remove the sky node and re-attach the map directly to the root
      mRootNode->addChild( mMapNode );
      mSkyNode->removeChild( mMapNode );
      mRootNode->removeChild( mSkyNode );
      mSkyNode = 0;
    }
  }
}
//! Returns the union of the cached WGS84 extents of all draped QGIS layers,
//! or a null rectangle when no extents are registered.
QgsRectangle GlobePlugin::getQGISLayerExtent() const
{
  const QList<QgsRectangle> extents = mLayerExtents.values();
  if ( extents.isEmpty() )
    return QgsRectangle();

  // Seed with the first extent, then fold in every non-null remaining one
  QgsRectangle fullExtent = extents.front();
  for ( int idx = 1; idx < extents.size(); ++idx )
  {
    const QgsRectangle &extent = extents.at( idx );
    if ( !extent.isNull() )
      fullExtent.combineExtentWith( extent );
  }
  return fullExtent;
}
//! Transforms a geodetic point from the globe into the 2D canvas CRS and
//! emits it via the xyCoordinates signal (forwarded to the status bar).
void GlobePlugin::showCurrentCoordinates( const osgEarth::GeoPoint &geoPoint )
{
  const osg::Vec3d pos = geoPoint.vec3d();
  const QgsCoordinateTransform transform( QgsCoordinateReferenceSystem( GEO_EPSG_CRS_AUTHID ),
                                          mQGisIface->mapCanvas()->mapSettings().destinationCrs(),
                                          QgsProject::instance()->transformContext() );
  emit xyCoordinates( transform.transform( QgsPointXY( pos.x(), pos.y() ) ) );
}
//! Stores the picked geographic position (lon/lat/elevation) and notifies
//! listeners about the new 2D position.
void GlobePlugin::setSelectedCoordinates( const osg::Vec3d &coords )
{
  // Cache the full 3D position for later retrieval via getSelectedCoordinates()
  mSelectedLon = coords.x();
  mSelectedLat = coords.y();
  mSelectedElevation = coords.z();
  emit newCoordinatesSelected( QgsPointXY( coords.x(), coords.y() ) );
}
//! Returns the last selected position as (lon, lat, elevation).
osg::Vec3d GlobePlugin::getSelectedCoordinates()
{
  const osg::Vec3d selected( mSelectedLon, mSelectedLat, mSelectedElevation );
  return selected;
}
void GlobePlugin::syncExtent()
{
const QgsMapSettings &mapSettings = mQGisIface->mapCanvas()->mapSettings();
QgsRectangle extent = mQGisIface->mapCanvas()->extent();
long epsgGlobe = 4326;
QgsCoordinateReferenceSystem globeCrs;
globeCrs.createFromOgcWmsCrs( QString( "EPSG:%1" ).arg( epsgGlobe ) );
// transform extent to WGS84
if ( mapSettings.destinationCrs().authid().compare( QString( "EPSG:%1" ).arg( epsgGlobe ), Qt::CaseInsensitive ) != 0 )
{
QgsCoordinateReferenceSystem srcCRS( mapSettings.destinationCrs() );
extent = QgsCoordinateTransform( srcCRS, globeCrs, QgsProject::instance()->transformContext() ).transformBoundingBox( extent );
}
QgsDistanceArea dist;
dist.setSourceCrs( globeCrs, QgsProject::instance()->transformContext() );
dist.setEllipsoid( "WGS84" );
QgsPointXY ll = QgsPointXY( extent.xMinimum(), extent.yMinimum() );
QgsPointXY ul = QgsPointXY( extent.xMinimum(), extent.yMaximum() );
double height = dist.measureLine( ll, ul );
// double height = dist.computeDistanceBearing( ll, ul );
double camViewAngle = 30;
double camDistance = height / tan( camViewAngle * osg::PI / 180 ); //c = b*cotan(B(rad))
#if OSGEARTH_VERSION_LESS_THAN(2, 7, 0)
osgEarth::Util::Viewpoint viewpoint( osg::Vec3d( extent.center().x(), extent.center().y(), 0.0 ), 0.0, -90.0, camDistance );
#else
osgEarth::Util::Viewpoint viewpoint;
viewpoint.focalPoint() = osgEarth::GeoPoint( osgEarth::SpatialReference::get( "wgs84" ), extent.center().x(), extent.center().y(), 0.0 );
viewpoint.heading() = 0.0;
viewpoint.pitch() = -90.0;
viewpoint.range() = camDistance;
#endif
OE_NOTICE << "map extent: " << height << " camera distance: " << camDistance << std::endl;
osgEarth::Util::EarthManipulator *manip = dynamic_cast<osgEarth::Util::EarthManipulator *>( mOsgViewer->getCameraManipulator() );
manip->setRotation( osg::Quat() );
manip->setViewpoint( viewpoint, 4.0 );
}
//! Places a control at the given canvas position with the given size and
//! attaches its event handler, then registers it on the control canvas.
void GlobePlugin::addControl( osgEarth::Util::Controls::Control *control, int x, int y, int w, int h, osgEarth::Util::Controls::ControlEventHandler *handler )
{
  // Configure geometry first, then behavior
  control->setPosition( x, y );
  control->setWidth( w );
  control->setHeight( h );
  control->addEventHandler( handler );
  osgEarth::Util::Controls::ControlCanvas::get( mOsgViewer )->addControl( control );
}
void GlobePlugin::addImageControl( const std::string &imgPath, int x, int y, osgEarth::Util::Controls::ControlEventHandler *handler )
{
osg::Image *image = osgDB::readImageFile( imgPath );
osgEarth::Util::Controls::ImageControl *control = new NavigationControl( image );
control->setPosition( x, y );
control->setWidth( image->s() );
control->setHeight( image->t() );
if ( handler )
control->addEventHandler( handler );
osgEarth::Util::Controls::ControlCanvas::get( mOsgViewer )->addControl( control );
}
//! Builds the on-screen navigation UI: a rotate/tilt wheel, a pan wheel with a
//! center "home" button, and zoom-in/out buttons. The hard-coded pixel offsets
//! position the invisible hit areas over the corresponding wheel artwork.
void GlobePlugin::setupControls()
{
  std::string imgDir = QDir::cleanPath( QgsApplication::pkgDataPath() + "/globe/gui" ).toStdString();
  if ( QgsApplication::isRunningFromBuildDir() )
  {
    imgDir = QDir::cleanPath( QgsApplication::buildSourcePath() + "/src/plugins/globe/images/gui" ).toStdString();
  }
  osgEarth::Util::EarthManipulator *manip = dynamic_cast<osgEarth::Util::EarthManipulator *>( mOsgViewer->getCameraManipulator() );
  // Rotate and tiltcontrols
  int imgLeft = 16;
  int imgTop = 20;
  addImageControl( imgDir + "/YawPitchWheel.png", 16, 20 );
  // Left / right / center-reset / up / down hit areas on the yaw-pitch wheel
  addControl( new NavigationControl, imgLeft, imgTop + 18, 20, 22, new RotateControlHandler( manip, -MOVE_OFFSET, 0 ) );
  addControl( new NavigationControl, imgLeft + 36, imgTop + 18, 20, 22, new RotateControlHandler( manip, MOVE_OFFSET, 0 ) );
  addControl( new NavigationControl, imgLeft + 20, imgTop + 18, 16, 22, new RotateControlHandler( manip, 0, 0 ) );
  addControl( new NavigationControl, imgLeft + 20, imgTop, 24, 19, new RotateControlHandler( manip, 0, -MOVE_OFFSET ) );
  addControl( new NavigationControl, imgLeft + 16, imgTop + 36, 24, 19, new RotateControlHandler( manip, 0, MOVE_OFFSET ) );
  // Move controls
  imgTop = 80;
  addImageControl( imgDir + "/MoveWheel.png", imgLeft, imgTop );
  addControl( new NavigationControl, imgLeft, imgTop + 18, 20, 22, new PanControlHandler( manip, MOVE_OFFSET, 0 ) );
  addControl( new NavigationControl, imgLeft + 36, imgTop + 18, 20, 22, new PanControlHandler( manip, -MOVE_OFFSET, 0 ) );
  addControl( new NavigationControl, imgLeft + 20, imgTop, 24, 19, new PanControlHandler( manip, 0, -MOVE_OFFSET ) );
  addControl( new NavigationControl, imgLeft + 16, imgTop + 36, 24, 19, new PanControlHandler( manip, 0, MOVE_OFFSET ) );
  // Center of the move wheel doubles as the "home" button
  addControl( new NavigationControl, imgLeft + 20, imgTop + 18, 16, 22, new HomeControlHandler( manip ) );
  // Zoom controls
  imgLeft = 28;
  imgTop = imgTop + 62;
  addImageControl( imgDir + "/button-background.png", imgLeft, imgTop );
  addImageControl( imgDir + "/zoom-in.png", imgLeft + 3, imgTop + 2, new ZoomControlHandler( manip, 0, -MOVE_OFFSET ) );
  addImageControl( imgDir + "/zoom-out.png", imgLeft + 3, imgTop + 29, new ZoomControlHandler( manip, 0, MOVE_OFFSET ) );
}
//! Propagates the QGIS proxy settings to osgEarth's HTTP client. Credentials
//! are passed to the curl backend via the OSGEARTH_CURL_PROXYAUTH environment
//! variable.
void GlobePlugin::setupProxy()
{
  QgsSettings settings;
  settings.beginGroup( "proxy" );
  if ( settings.value( "/proxyEnabled" ).toBool() )
  {
    osgEarth::ProxySettings proxySettings( settings.value( "/proxyHost" ).toString().toStdString(),
                                           settings.value( "/proxyPort" ).toInt() );
    if ( !settings.value( "/proxyUser" ).toString().isEmpty() )
    {
      // user:password format expected by curl's proxy auth
      QString auth = settings.value( "/proxyUser" ).toString() + ":" + settings.value( "/proxyPassword" ).toString();
      qputenv( "OSGEARTH_CURL_PROXYAUTH", auth.toLocal8Bit() );
    }
    //TODO: settings.value("/proxyType")
    //TODO: URL exlusions
    osgEarth::HTTPClient::setProxySettings( proxySettings );
  }
  settings.endGroup();
}
//! Invalidates the draped QGIS layer within the given (WGS84) rectangle and
//! triggers a redraw. No-op before the tile source exists.
void GlobePlugin::refreshQGISMapLayer( const QgsRectangle &dirtyRect )
{
  if ( !mTileSource )
    return;

  // Drop queued tile requests so stale tiles are not delivered after the refresh
  mOsgViewer->getDatabasePager()->clear();
  mTileSource->refresh( dirtyRect );
  mOsgViewer->requestRedraw();
}
void GlobePlugin::updateTileStats( int queued, int tot )
{
if ( mStatsLabel )
mStatsLabel->setText( QString( "Queued tiles: %1\nTot tiles: %2" ).arg( queued ).arg( tot ).toStdString() );
}
//! Adds a QGIS vector layer to the globe as an osgEarth model layer, deriving
//! the osgEarth style (stroke/fill colors, altitude, extrusion, labeling,
//! render settings) from the layer's QGIS renderer and the per-layer globe
//! configuration.
void GlobePlugin::addModelLayer( QgsVectorLayer *vLayer, QgsGlobeVectorLayerConfig *layerConfig )
{
  QgsGlobeFeatureOptions featureOpt;
  featureOpt.setLayer( vLayer );
  osgEarth::Style style;
  QgsRenderContext ctx;
  if ( !vLayer->renderer()->symbols( ctx ).isEmpty() )
  {
    // Derive line/polygon colors from the layer's symbols, folding the
    // layer opacity into the alpha channel
    for ( QgsSymbol *sym : vLayer->renderer()->symbols( ctx ) )
    {
      if ( sym->type() == QgsSymbol::Line )
      {
        osgEarth::LineSymbol *ls = style.getOrCreateSymbol<osgEarth::LineSymbol>();
        QColor color = sym->color();
        ls->stroke()->color() = osg::Vec4f( color.redF(), color.greenF(), color.blueF(), color.alphaF() * vLayer->opacity() );
        ls->stroke()->width() = 1.0f;
      }
      else if ( sym->type() == QgsSymbol::Fill )
      {
        // TODO access border color, etc.
        osgEarth::PolygonSymbol *poly = style.getOrCreateSymbol<osgEarth::PolygonSymbol>();
        QColor color = sym->color();
        poly->fill()->color() = osg::Vec4f( color.redF(), color.greenF(), color.blueF(), color.alphaF() * vLayer->opacity() );
        style.addSymbol( poly );
      }
    }
  }
  else
  {
    // No symbols available: fall back to plain red fill and stroke
    osgEarth::PolygonSymbol *poly = style.getOrCreateSymbol<osgEarth::PolygonSymbol>();
    poly->fill()->color() = osg::Vec4f( 1.f, 0, 0, vLayer->opacity() );
    style.addSymbol( poly );
    osgEarth::LineSymbol *ls = style.getOrCreateSymbol<osgEarth::LineSymbol>();
    ls->stroke()->color() = osg::Vec4f( 1.f, 0, 0, vLayer->opacity() );
    ls->stroke()->width() = 1.0f;
  }
  // Altitude behavior (clamping to terrain, offsets, scaling) from the globe config
  osgEarth::AltitudeSymbol *altitudeSymbol = style.getOrCreateSymbol<osgEarth::AltitudeSymbol>();
  altitudeSymbol->clamping() = layerConfig->altitudeClamping;
  altitudeSymbol->technique() = layerConfig->altitudeTechnique;
  altitudeSymbol->binding() = layerConfig->altitudeBinding;
  altitudeSymbol->verticalOffset() = layerConfig->verticalOffset;
  altitudeSymbol->verticalScale() = layerConfig->verticalScale;
  altitudeSymbol->clampingResolution() = layerConfig->clampingResolution;
  style.addSymbol( altitudeSymbol );
  if ( layerConfig->extrusionEnabled )
  {
    osgEarth::ExtrusionSymbol *extrusionSymbol = style.getOrCreateSymbol<osgEarth::ExtrusionSymbol>();
    // Extrusion height may be a fixed number or an attribute expression
    bool extrusionHeightOk = false;
    float extrusionHeight = layerConfig->extrusionHeight.toFloat( &extrusionHeightOk );
    if ( extrusionHeightOk )
    {
      extrusionSymbol->height() = extrusionHeight;
    }
    else
    {
      extrusionSymbol->heightExpression() = layerConfig->extrusionHeight.toStdString();
    }
    extrusionSymbol->flatten() = layerConfig->extrusionFlatten;
    extrusionSymbol->wallGradientPercentage() = layerConfig->extrusionWallGradient;
    style.addSymbol( extrusionSymbol );
  }
  if ( layerConfig->labelingEnabled )
  {
    // NOTE(review): assumes vLayer->labeling() is non-null whenever
    // labelingEnabled is set in the globe config — TODO confirm
    osgEarth::TextSymbol *textSymbol = style.getOrCreateSymbol<osgEarth::TextSymbol>();
    textSymbol->declutter() = layerConfig->labelingDeclutter;
    QgsPalLayerSettings lyr = vLayer->labeling()->settings();
    textSymbol->content() = QString( "[%1]" ).arg( lyr.fieldName ).toStdString();
    textSymbol->font() = lyr.format().font().family().toStdString();
    textSymbol->size() = lyr.format().font().pointSize();
    textSymbol->alignment() = osgEarth::TextSymbol::ALIGN_CENTER_TOP;
    // Map the QGIS label buffer to an osgEarth text halo
    osgEarth::Stroke stroke;
    QColor bufferColor = lyr.format().buffer().color();
    stroke.color() = osgEarth::Symbology::Color( bufferColor.redF(), bufferColor.greenF(), bufferColor.blueF(), bufferColor.alphaF() );
    textSymbol->halo() = stroke;
    textSymbol->haloOffset() = lyr.format().buffer().size();
  }
  osgEarth::RenderSymbol *renderSymbol = style.getOrCreateSymbol<osgEarth::RenderSymbol>();
  renderSymbol->lighting() = layerConfig->lightingEnabled;
  renderSymbol->backfaceCulling() = false;
  style.addSymbol( renderSymbol );
  // Assemble the model layer: feature source + stylesheet + feature index
  osgEarth::Drivers::FeatureGeomModelOptions geomOpt;
  geomOpt.featureOptions() = featureOpt;
  geomOpt.styles() = new osgEarth::StyleSheet();
  geomOpt.styles()->addStyle( style );
  geomOpt.featureIndexing() = osgEarth::Features::FeatureSourceIndexOptions();
#if 0
  FeatureDisplayLayout layout;
  layout.tileSizeFactor() = 45.0;
  layout.addLevel( FeatureLevel( 0.0f, 200000.0f ) );
  geomOpt.layout() = layout;
#endif
  // The model layer is named after the QGIS layer id so it can be looked up later
  osgEarth::ModelLayerOptions modelOptions( vLayer->id().toStdString(), geomOpt );
  osgEarth::ModelLayer *nLayer = new osgEarth::ModelLayer( modelOptions );
  mMapNode->getMap()->addModelLayer( nLayer );
}
//! Rebuilds the set of globe layers from the dock widget's layer selection:
//! disconnects stale signal connections, removes deselected model layers,
//! adds newly selected layers (as model layers or draped layers), and
//! refreshes the draped layer over the combined old+new extent.
//! Fix: QgsProject::mapLayer() can return null for a stale layer id — the id
//! is now skipped instead of being dereferenced (previously mapLayer->crs()
//! could crash).
void GlobePlugin::updateLayers()
{
  if ( mOsgViewer )
  {
    // Get previous full extent
    QgsRectangle dirtyExtent = getQGISLayerExtent();
    mLayerExtents.clear();
    QList<QgsMapLayer *> drapedLayers;
    QStringList selectedLayerIds = mDockWidget->getSelectedLayerIds();
    // Disconnect any previous repaintRequested signals
    for ( QgsMapLayer *mapLayer : mTileSource->layers() )
    {
      if ( mapLayer )
        disconnect( mapLayer, SIGNAL( repaintRequested() ), this, SLOT( layerChanged() ) );
      if ( dynamic_cast<QgsVectorLayer *>( mapLayer ) )
        disconnect( static_cast<QgsVectorLayer *>( mapLayer ), SIGNAL( layerTransparencyChanged( int ) ), this, SLOT( layerChanged() ) );
    }
    osgEarth::ModelLayerVector modelLayers;
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 9, 0 )
    mMapNode->getMap()->getLayers( modelLayers );
#else
    mMapNode->getMap()->getModelLayers( modelLayers );
#endif
    // Disconnect model-layer signals and drop model layers no longer selected
    for ( const osg::ref_ptr<osgEarth::ModelLayer> &modelLayer : modelLayers )
    {
      QgsMapLayer *mapLayer = QgsProject::instance()->mapLayer( QString::fromStdString( modelLayer->getName() ) );
      if ( mapLayer )
        disconnect( mapLayer, SIGNAL( repaintRequested() ), this, SLOT( layerChanged() ) );
      if ( dynamic_cast<QgsVectorLayer *>( mapLayer ) )
        disconnect( static_cast<QgsVectorLayer *>( mapLayer ), SIGNAL( layerTransparencyChanged( int ) ), this, SLOT( layerChanged() ) );
      if ( !selectedLayerIds.contains( QString::fromStdString( modelLayer->getName() ) ) )
        mMapNode->getMap()->removeModelLayer( modelLayer );
    }
    for ( const QString &layerId : selectedLayerIds )
    {
      QgsMapLayer *mapLayer = QgsProject::instance()->mapLayer( layerId );
      if ( !mapLayer )
      {
        // Stale id (layer removed from the project) — nothing to add
        continue;
      }
      connect( mapLayer, SIGNAL( repaintRequested() ), this, SLOT( layerChanged() ) );
      QgsGlobeVectorLayerConfig *layerConfig = 0;
      if ( dynamic_cast<QgsVectorLayer *>( mapLayer ) )
      {
        layerConfig = QgsGlobeVectorLayerConfig::getConfig( static_cast<QgsVectorLayer *>( mapLayer ) );
        connect( static_cast<QgsVectorLayer *>( mapLayer ), SIGNAL( layerTransparencyChanged( int ) ), this, SLOT( layerChanged() ) );
      }
      if ( layerConfig && ( layerConfig->renderingMode == QgsGlobeVectorLayerConfig::RenderingModeModelSimple || layerConfig->renderingMode == QgsGlobeVectorLayerConfig::RenderingModeModelAdvanced ) )
      {
        // Vector layer rendered as a 3D model layer (added only once)
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 9, 0 )
        if ( !mMapNode->getMap()->getLayerByName( mapLayer->id().toStdString() ) )
#else
        if ( !mMapNode->getMap()->getModelLayerByName( mapLayer->id().toStdString() ) )
#endif
          addModelLayer( static_cast<QgsVectorLayer *>( mapLayer ), layerConfig );
      }
      else
      {
        // Layer is draped onto the terrain via the QGIS tile source;
        // remember its WGS84 extent for refresh bookkeeping
        drapedLayers.append( mapLayer );
        QgsRectangle extent = QgsCoordinateTransform( mapLayer->crs(), QgsCoordinateReferenceSystem( GEO_EPSG_CRS_AUTHID ), QgsProject::instance()->transformContext() ).transform( mapLayer->extent() );
        mLayerExtents.insert( mapLayer->id(), extent );
      }
    }
    mTileSource->setLayers( drapedLayers );
    // Refresh over the union of previous and new extents
    QgsRectangle newExtent = getQGISLayerExtent();
    if ( dirtyExtent.isNull() )
      dirtyExtent = newExtent;
    else if ( !newExtent.isNull() )
      dirtyExtent.combineExtentWith( newExtent );
    refreshQGISMapLayer( dirtyExtent );
  }
}
void GlobePlugin::layerChanged( QgsMapLayer *mapLayer )
{
if ( !mapLayer )
{
mapLayer = qobject_cast<QgsMapLayer *>( QObject::sender() );
}
if ( mapLayer->isEditable() )
{
return;
}
if ( mMapNode )
{
QgsGlobeVectorLayerConfig *layerConfig = 0;
if ( dynamic_cast<QgsVectorLayer *>( mapLayer ) )
{
layerConfig = QgsGlobeVectorLayerConfig::getConfig( static_cast<QgsVectorLayer *>( mapLayer ) );
}
if ( layerConfig && ( layerConfig->renderingMode == QgsGlobeVectorLayerConfig::RenderingModeModelSimple || layerConfig->renderingMode == QgsGlobeVectorLayerConfig::RenderingModeModelAdvanced ) )
{
// If was previously a draped layer, refresh the draped layer
if ( mTileSource->layers().contains( mapLayer ) )
{
QList<QgsMapLayer *> layers = mTileSource->layers();
layers.removeAll( mapLayer );
mTileSource->setLayers( layers );
QgsRectangle dirtyExtent = mLayerExtents[mapLayer->id()];
mLayerExtents.remove( mapLayer->id() );
refreshQGISMapLayer( dirtyExtent );
}
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 9, 0 )
mMapNode->getMap()->removeLayer( mMapNode->getMap()->getLayerByName( mapLayer->id().toStdString() ) );
#else
mMapNode->getMap()->removeModelLayer( mMapNode->getMap()->getModelLayerByName( mapLayer->id().toStdString() ) );
#endif
addModelLayer( static_cast<QgsVectorLayer *>( mapLayer ), layerConfig );
}
else
{
// Re-insert into layer set if necessary
if ( !mTileSource->layers().contains( mapLayer ) )
{
QList<QgsMapLayer *> layers;
for ( const QString &layerId : mDockWidget->getSelectedLayerIds() )
{
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 9, 0 )
if ( ! mMapNode->getMap()->getLayerByName( layerId.toStdString() ) )
#else
if ( ! mMapNode->getMap()->getModelLayerByName( layerId.toStdString() ) )
#endif
{
QgsMapLayer *layer = QgsProject::instance()->mapLayer( layerId );
if ( layer )
{
layers.append( layer );
}
}
}
mTileSource->setLayers( layers );
QgsRectangle extent = QgsCoordinateTransform( mapLayer->crs(), QgsCoordinateReferenceSystem( GEO_EPSG_CRS_AUTHID ), QgsProject::instance()->transformContext() ).transform( mapLayer->extent() );
mLayerExtents.insert( mapLayer->id(), extent );
}
// Remove any model layer of that layer, in case one existed
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 9, 0 )
mMapNode->getMap()->removeLayer( mMapNode->getMap()->getLayerByName( mapLayer->id().toStdString() ) );
#else
mMapNode->getMap()->removeModelLayer( mMapNode->getMap()->getModelLayerByName( mapLayer->id().toStdString() ) );
#endif
QgsRectangle layerExtent = QgsCoordinateTransform( mapLayer->crs(), QgsCoordinateReferenceSystem( GEO_EPSG_CRS_AUTHID ), QgsProject::instance()->transformContext() ).transform( mapLayer->extent() );
QgsRectangle dirtyExtent = layerExtent;
if ( mLayerExtents.contains( mapLayer->id() ) )
{
if ( dirtyExtent.isNull() )
dirtyExtent = mLayerExtents[mapLayer->id()];
else if ( !mLayerExtents[mapLayer->id()].isNull() )
dirtyExtent.combineExtentWith( mLayerExtents[mapLayer->id()] );
}
mLayerExtents[mapLayer->id()] = layerExtent;
refreshQGISMapLayer( dirtyExtent );
}
}
}
void GlobePlugin::rebuildQGISLayer()
{
if ( mMapNode )
{
mMapNode->getMap()->removeImageLayer( mQgisMapLayer );
mLayerExtents.clear();
osgEarth::TileSourceOptions opts;
opts.L2CacheSize() = 0;
#if OSGEARTH_VERSION_LESS_THAN( 2, 9, 0 )
opts.tileSize() = 128;
#endif
mTileSource = new QgsGlobeTileSource( mQGisIface->mapCanvas(), opts );
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 9, 0 )
mTileSource->open();
#endif
<|fim▁hole|> options.driver()->L2CacheSize() = 0;
#if OSGEARTH_VERSION_GREATER_OR_EQUAL( 2, 9, 0 )
options.tileSize() = 128;
#endif
options.cachePolicy() = osgEarth::CachePolicy::USAGE_NO_CACHE;
mQgisMapLayer = new osgEarth::ImageLayer( options, mTileSource );
mMapNode->getMap()->addImageLayer( mQgisMapLayer );
updateLayers();
}
}
void GlobePlugin::setGlobeEnabled( bool enabled )
{
if ( enabled )
{
run();
}
else if ( mDockWidget )
{
mDockWidget->close(); // triggers reset
}
}
void GlobePlugin::reset()
{
mStatsLabel = 0;
mActionToggleGlobe->blockSignals( true );
mActionToggleGlobe->setChecked( false );
mActionToggleGlobe->blockSignals( false );
mMapNode->getMap()->removeImageLayer( mQgisMapLayer ); // abort any rendering
mTileSource->waitForFinished();
mOsgViewer = 0;
mMapNode = 0;
mRootNode = 0;
mSkyNode = 0;
mBaseLayer = 0;
mBaseLayerUrl.clear();
mQgisMapLayer = 0;
mTileSource = 0;
mVerticalScale = 0;
mFrustumHighlightCallback = 0;
mFeatureQueryToolIdentifyCb = 0;
#if OSGEARTH_VERSION_LESS_THAN(2, 7, 0)
mFeatureQueryToolHighlightCb = 0;
#endif
mFeatureQueryTool = 0;
mViewerWidget = 0;
mDockWidget = 0;
mImagerySources.clear();
mElevationSources.clear();
mLayerExtents.clear();
#ifdef GLOBE_SHOW_TILE_STATS
disconnect( QgsGlobeTileStatistics::instance(), SIGNAL( changed( int, int ) ), this, SLOT( updateTileStats( int, int ) ) );
delete QgsGlobeTileStatistics::instance();
#endif
}
void GlobePlugin::unload()
{
if ( mDockWidget )
{
disconnect( mDockWidget, SIGNAL( destroyed( QObject * ) ), this, SLOT( reset() ) );
delete mDockWidget;
reset();
}
mQGisIface->removePluginMenu( tr( "&Globe" ), mActionToggleGlobe );
mQGisIface->removeToolBarIcon( mActionToggleGlobe );
mQGisIface->unregisterMapLayerConfigWidgetFactory( mLayerPropertiesFactory );
delete mLayerPropertiesFactory;
mLayerPropertiesFactory = 0;
delete mSettingsDialog;
mSettingsDialog = 0;
disconnect( this, SIGNAL( xyCoordinates( const QgsPointXY & ) ),
mQGisIface->mapCanvas(), SIGNAL( xyCoordinates( const QgsPointXY & ) ) );
}
void GlobePlugin::enableFrustumHighlight( bool status )
{
if ( status )
mMapNode->getTerrainEngine()->addUpdateCallback( mFrustumHighlightCallback );
else
mMapNode->getTerrainEngine()->removeUpdateCallback( mFrustumHighlightCallback );
}
void GlobePlugin::enableFeatureIdentification( bool status )
{
if ( status )
mOsgViewer->addEventHandler( mFeatureQueryTool );
else
mOsgViewer->removeEventHandler( mFeatureQueryTool );
}
bool NavigationControl::handle( const osgGA::GUIEventAdapter &ea, osgGA::GUIActionAdapter &aa, osgEarth::Util::Controls::ControlContext &cx )
{
if ( ea.getEventType() == osgGA::GUIEventAdapter::PUSH )
{
mMousePressed = true;
}
else if ( ea.getEventType() == osgGA::GUIEventAdapter::FRAME && mMousePressed )
{
float canvasY = cx._vp->height() - ( ea.getY() - cx._view->getCamera()->getViewport()->y() );
float canvasX = ea.getX() - cx._view->getCamera()->getViewport()->x();
if ( intersects( canvasX, canvasY ) )
{
for ( osgEarth::Util::Controls::ControlEventHandlerList::const_iterator i = _eventHandlers.begin(); i != _eventHandlers.end(); ++i )
{
NavigationControlHandler *handler = dynamic_cast<NavigationControlHandler *>( i->get() );
if ( handler )
{
handler->onMouseDown();
}
}
}
else
{
mMousePressed = false;
}
}
else if ( ea.getEventType() == osgGA::GUIEventAdapter::RELEASE )
{
for ( osgEarth::Util::Controls::ControlEventHandlerList::const_iterator i = _eventHandlers.begin(); i != _eventHandlers.end(); ++i )
{
NavigationControlHandler *handler = dynamic_cast<NavigationControlHandler *>( i->get() );
if ( handler )
{
handler->onClick( ea, aa );
}
}
mMousePressed = false;
}
return Control::handle( ea, aa, cx );
}
bool KeyboardControlHandler::handle( const osgGA::GUIEventAdapter &ea, osgGA::GUIActionAdapter &aa )
{
if ( ea.getEventType() == osgGA::GUIEventAdapter::KEYDOWN )
{
//move map
if ( ea.getKey() == '4' )
_manip->pan( -MOVE_OFFSET, 0 );
else if ( ea.getKey() == '6' )
_manip->pan( MOVE_OFFSET, 0 );
else if ( ea.getKey() == '2' )
_manip->pan( 0, MOVE_OFFSET );
else if ( ea.getKey() == '8' )
_manip->pan( 0, -MOVE_OFFSET );
//rotate
else if ( ea.getKey() == '/' )
_manip->rotate( MOVE_OFFSET, 0 );
else if ( ea.getKey() == '*' )
_manip->rotate( -MOVE_OFFSET, 0 );
//tilt
else if ( ea.getKey() == '9' )
_manip->rotate( 0, MOVE_OFFSET );
else if ( ea.getKey() == '3' )
_manip->rotate( 0, -MOVE_OFFSET );
//zoom
else if ( ea.getKey() == '-' )
_manip->zoom( 0, MOVE_OFFSET );
else if ( ea.getKey() == '+' )
_manip->zoom( 0, -MOVE_OFFSET );
//reset
else if ( ea.getKey() == '5' )
_manip->home( ea, aa );
}
return false;
}
/**
* Required extern functions needed for every plugin
* These functions can be called prior to creating an instance
* of the plugin class
*/
// Class factory to return a new instance of the plugin class
QGISEXTERN QgisPlugin *classFactory( QgisInterface *qgisInterfacePointer )
{
return new GlobePlugin( qgisInterfacePointer );
}
// Return the name of the plugin - note that we do not user class members as
// the class may not yet be insantiated when this method is called.
QGISEXTERN QString name()
{
return sName;
}
// Return the description
QGISEXTERN QString description()
{
return sDescription;
}
// Return the category
QGISEXTERN QString category()
{
return sCategory;
}
// Return the type (either UI or MapLayer plugin)
QGISEXTERN int type()
{
return sPluginType;
}
// Return the version number for the plugin
QGISEXTERN QString version()
{
return sPluginVersion;
}
// Return the icon
QGISEXTERN QString icon()
{
return sIcon;
}
// Return the experimental status for the plugin
QGISEXTERN QString experimental()
{
return sExperimental;
}
// Delete ourself
QGISEXTERN void unload( QgisPlugin *pluginPointer )
{
delete pluginPointer;
}<|fim▁end|> | osgEarth::ImageLayerOptions options( "QGIS" ); |
<|file_name|>rssfeeds.py<|end_file_name|><|fim▁begin|># coding=utf-8
import re
import urlparse
from feedparser.api import parse
from feedparser.util import FeedParserDict
from sickbeard import logger
from sickrage.helper.exceptions import ex
def getFeed(url, request_headers=None, handlers=None):
parsed = list(urlparse.urlparse(url))
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
try:
feed = parse(url, False, False, request_headers, handlers=handlers)
if feed:
if 'entries' in feed:
return feed
elif 'error' in feed.feed:
err_code = feed.feed['error']['code']
err_desc = feed.feed['error']['description']
logger.log(u'RSS ERROR:[%s] CODE:[%s]' % (err_desc, err_code), logger.DEBUG)<|fim▁hole|> logger.log(u'RSS error loading url: ' + url, logger.DEBUG)
except Exception as e:
logger.log(u'RSS error: ' + ex(e), logger.DEBUG)
return FeedParserDict()<|fim▁end|> | else: |
<|file_name|>copyrequest.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <sys/stat.h>
#include <libgen.h>
#include <string.h>
#include <stdlib.h>
#include <stdio.h>
#include "copyrequest.h"
#include "urlencode.h"
using namespace std;
CopyRequest::CopyRequest(WebdavServer& owner, bool deleteSource) :
Request(owner),
m_deleteSource(deleteSource)
{
}
int CopyRequest::handleRequest(struct MHD_Connection* connection, const char* url, const char* version, const char* uploadData, size_t* uploadDataSize)
{
const char* contentLength = MHD_lookup_connection_value(connection, MHD_HEADER_KIND, "content-length");
struct MHD_Response* response = MHD_create_response_from_data(0, 0, MHD_NO, MHD_NO);
m_owner.initRequest(response);
int ret;
if (contentLength && strcmp(contentLength, "0") != 0)
{
ret = MHD_queue_response(connection, 415, response);
MHD_destroy_response(response);
return ret;
}
if (m_deleteSource && !checkLock(connection, url, ret))
{
return ret;
}
const char* depth = MHD_lookup_connection_value(connection, MHD_HEADER_KIND, "depth");
if (!depth)
depth = "infinity";
int iDepth = strcasecmp(depth, "infinity") == 0 ? -1 : atoi(depth);
const char* _destination = MHD_lookup_connection_value(connection, MHD_HEADER_KIND, "destination");
const char* host = MHD_lookup_connection_value(connection, MHD_HEADER_KIND, "host");
// check for other destination
string self("http://");
self.append(host);
if (self.compare(0, self.length(), _destination, self.length()) != 0)
{
ret = MHD_queue_response(connection, 502, response);
MHD_destroy_response(response);
return ret;
}
string destination(_destination);
cout << "Moving file to: " << destination << endl;
destination = destination.substr(self.length());
const char* _overwrite = MHD_lookup_connection_value(connection, MHD_HEADER_KIND, "overwrite");
bool overwrite = _overwrite && strcmp(_overwrite, "T") == 0;
string source = m_owner.getRoot() + url;
if (!m_owner.checkPath(source))
{
ret = MHD_queue_response(connection, 404, response);
MHD_destroy_response(response);
return ret;
}
string destUrl = destination;
if (!checkLock(connection, destUrl.c_str(), ret))
{
return ret;
}
destination = m_owner.getRoot() + Util::UriDecode(destination);
bool newResource = !m_owner.checkPath(destination);
struct stat st;
bool destIsDir = stat(destination.c_str(), &st)==0 && S_ISDIR(st.st_mode);
if (!newResource && m_deleteSource && destIsDir && !overwrite)
{
ret = MHD_queue_response(connection, 412, response);
MHD_destroy_response(response);
return ret;
}
bool existingCol = false;
if (!newResource && m_deleteSource && destIsDir)
{
char* dup = strdup(url);
destination.append(basename(dup));
free(dup);
if (stat(destination.c_str(), &st) != 0)
{
newResource = true;
existingCol = true;
}
}
if (!newResource && !overwrite)
{
ret = MHD_queue_response(connection, 412, response);
MHD_destroy_response(response);
return ret;<|fim▁hole|> }
else if (!newResource && overwrite)
{
WebdavServer::recursiveDelete(destination);
}
char* dup = strdup(destination.c_str());
string destDirname = dirname(dup);
free(dup);
if (!m_owner.checkPath(destDirname))
{
ret = MHD_queue_response(connection, 409, response);
MHD_destroy_response(response);
return ret;
}
else if (m_deleteSource && !rename(source.c_str(), destination.c_str()))
{
m_owner.moveProperties(url, destUrl);
}
else if (!m_deleteSource)
{
// TODO: copy properties
WebdavServer::recursiveCopy(source, destination, iDepth);
}
ret = MHD_queue_response(connection, newResource && !existingCol ? 201 : 204, response);
MHD_destroy_response(response);
return ret;
}<|fim▁end|> | |
<|file_name|>BeansResource.java<|end_file_name|><|fim▁begin|>package com.sebastian_daschner.scalable_coffee_shop.beans.boundary;
import javax.inject.Inject;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.GET;<|fim▁hole|>import javax.ws.rs.Path;
@Path("beans")
public class BeansResource {
@Inject
BeanCommandService commandService;
@Inject
BeanQueryService queryService;
@GET
public JsonObject getBeans() {
final JsonObjectBuilder builder = Json.createObjectBuilder();
queryService.getStoredBeans()
.entrySet().forEach(e -> builder.add(e.getKey(), e.getValue()));
return builder.build();
}
@POST
public void storeBeans(JsonObject object) {
final String beanOrigin = object.getString("beanOrigin", null);
final int amount = object.getInt("amount", 0);
if (beanOrigin == null || amount == 0)
throw new BadRequestException();
commandService.storeBeans(beanOrigin, amount);
}
}<|fim▁end|> | import javax.ws.rs.POST; |
<|file_name|>check-todos.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#
# Copyright © 2019 Endless Mobile, Inc.
#
# SPDX-License-Identifier: LGPL-2.1-or-later
#
# Original author: Philip Withnall
"""
Checks that a merge request doesn’t add any instances of the string ‘todo’
(in uppercase), or similar keywords. It may remove instances of that keyword,
or move them around, according to the logic of `git log -S`.
"""
import argparse
import re
import subprocess
import sys
# We have to specify these keywords obscurely to avoid the script matching
# itself. The keyword ‘fixme’ (in upper case) is explicitly allowed because
# that’s conventionally used as a way of marking a workaround which needs to
# be merged for now, but is to be grepped for and reverted or reworked later.
BANNED_KEYWORDS = [
'TO' + 'DO',
'X' + 'XX',
'W' + 'IP',
]
def main():
parser = argparse.ArgumentParser(
description='Check a range of commits to ensure they don’t contain '
'banned keywords.')
parser.add_argument('commits',
help='SHA to diff from, or range of commits to diff')
args = parser.parse_args()
banned_words_seen = set()
seen_in_log = False
seen_in_diff = False
# Check the log messages for banned words.
log_process = subprocess.run(<|fim▁hole|> log_lines = log_process.stdout.strip().split('\n')
for line in log_lines:
for keyword in BANNED_KEYWORDS:
if re.search('(^|\W+){}(\W+|$)'.format(keyword), line):
banned_words_seen.add(keyword)
seen_in_log = True
# Check the diff for banned words.
diff_process = subprocess.run(
['git', 'diff', '-U0', '--no-color', args.commits],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8',
check=True)
diff_lines = diff_process.stdout.strip().split('\n')
for line in diff_lines:
if not line.startswith('+ '):
continue
for keyword in BANNED_KEYWORDS:
if re.search('(^|\W+){}(\W+|$)'.format(keyword), line):
banned_words_seen.add(keyword)
seen_in_diff = True
if banned_words_seen:
if seen_in_log and seen_in_diff:
where = 'commit message and diff'
elif seen_in_log:
where = 'commit message'
elif seen_in_diff:
where = 'commit diff'
print('Saw banned keywords in a {}: {}. '
'This indicates the branch is a work in progress and should not '
'be merged in its current '
'form.'.format(where, ', '.join(banned_words_seen)))
sys.exit(1)
if __name__ == '__main__':
main()<|fim▁end|> | ['git', 'log', '--no-color', args.commits + '..HEAD'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8',
check=True) |
<|file_name|>adapt-contrib-slider.js<|end_file_name|><|fim▁begin|>define(function(require) {
var QuestionView = require('coreViews/questionView');
var Adapt = require('coreJS/adapt');
var Slider = QuestionView.extend({
events: {
'click .slider-sliderange': 'onSliderSelected',
'click .slider-handle': 'preventEvent',
'click .slider-scale-number': 'onNumberSelected',
'touchstart .slider-handle':'onHandlePressed',
'mousedown .slider-handle': 'onHandlePressed',
'focus .slider-handle':'onHandleFocus',
'blur .slider-handle':'onHandleBlur'
},
// Used by the question to reset the question when revisiting the component
resetQuestionOnRevisit: function() {
this.setAllItemsEnabled(true);
this.deselectAllItems();
this.resetQuestion();
},
// Used by question to setup itself just before rendering
setupQuestion: function() {
if(!this.model.get('_items')) {
this.setupModelItems();
}
this.model.set({
_selectedItem: {}
});
this.restoreUserAnswers();
if (this.model.get('_isSubmitted')) return;
this.selectItem(0);
},
setupModelItems: function() {
var items = [];
var answer = this.model.get('_correctAnswer');
var range = this.model.get('_correctRange');
var start = this.model.get('_scaleStart');
var end = this.model.get('_scaleEnd');
for (var i = start; i <= end; i++) {
if (answer) {
items.push({value: i, selected: false, correct: (i == answer)});
} else {
items.push({value: i, selected: false, correct: (i >= range._bottom && i <= range._top)});
}
}
this.model.set('_items', items);
},
restoreUserAnswers: function() {
if (!this.model.get('_isSubmitted')) return;
var items = this.model.get('_items');
var userAnswer = this.model.get('_userAnswer');
for (var i = 0, l = items.length; i < l; i++) {
var item = items[i];
if (item.value == userAnswer) {
this.model.set('_selectedItem', item);
this.selectItem(this.getIndexFromValue(item.value));
break;
}
}
this.setQuestionAsSubmitted();
this.markQuestion();
this.setScore();
this.showMarking();
this.setupFeedback();
},
// Used by question to disable the question during submit and complete stages
disableQuestion: function() {
this.setAllItemsEnabled(false);
},
// Used by question to enable the question during interactions
enableQuestion: function() {
this.setAllItemsEnabled(true);
},
setAllItemsEnabled: function(isEnabled) {
if (isEnabled) {
this.$('.slider-widget').removeClass('disabled');
} else {
this.$('.slider-widget').addClass('disabled');
}
},
// Used by question to setup itself just after rendering
onQuestionRendered: function() {
this.setScalePositions();
this.onScreenSizeChanged();
this.showScaleMarker(true);
this.listenTo(Adapt, 'device:resize', this.onScreenSizeChanged);
this.setAltText(this.model.get('_scaleStart'));
this.setReadyStatus();
},
// this should make the slider handle, slider marker and slider bar to animate to give position
animateToPosition: function(newPosition) {
this.$('.slider-handle').stop(true).animate({
left: newPosition + 'px'
},200);
this.$('.slider-bar').stop(true).animate({width:newPosition + 'px'});
this.$('.slider-scale-marker').stop(true).animate({
left: newPosition + 'px'
},200);
this.$('.slider-bar').stop(true).animate({width:newPosition + 'px'});
},
// this shoud give the index of item using given slider value
getIndexFromValue: function(itemValue) {
var scaleStart = this.model.get('_scaleStart'),
scaleEnd = this.model.get('_scaleEnd');
return Math.floor(this.mapValue(itemValue, scaleStart, scaleEnd, 0, this.model.get('_items').length - 1));
},
// this should set given value to slider handle
setAltText: function(value) {
this.$('.slider-handle').attr('aria-valuenow', value);
},
mapIndexToPixels: function(value, $widthObject) {
var numberOfItems = this.model.get('_items').length,
width = $widthObject ? $widthObject.width() : this.$('.slider-sliderange').width();
return Math.round(this.mapValue(value, 0, numberOfItems - 1, 0, width));
},
mapPixelsToIndex: function(value) {
var numberOfItems = this.model.get('_items').length,
width = this.$('.slider-sliderange').width();
return Math.round(this.mapValue(value, 0, width, 0, numberOfItems - 1));
},
normalise: function(value, low, high) {
var range = high - low;
return (value - low) / range;
},
mapValue: function(value, inputLow, inputHigh, outputLow, outputHigh) {
var normal = this.normalise(value, inputLow, inputHigh);
return normal * (outputHigh - outputLow) + outputLow;
},
onDragReleased: function (event) {
event.preventDefault();
if (Modernizr.touch) {
this.$('.slider-handle').off('touchmove');
} else {
$(document).off('mousemove.adapt-contrib-slider');
}
var itemValue = this.model.get('_selectedItem').value;
var itemIndex = this.getIndexFromValue(itemValue);
this.animateToPosition(this.mapIndexToPixels(itemIndex));
this.setAltText(itemValue);
},
onHandleDragged: function (event) {
event.preventDefault();
var left = (event.pageX || event.originalEvent.touches[0].pageX) - event.data.offsetLeft;
left = Math.max(Math.min(left, event.data.width), 0);
this.$('.slider-handle').css({
left: left + 'px'
});
this.$('.slider-scale-marker').css({
left: left + 'px'
});
this.selectItem(this.mapPixelsToIndex(left));<|fim▁hole|> event.preventDefault();
this.$('.slider-handle').on('keydown', _.bind(this.onKeyDown, this));
},
onHandleBlur: function(event) {
event.preventDefault();
this.$('.slider-handle').off('keydown');
},
onHandlePressed: function (event) {
event.preventDefault();
if (!this.model.get('_isEnabled') || this.model.get('_isSubmitted')) return;
this.showScaleMarker(true);
var eventData = {
width:this.$('.slider-sliderange').width(),
offsetLeft: this.$('.slider-sliderange').offset().left
};
if(Modernizr.touch) {
this.$('.slider-handle').on('touchmove', eventData, _.bind(this.onHandleDragged, this));
this.$('.slider-handle').one('touchend', eventData, _.bind(this.onDragReleased, this));
} else {
$(document).on('mousemove.adapt-contrib-slider', eventData, _.bind(this.onHandleDragged, this));
$(document).one('mouseup', eventData, _.bind(this.onDragReleased, this));
}
},
onKeyDown: function(event) {
if(event.which == 9) return; // tab key
event.preventDefault();
var newItemIndex = this.getIndexFromValue(this.model.get('_selectedItem').value);
switch (event.which) {
case 40: // ↓ down
case 37: // ← left
newItemIndex = Math.max(newItemIndex - 1, 0);
break;
case 38: // ↑ up
case 39: // → right
newItemIndex = Math.min(newItemIndex + 1, this.model.get('_items').length - 1);
break;
}
this.selectItem(newItemIndex);
if(typeof newItemIndex == 'number') this.showScaleMarker(true);
this.animateToPosition(this.mapIndexToPixels(newItemIndex));
this.setAltText(this.getValueFromIndex(newItemIndex));
},
onSliderSelected: function (event) {
event.preventDefault();
if (!this.model.get('_isEnabled') || this.model.get('_isSubmitted')) {
return;
}
this.showScaleMarker(true);
var offsetLeft = this.$('.slider-sliderange').offset().left;
var width = this.$('.slider-sliderange').width();
var left = (event.pageX || event.originalEvent.touches[0].pageX) - offsetLeft;
left = Math.max(Math.min(left, width), 0);
var itemIndex = this.mapPixelsToIndex(left);
this.selectItem(itemIndex);
this.animateToPosition(this.mapIndexToPixels(itemIndex));
this.setAltText(this.getValueFromIndex(itemIndex));
},
onNumberSelected: function(event) {
event.preventDefault();
if (this.model.get('_isComplete')) {
return;
}
var itemValue = parseInt($(event.currentTarget).attr('data-id'));
var index = this.getIndexFromValue(itemValue);
var $scaler = this.$('.slider-scaler');
this.selectItem(index);
this.animateToPosition(this.mapIndexToPixels(index, $scaler));
this.setAltText(itemValue);
},
getValueFromIndex: function(index) {
return this.model.get('_items')[index].value;
},
preventEvent: function(event) {
event.preventDefault();
},
resetControlStyles: function() {
this.$('.slider-handle').empty();
this.showScaleMarker(false);
this.$('.slider-bar').animate({width:'0px'});
},
/**
* allow the user to submit immediately; the slider handle may already be in the position they want to choose
*/
canSubmit: function() {
return true;
},
// Blank method for question to fill out when the question cannot be submitted
onCannotSubmit: function() {},
//This preserves the state of the users answers for returning or showing the users answer
storeUserAnswer: function() {
this.model.set('_userAnswer', this.model.get('_selectedItem').value);
},
isCorrect: function() {
var numberOfCorrectAnswers = 0;
_.each(this.model.get('_items'), function(item, index) {
if(item.selected && item.correct) {
this.model.set('_isAtLeastOneCorrectSelection', true);
numberOfCorrectAnswers++;
}
}, this);
this.model.set('_numberOfCorrectAnswers', numberOfCorrectAnswers);
return this.model.get('_isAtLeastOneCorrectSelection') ? true : false;
},
// Used to set the score based upon the _questionWeight
setScore: function() {
var numberOfCorrectAnswers = this.model.get('_numberOfCorrectAnswers');
var questionWeight = this.model.get('_questionWeight');
var score = questionWeight * numberOfCorrectAnswers;
this.model.set('_score', score);
},
// This is important and should give the user feedback on how they answered the question
// Normally done through ticks and crosses by adding classes
showMarking: function() {
this.$('.slider-item').removeClass('correct incorrect')
.addClass(this.model.get('_selectedItem').correct ? 'correct' : 'incorrect');
},
isPartlyCorrect: function() {
return this.model.get('_isAtLeastOneCorrectSelection');
},
// Used by the question view to reset the stored user answer
resetUserAnswer: function() {
this.model.set({
_selectedItem: {},
_userAnswer: undefined
});
},
// Used by the question view to reset the look and feel of the component.
// This could also include resetting item data
resetQuestion: function() {
this.selectItem(0);
this.animateToPosition(0);
this.resetControlStyles();
this.showScaleMarker(true);
this.setAltText(this.model.get('_scaleStart'));
},
setScalePositions: function() {
var numberOfItems = this.model.get('_items').length;
_.each(this.model.get('_items'), function(item, index) {
var normalisedPosition = this.normalise(index, 0, numberOfItems -1);
this.$('.slider-scale-number').eq(index).data('normalisedPosition', normalisedPosition);
}, this);
},
showScale: function () {
this.$('.slider-markers').empty();
if (this.model.get('_showScale') === false) {
this.$('.slider-markers').eq(0).css({display: 'none'});
this.model.get('_showScaleIndicator')
? this.$('.slider-scale-numbers').eq(0).css({visibility: 'hidden'})
: this.$('.slider-scale-numbers').eq(0).css({display: 'none'});
} else {
var $scaler = this.$('.slider-scaler');
var $markers = this.$('.slider-markers');
for (var i = 0, count = this.model.get('_items').length; i < count; i++) {
$markers.append("<div class='slider-line component-item-color'>");
$('.slider-line', $markers).eq(i).css({left: this.mapIndexToPixels(i, $scaler) + 'px'});
}
var scaleWidth = $scaler.width(),
$numbers = this.$('.slider-scale-number');
for (var i = 0, count = this.model.get('_items').length; i < count; i++) {
var $number = $numbers.eq(i),
newLeft = Math.round($number.data('normalisedPosition') * scaleWidth);
$number.css({left: newLeft});
}
}
},
//Labels are enabled in slider.hbs. Here we manage their containing div.
showLabels: function () {
if(!this.model.get('labelStart') && !this.model.get('labelEnd')) {
this.$('.slider-scale-labels').eq(0).css({display: 'none'});
}
},
remapSliderBar: function() {
var $scaler = this.$('.slider-scaler');
var currentIndex = this.getIndexFromValue(this.model.get('_selectedItem').value);
this.$('.slider-handle').css({left: this.mapIndexToPixels(currentIndex, $scaler) + 'px'});
this.$('.slider-scale-marker').css({left: this.mapIndexToPixels(currentIndex, $scaler) + 'px'});
this.$('.slider-bar').width(this.mapIndexToPixels(currentIndex, $scaler));
},
onScreenSizeChanged: function() {
this.showScale();
this.showLabels();
this.remapSliderBar();
if (this.$('.slider-widget.user .button.model').css('display') === 'inline-block') {
this.hideCorrectAnswer();
} else if (this.$('.slider-widget.model .button.user ').css('display') === 'inline-block') {
this.showCorrectAnswer();
}
},
showCorrectAnswer: function() {
var answers = [];
var bottom = this.model.get('_correctRange')._bottom;
var top = this.model.get('_correctRange')._top;
var range = top - bottom;
var correctAnswer = this.model.get('_correctAnswer');
this.showScaleMarker(false);
if (correctAnswer) {
// Check that correctAnswer is neither undefined nor empty
answers.push(correctAnswer);
} else if (bottom !== undefined) {
for (var i = 0; i <= range; i++) {
answers.push(this.model.get('_items')[this.getIndexFromValue(bottom) + i].value);
}
} else {
console.log(this.constructor + "::WARNING: no correct answer or correct range set in JSON")
}
var middleAnswer = answers[Math.floor(answers.length / 2)];
this.animateToPosition(this.mapIndexToPixels(this.getIndexFromValue(middleAnswer)));
this.showModelAnswers(answers);
},
showModelAnswers: function(correctAnswerArray) {
var $parentDiv = this.$('.slider-modelranges');
_.each(correctAnswerArray, function(correctAnswer, index) {
$parentDiv.append($("<div class='slider-model-answer component-item-color component-item-text-color'>"));
var $element = $(this.$('.slider-modelranges .slider-model-answer')[index]),
startingLeft = this.mapIndexToPixels(this.getIndexFromValue(this.model.get('_selectedItem').value));
if(this.model.get('_showNumber')) $element.html(correctAnswer);
$element.css({left:startingLeft}).fadeIn(0, _.bind(function() {
$element.animate({left: this.mapIndexToPixels(this.getIndexFromValue(correctAnswer))});
}, this));
}, this);
},
// Used by the question to display the users answer and
// hide the correct answer
// Should use the values stored in storeUserAnswer
hideCorrectAnswer: function() {
var userAnswerIndex = this.getIndexFromValue(this.model.get('_userAnswer'));
this.$('.slider-modelranges').empty();
this.showScaleMarker(true);
this.selectItem(userAnswerIndex);
this.animateToPosition(this.mapIndexToPixels(userAnswerIndex));
},
// according to given item index this should make the item as selected
selectItem: function(itemIndex) {
this.$el.a11y_selected(false);
_.each(this.model.get('_items'), function(item, index) {
item.selected = (index == itemIndex);
if(item.selected) {
this.model.set('_selectedItem', item);
this.$('.slider-scale-number[data-id="'+(itemIndex+1)+'"]').a11y_selected(true);
}
}, this);
this.showNumber(true);
},
// this should reset the selected state of each item
deselectAllItems: function() {
_.each(this.model.get('_items'), function(item) {
item.selected = false;
}, this);
},
// this makes the marker visible or hidden
showScaleMarker: function(show) {
var $scaleMarker = this.$('.slider-scale-marker');
if (this.model.get('_showScaleIndicator')) {
this.showNumber(show);
if(show) {
$scaleMarker.addClass('display-block');
} else {
$scaleMarker.removeClass('display-block');
}
}
},
// this should add the current slider value to the marker
showNumber: function(show) {
var $scaleMarker = this.$('.slider-scale-marker');
if(this.model.get('_showNumber')) {
if(show) {
$scaleMarker.html(this.model.get('_selectedItem').value);
} else {
$scaleMarker.html = "";
}
}
},
/**
* Used by adapt-contrib-spoor to get the user's answers in the format required by the cmi.interactions.n.student_response data field
*/
getResponse:function() {
return this.model.get('_userAnswer').toString();
},
/**
* Used by adapt-contrib-spoor to get the type of this question in the format required by the cmi.interactions.n.type data field
*/
getResponseType:function() {
return "numeric";
}
});
Adapt.register('slider', Slider);
return Slider;
});<|fim▁end|> | },
onHandleFocus: function(event) { |
<|file_name|>StringUtilImpl.java<|end_file_name|><|fim▁begin|>package com.avsystem.scex.util.function;
import org.apache.commons.codec.digest.HmacAlgorithms;
import org.apache.commons.codec.digest.HmacUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.Collection;
public class StringUtilImpl implements StringUtil {
// Stateless singleton; all callers share this instance.
public static final StringUtilImpl INSTANCE = new StringUtilImpl();
// Private: instantiation is restricted to the INSTANCE singleton above.
private StringUtilImpl() {
}
// ---- Thin delegations: the actual string logic lives in StringFunctions; ----
// ---- this class only adapts it to the StringUtil interface.              ----
@Override
public String concat(String... parts) {
    return StringFunctions.concat(parts);
}

@Override
public boolean contains(String list, String item) {
    return StringFunctions.contains(list, item);
}

@Override
public double extract(String string) {
    return StringFunctions.extract(string);
}

@Override
public String regexFind(String value, String pattern) {
    return StringFunctions.regexFind(value, pattern);
}

@Override
public String regexFindGroup(String value, String pattern) {
    return StringFunctions.regexFindGroup(value, pattern);
}

@Override
public boolean regexMatches(String value, String pattern) {
    return StringFunctions.regexMatches(value, pattern);
}

@Override
public String regexReplace(String value, String pattern, String replacement) {
    return StringFunctions.regexReplace(value, pattern, replacement);
}

@Override
public String regexReplaceAll(String value, String pattern, String replacement) {
    return StringFunctions.regexReplaceAll(value, pattern, replacement);
}

@Override
public String slice(String item, int from) {
    return StringFunctions.slice(item, from);
}

@Override
public String slice(String item, int from, int to, boolean dot) {
    return StringFunctions.slice(item, from, to, dot);
}

@Override
public String stripToAlphanumeric(String source, String replacement) {
    return StringFunctions.stripToAlphanumeric(source, replacement);
}
/**
 * Remove end for string if exist
 *
 * @param source source string
 * @param end    string to remove from the end of source
 * @return string with removed end, or {@code null} when either argument is {@code null}
 */
@Override
public String removeEnd(String source, String end) {
    if (source == null || end == null) {
        return null;
    }
    if (!source.endsWith(end)) {
        return source;
    }
    return source.substring(0, source.length() - end.length());
}
/**
 * Remove start from string if exist
 *
 * @param source source string
 * @param start  string to remove from the start of source
 * @return string with removed start, or {@code null} when either argument is {@code null}
 */
@Override
public String removeStart(String source, String start) {
    if (source == null || start == null) {
        return null;
    }
    // substring(beginIndex) already runs to the end of the string
    return source.startsWith(start) ? source.substring(start.length()) : source;
}
/**
 * Strips the TR-069 data-model root prefix ("InternetGatewayDevice." or
 * "Device.") from the given parameter path, if present.
 *
 * FIX: the previous implementation used String.replaceFirst, which treats its
 * argument as a regex — the unescaped '.' matched any character. The
 * startsWith guard happened to keep the behavior correct, but plain substring
 * removal is both safe and avoids compiling a regex on every call.
 *
 * @param source parameter path, may be null
 * @return path without the root prefix, or null if source was null
 */
@Override
public String removeTRRoot(String source) {
    if (source == null) {
        return null;
    }
    if (source.startsWith("InternetGatewayDevice.")) {
        return source.substring("InternetGatewayDevice.".length());
    }
    if (source.startsWith("Device.")) {
        return source.substring("Device.".length());
    }
    return source;
}
// Generates a random alphanumeric string of the given length.
// NOTE(review): commons-lang RandomStringUtils uses a non-secure PRNG by
// default — do not use this for secrets/tokens; confirm intended usage.
@Override
public String random(int length) {
    return RandomStringUtils.randomAlphanumeric(length);
}
/**
 * Left pad a String with a specified String.
 *
 * @param str the String to pad out, may be null
 * @param size the size to pad to
 * @param padStr the String to pad with, null or empty treated as single space
 * @return left padded String
 */
@Override
public String leftPad(String str, int size, String padStr) {
    return StringFunctions.leftPad(str, size, padStr);
}

/**
 * Right pad a String with a specified String.
 *
 * @param str the String to pad out, may be null
 * @param size the size to pad to
 * @param padStr the String to pad with, null or empty treated as single space
 * @return right padded String
 */
@Override
public String rightPad(String str, int size, String padStr) {
    return StringFunctions.rightPad(str, size, padStr);
}
// ---- Straight delegations to commons-lang3 StringUtils (null-safe per its docs). ----
@Override
public String subString(String str, int from, int to) {
    return StringUtils.substring(str, from, to);
}

@Override
public String[] split(String str, String separator) {
    return StringUtils.split(str, separator);
}

@Override
public String trimToEmpty(String str) {
    return StringUtils.trimToEmpty(str);
}

@Override
public String replace(String str, String find, String replacement) {
    return StringUtils.replace(str, find, replacement);
}

@Override
public String join(Collection<String> list, String separator) {
    return StringUtils.join(list, separator);
}

@Override
public boolean stringContains(String source, String item) {
    return StringUtils.contains(source, item);
}

// Hex-encoded HMAC-MD5 of `str`, keyed with `key` (commons-codec).
@Override
public String hmacMD5(String str, String key) {
    return new HmacUtils(HmacAlgorithms.HMAC_MD5, key).hmacHex(str);
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2013-2020 Blockstack PBC, a public benefit corporation
// Copyright (C) 2020 Stacks Open Internet Foundation
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
use std::convert::From;
use std::convert::TryFrom;
use std::error;
use std::fmt;
use std::io;
use std::io::prelude::*;
use std::io::{Read, Write};
use std::ops::Deref;
use std::ops::DerefMut;
use rusqlite::Error as RusqliteError;
use sha2::{Digest, Sha512Trunc256};
use crate::codec::MAX_MESSAGE_LEN;
use address::AddressHashMode;
use burnchains::Txid;
use chainstate::burn::operations::LeaderBlockCommitOp;
use chainstate::burn::ConsensusHash;
use chainstate::stacks::db::accounts::MinerReward;
use chainstate::stacks::db::blocks::MemPoolRejection;
use chainstate::stacks::db::StacksHeaderInfo;
use chainstate::stacks::index::Error as marf_error;
use clarity_vm::clarity::Error as clarity_error;
use net::Error as net_error;
use util::db::DBConn;
use util::db::Error as db_error;
use util::hash::Hash160;
use util::hash::Sha512Trunc256Sum;
use util::hash::HASH160_ENCODED_SIZE;
use util::secp256k1;
use util::secp256k1::MessageSignature;
use util::strings::StacksString;
use util::vrf::VRFProof;
use vm::contexts::GlobalContext;
use vm::costs::CostErrors;
use vm::costs::ExecutionCost;
use vm::errors::Error as clarity_interpreter_error;
use vm::representations::{ClarityName, ContractName};
use vm::types::{PrincipalData, QualifiedContractIdentifier, StandardPrincipalData, Value};
use crate::codec::{read_next, write_next, Error as codec_error, StacksMessageCodec};
use crate::types::chainstate::{
BlockHeaderHash, BurnchainHeaderHash, StacksAddress, StacksWorkScore,
};
use crate::types::chainstate::{StacksBlockHeader, StacksBlockId, StacksMicroblockHeader};
use crate::types::proof::{TrieHash, TRIEHASH_ENCODED_SIZE};
pub mod address;
pub mod auth;
pub mod block;
pub mod boot;
pub mod db;
pub mod events;
pub mod index;
pub mod miner;
pub mod transaction;
// Concrete key types used throughout the Stacks chain: secp256k1 keypairs.
pub type StacksPublicKey = secp256k1::Secp256k1PublicKey;
pub type StacksPrivateKey = secp256k1::Secp256k1PrivateKey;

// Wire (de)serialization for fixed-width hash newtypes.
impl_byte_array_message_codec!(TrieHash, TRIEHASH_ENCODED_SIZE as u32);
impl_byte_array_message_codec!(Sha512Trunc256Sum, 32);

// c32check address version bytes; the trailing letter is the c32 address prefix.
pub const C32_ADDRESS_VERSION_MAINNET_SINGLESIG: u8 = 22; // P
pub const C32_ADDRESS_VERSION_MAINNET_MULTISIG: u8 = 20; // M
pub const C32_ADDRESS_VERSION_TESTNET_SINGLESIG: u8 = 26; // T
pub const C32_ADDRESS_VERSION_TESTNET_MULTISIG: u8 = 21; // N

// Current wire-format versions for anchored blocks and microblocks.
pub const STACKS_BLOCK_VERSION: u8 = 0;
pub const STACKS_MICROBLOCK_VERSION: u8 = 0;

// Hard caps on serialized block / transaction sizes (2 MB).
pub const MAX_BLOCK_LEN: u32 = 2 * 1024 * 1024;
pub const MAX_TRANSACTION_LEN: u32 = MAX_BLOCK_LEN;
impl StacksBlockId {
    /// Derive the index block hash: sha512/256 over the anchored block hash
    /// followed by the sortition consensus hash, in that order.
    pub fn new(
        sortition_consensus_hash: &ConsensusHash,
        block_hash: &BlockHeaderHash,
    ) -> StacksBlockId {
        let mut digest = Sha512Trunc256::new();
        digest.input(block_hash);
        digest.input(sortition_consensus_hash);
        StacksBlockId(Sha512Trunc256Sum::from_hasher(digest).0)
    }
}
impl From<StacksAddress> for StandardPrincipalData {
fn from(addr: StacksAddress) -> StandardPrincipalData {
StandardPrincipalData(addr.version, addr.bytes.0)
}
}
impl From<StacksAddress> for PrincipalData {
fn from(addr: StacksAddress) -> PrincipalData {
PrincipalData::from(StandardPrincipalData::from(addr))
}
}
impl AddressHashMode {
pub fn to_version_mainnet(&self) -> u8 {
match *self {
AddressHashMode::SerializeP2PKH => C32_ADDRESS_VERSION_MAINNET_SINGLESIG,
_ => C32_ADDRESS_VERSION_MAINNET_MULTISIG,
}
}
pub fn to_version_testnet(&self) -> u8 {
match *self {
AddressHashMode::SerializeP2PKH => C32_ADDRESS_VERSION_TESTNET_SINGLESIG,
_ => C32_ADDRESS_VERSION_TESTNET_MULTISIG,
}
}
pub fn from_version(version: u8) -> AddressHashMode {
match version {
C32_ADDRESS_VERSION_TESTNET_SINGLESIG | C32_ADDRESS_VERSION_MAINNET_SINGLESIG => {
AddressHashMode::SerializeP2PKH
}
_ => AddressHashMode::SerializeP2SH,
}
}
}
/// Chainstate error type: aggregates validation failures and errors bubbled up
/// from the subsystems this module drives (Clarity VM, DB, network, MARF, I/O).
#[derive(Debug)]
pub enum Error {
    InvalidFee,
    InvalidStacksBlock(String),
    // reason, plus the hash identifying the offending microblock
    InvalidStacksMicroblock(String, BlockHeaderHash),
    // NOTE(review): the bool's meaning is not visible in this file — confirm at call sites.
    InvalidStacksTransaction(String, bool),
    /// This error indicates that the considered transaction was skipped
    /// because of the current state of the block assembly algorithm,
    /// but the transaction otherwise may be valid (e.g., block assembly is
    /// only considering STX transfers and this tx isn't a transfer).
    StacksTransactionSkipped,
    PostConditionFailed(String),
    NoSuchBlockError,
    InvalidChainstateDB,
    BlockTooBigError,
    TransactionTooBigError,
    BlockCostExceeded,
    NoTransactionsToMine,
    MicroblockStreamTooLongError,
    IncompatibleSpendingConditionError,
    // (cost before, cost after, budget) — see Error::from_cost_error
    CostOverflowError(ExecutionCost, ExecutionCost, ExecutionCost),
    ClarityError(clarity_error),
    DBError(db_error),
    NetError(net_error),
    CodecError(codec_error),
    MARFError(marf_error),
    ReadError(io::Error),
    WriteError(io::Error),
    MemPoolError(String),
    PoxAlreadyLocked,
    PoxInsufficientBalance,
    PoxNoRewardCycle,
}

// Lossless conversions from subsystem errors so `?` can be used directly.
impl From<marf_error> for Error {
    fn from(e: marf_error) -> Error {
        Error::MARFError(e)
    }
}

impl From<clarity_error> for Error {
    fn from(e: clarity_error) -> Error {
        Error::ClarityError(e)
    }
}

impl From<net_error> for Error {
    fn from(e: net_error) -> Error {
        Error::NetError(e)
    }
}

impl From<codec_error> for Error {
    fn from(e: codec_error) -> Error {
        Error::CodecError(e)
    }
}
/// Human-readable rendering; wrapped subsystem errors delegate to their own
/// Display impls, leaf variants get fixed messages.
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Error::InvalidFee => write!(f, "Invalid fee"),
            Error::InvalidStacksBlock(ref s) => fmt::Display::fmt(s, f),
            Error::InvalidStacksMicroblock(ref s, _) => fmt::Display::fmt(s, f),
            Error::InvalidStacksTransaction(ref s, _) => fmt::Display::fmt(s, f),
            Error::PostConditionFailed(ref s) => fmt::Display::fmt(s, f),
            Error::NoSuchBlockError => write!(f, "No such Stacks block"),
            Error::InvalidChainstateDB => write!(f, "Invalid chainstate database"),
            Error::BlockTooBigError => write!(f, "Too much data in block"),
            Error::TransactionTooBigError => write!(f, "Too much data in transaction"),
            Error::BlockCostExceeded => write!(f, "Block execution budget exceeded"),
            Error::MicroblockStreamTooLongError => write!(f, "Too many microblocks in stream"),
            Error::IncompatibleSpendingConditionError => {
                write!(f, "Spending condition is incompatible with this operation")
            }
            Error::CostOverflowError(ref c1, ref c2, ref c3) => write!(
                f,
                "{}",
                &format!(
                    "Cost overflow: before={:?}, after={:?}, budget={:?}",
                    c1, c2, c3
                )
            ),
            Error::ClarityError(ref e) => fmt::Display::fmt(e, f),
            Error::DBError(ref e) => fmt::Display::fmt(e, f),
            Error::NetError(ref e) => fmt::Display::fmt(e, f),
            Error::CodecError(ref e) => fmt::Display::fmt(e, f),
            Error::MARFError(ref e) => fmt::Display::fmt(e, f),
            Error::ReadError(ref e) => fmt::Display::fmt(e, f),
            Error::WriteError(ref e) => fmt::Display::fmt(e, f),
            Error::MemPoolError(ref s) => fmt::Display::fmt(s, f),
            Error::NoTransactionsToMine => write!(f, "No transactions to mine"),
            Error::PoxAlreadyLocked => write!(f, "Account has already locked STX for PoX"),
            Error::PoxInsufficientBalance => write!(f, "Not enough STX to lock"),
            Error::PoxNoRewardCycle => write!(f, "No such reward cycle"),
            Error::StacksTransactionSkipped => {
                write!(f, "Stacks transaction skipped during assembly")
            }
        }
    }
}
impl error::Error for Error {
    /// Expose the wrapped subsystem error (if any) as this error's cause;
    /// leaf variants carry no inner error and return None.
    fn cause(&self) -> Option<&dyn error::Error> {
        match *self {
            Error::InvalidFee => None,
            Error::InvalidStacksBlock(ref _s) => None,
            Error::InvalidStacksMicroblock(ref _s, ref _h) => None,
            Error::InvalidStacksTransaction(ref _s, _q) => None,
            Error::PostConditionFailed(ref _s) => None,
            Error::NoSuchBlockError => None,
            Error::InvalidChainstateDB => None,
            Error::BlockTooBigError => None,
            Error::TransactionTooBigError => None,
            Error::BlockCostExceeded => None,
            Error::MicroblockStreamTooLongError => None,
            Error::IncompatibleSpendingConditionError => None,
            Error::CostOverflowError(..) => None,
            Error::ClarityError(ref e) => Some(e),
            Error::DBError(ref e) => Some(e),
            Error::NetError(ref e) => Some(e),
            Error::CodecError(ref e) => Some(e),
            Error::MARFError(ref e) => Some(e),
            Error::ReadError(ref e) => Some(e),
            Error::WriteError(ref e) => Some(e),
            Error::MemPoolError(ref _s) => None,
            Error::NoTransactionsToMine => None,
            Error::PoxAlreadyLocked => None,
            Error::PoxInsufficientBalance => None,
            Error::PoxNoRewardCycle => None,
            Error::StacksTransactionSkipped => None,
        }
    }
}
impl Error {
    /// Stable, machine-readable variant name; used as the `reason` code in `into_json`.
    fn name(&self) -> &'static str {
        match self {
            Error::InvalidFee => "InvalidFee",
            Error::InvalidStacksBlock(ref _s) => "InvalidStacksBlock",
            Error::InvalidStacksMicroblock(ref _s, ref _h) => "InvalidStacksMicroblock",
            Error::InvalidStacksTransaction(ref _s, _q) => "InvalidStacksTransaction",
            Error::PostConditionFailed(ref _s) => "PostConditionFailed",
            Error::NoSuchBlockError => "NoSuchBlockError",
            Error::InvalidChainstateDB => "InvalidChainstateDB",
            Error::BlockTooBigError => "BlockTooBigError",
            Error::TransactionTooBigError => "TransactionTooBigError",
            Error::BlockCostExceeded => "BlockCostExceeded",
            Error::MicroblockStreamTooLongError => "MicroblockStreamTooLongError",
            Error::IncompatibleSpendingConditionError => "IncompatibleSpendingConditionError",
            Error::CostOverflowError(..) => "CostOverflowError",
            Error::ClarityError(ref _e) => "ClarityError",
            Error::DBError(ref _e) => "DBError",
            Error::NetError(ref _e) => "NetError",
            Error::CodecError(ref _e) => "CodecError",
            Error::MARFError(ref _e) => "MARFError",
            Error::ReadError(ref _e) => "ReadError",
            Error::WriteError(ref _e) => "WriteError",
            Error::MemPoolError(ref _s) => "MemPoolError",
            Error::NoTransactionsToMine => "NoTransactionsToMine",
            Error::PoxAlreadyLocked => "PoxAlreadyLocked",
            Error::PoxInsufficientBalance => "PoxInsufficientBalance",
            Error::PoxNoRewardCycle => "PoxNoRewardCycle",
            Error::StacksTransactionSkipped => "StacksTransactionSkipped",
        }
    }

    /// Render this error as a JSON payload:
    /// `{ "error": "chainstate error", "reason": <variant name>, "reason_data": <Debug dump> }`.
    pub fn into_json(&self) -> serde_json::Value {
        let reason_code = self.name();
        let reason_data = format!("{:?}", &self);
        let result = json!({
            "error": "chainstate error",
            "reason": reason_code,
            "reason_data": reason_data
        });
        result
    }
}
// Further conversions: raw sqlite errors are folded into the DB error class,
// and Clarity interpreter errors are wrapped as Clarity errors.
impl From<RusqliteError> for Error {
    fn from(e: RusqliteError) -> Error {
        Error::DBError(db_error::SqliteError(e))
    }
}

impl From<db_error> for Error {
    fn from(e: db_error) -> Error {
        Error::DBError(e)
    }
}

impl From<clarity_interpreter_error> for Error {
    fn from(e: clarity_interpreter_error) -> Error {
        Error::ClarityError(clarity_error::Interpreter(e))
    }
}
impl Error {
    /// Translate a Clarity cost-tracking failure into a CostOverflowError,
    /// recording (cost before, cost at failure, budget). If the cost error
    /// does not itself carry the used/budget pair, read both from the
    /// context's cost tracker.
    pub fn from_cost_error(
        err: CostErrors,
        cost_before: ExecutionCost,
        context: &GlobalContext,
    ) -> Error {
        if let CostErrors::CostBalanceExceeded(used, budget) = err {
            Error::CostOverflowError(cost_before, used, budget)
        } else {
            let cur_cost = context.cost_track.get_total();
            let budget = context.cost_track.get_limit();
            Error::CostOverflowError(cost_before, cur_cost, budget)
        }
    }
}
impl Txid {
    /// A Stacks transaction ID is a sha512/256 hash (not a double-sha256 hash)
    pub fn from_stacks_tx(txdata: &[u8]) -> Txid {
        let digest = Sha512Trunc256Sum::from_data(txdata);
        let mut txid_bytes = [0u8; 32];
        txid_bytes.copy_from_slice(digest.as_bytes());
        Txid(txid_bytes)
    }

    /// A sighash is calculated the same way as a txid
    pub fn from_sighash_bytes(txdata: &[u8]) -> Txid {
        Txid::from_stacks_tx(txdata)
    }
}
/// How a transaction may be appended to the Stacks blockchain
#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Copy, Serialize, Deserialize)]
pub enum TransactionAnchorMode {
    OnChainOnly = 1,  // must be included in a StacksBlock
    OffChainOnly = 2, // must be included in a StacksMicroBlock
    Any = 3,          // either
}

/// Wire-format tags distinguishing standard from sponsored authorizations.
#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Copy, Serialize, Deserialize)]
pub enum TransactionAuthFlags {
    // types of auth
    AuthStandard = 0x04,
    AuthSponsored = 0x05,
}

/// Transaction signatures are validated by calculating the public key from the signature, and
/// verifying that all public keys hash to the signing account's hash. To do so, we must preserve
/// enough information in the auth structure to recover each public key's bytes.
///
/// An auth field can be a public key or a signature. In both cases, the public key (either given
/// in-the-raw or embedded in a signature) may be encoded as compressed or uncompressed.
#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Copy, Serialize, Deserialize)]
pub enum TransactionAuthFieldID {
    // types of auth fields
    PublicKeyCompressed = 0x00,
    PublicKeyUncompressed = 0x01,
    SignatureCompressed = 0x02,
    SignatureUncompressed = 0x03,
}

/// Whether a public key is serialized in compressed or uncompressed form.
#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Copy, Serialize, Deserialize)]
pub enum TransactionPublicKeyEncoding {
    // ways we can encode a public key
    Compressed = 0x00,
    Uncompressed = 0x01,
}
impl TransactionPublicKeyEncoding {
    /// Parse a wire-format encoding byte; returns None for unknown values.
    pub fn from_u8(n: u8) -> Option<TransactionPublicKeyEncoding> {
        if n == TransactionPublicKeyEncoding::Compressed as u8 {
            Some(TransactionPublicKeyEncoding::Compressed)
        } else if n == TransactionPublicKeyEncoding::Uncompressed as u8 {
            Some(TransactionPublicKeyEncoding::Uncompressed)
        } else {
            None
        }
    }
}
/// One field of a spending condition: either a bare public key, or a
/// recoverable signature tagged with the key encoding it embeds.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum TransactionAuthField {
    PublicKey(StacksPublicKey),
    Signature(TransactionPublicKeyEncoding, MessageSignature),
}
impl TransactionAuthField {
    /// True iff this field carries a bare public key.
    pub fn is_public_key(&self) -> bool {
        // idiom: matches! replaces the verbose match-to-bool pattern
        matches!(self, TransactionAuthField::PublicKey(_))
    }

    /// True iff this field carries a signature.
    pub fn is_signature(&self) -> bool {
        matches!(self, TransactionAuthField::Signature(_, _))
    }

    /// The public key, if this is a PublicKey field.
    pub fn as_public_key(&self) -> Option<StacksPublicKey> {
        match *self {
            TransactionAuthField::PublicKey(ref pubk) => Some(pubk.clone()),
            _ => None,
        }
    }

    /// The (key encoding, signature) pair, if this is a Signature field.
    pub fn as_signature(&self) -> Option<(TransactionPublicKeyEncoding, MessageSignature)> {
        match *self {
            TransactionAuthField::Signature(ref key_fmt, ref sig) => {
                Some((key_fmt.clone(), sig.clone()))
            }
            _ => None,
        }
    }

    // TODO: enforce u8; 32
    /// Obtain the field's public key: either directly, or by recovering it
    /// from the signature over the given sighash and restoring its original
    /// compressed/uncompressed encoding.
    pub fn get_public_key(&self, sighash_bytes: &[u8]) -> Result<StacksPublicKey, net_error> {
        match *self {
            TransactionAuthField::PublicKey(ref pubk) => Ok(pubk.clone()),
            TransactionAuthField::Signature(ref key_fmt, ref sig) => {
                let mut pubk = StacksPublicKey::recover_to_pubkey(sighash_bytes, sig)
                    .map_err(|e| net_error::VerifyingError(e.to_string()))?;
                // idiom: the comparison is already the boolean we need
                pubk.set_compressed(*key_fmt == TransactionPublicKeyEncoding::Compressed);
                Ok(pubk)
            }
        }
    }
}
// tag address hash modes as "singlesig" or "multisig" so we can't accidentally construct an
// invalid spending condition
#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum SinglesigHashMode {
    P2PKH = 0x00,
    P2WPKH = 0x02,
}

#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum MultisigHashMode {
    P2SH = 0x01,
    P2WSH = 0x03,
}
impl SinglesigHashMode {
    /// Widen to the general AddressHashMode.
    pub fn to_address_hash_mode(&self) -> AddressHashMode {
        if matches!(self, SinglesigHashMode::P2PKH) {
            AddressHashMode::SerializeP2PKH
        } else {
            AddressHashMode::SerializeP2WPKH
        }
    }

    /// Narrow from AddressHashMode; multisig modes yield None.
    pub fn from_address_hash_mode(hm: AddressHashMode) -> Option<SinglesigHashMode> {
        match hm {
            AddressHashMode::SerializeP2PKH => Some(SinglesigHashMode::P2PKH),
            AddressHashMode::SerializeP2WPKH => Some(SinglesigHashMode::P2WPKH),
            _ => None,
        }
    }

    /// Parse a wire-format byte; unknown values yield None.
    pub fn from_u8(n: u8) -> Option<SinglesigHashMode> {
        if n == SinglesigHashMode::P2PKH as u8 {
            Some(SinglesigHashMode::P2PKH)
        } else if n == SinglesigHashMode::P2WPKH as u8 {
            Some(SinglesigHashMode::P2WPKH)
        } else {
            None
        }
    }
}
impl MultisigHashMode {
    /// Widen to the general AddressHashMode.
    pub fn to_address_hash_mode(&self) -> AddressHashMode {
        if matches!(self, MultisigHashMode::P2SH) {
            AddressHashMode::SerializeP2SH
        } else {
            AddressHashMode::SerializeP2WSH
        }
    }

    /// Narrow from AddressHashMode; singlesig modes yield None.
    pub fn from_address_hash_mode(hm: AddressHashMode) -> Option<MultisigHashMode> {
        match hm {
            AddressHashMode::SerializeP2SH => Some(MultisigHashMode::P2SH),
            AddressHashMode::SerializeP2WSH => Some(MultisigHashMode::P2WSH),
            _ => None,
        }
    }

    /// Parse a wire-format byte; unknown values yield None.
    pub fn from_u8(n: u8) -> Option<MultisigHashMode> {
        if n == MultisigHashMode::P2SH as u8 {
            Some(MultisigHashMode::P2SH)
        } else if n == MultisigHashMode::P2WSH as u8 {
            Some(MultisigHashMode::P2WSH)
        } else {
            None
        }
    }
}
/// A structure that encodes enough state to authenticate
/// a transaction's execution against a Stacks address.
/// public_keys + signatures_required determines the Principal.
/// nonce is the "check number" for the Principal.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct MultisigSpendingCondition {
    pub hash_mode: MultisigHashMode,
    pub signer: Hash160,
    pub nonce: u64,  // nth authorization from this account
    pub tx_fee: u64, // microSTX/compute rate offered by this account
    pub fields: Vec<TransactionAuthField>,
    pub signatures_required: u16,
}

/// Spending condition for a single-key account.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct SinglesigSpendingCondition {
    pub hash_mode: SinglesigHashMode,
    pub signer: Hash160,
    pub nonce: u64,  // nth authorization from this account
    pub tx_fee: u64, // microSTX/compute rate offered by this account
    pub key_encoding: TransactionPublicKeyEncoding,
    pub signature: MessageSignature,
}

/// Either kind of spending condition.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum TransactionSpendingCondition {
    Singlesig(SinglesigSpendingCondition),
    Multisig(MultisigSpendingCondition),
}

/// Types of transaction authorizations
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum TransactionAuth {
    Standard(TransactionSpendingCondition),
    Sponsored(TransactionSpendingCondition, TransactionSpendingCondition), // the second account pays on behalf of the first account
}

/// A transaction that calls into a smart contract
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct TransactionContractCall {
    pub address: StacksAddress,
    pub contract_name: ContractName,
    pub function_name: ClarityName,
    pub function_args: Vec<Value>,
}

/// A transaction that instantiates a smart contract
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct TransactionSmartContract {
    pub name: ContractName,
    pub code_body: StacksString,
}
/// A coinbase commits to 32 bytes of control-plane information
pub struct CoinbasePayload(pub [u8; 32]);
impl_byte_array_message_codec!(CoinbasePayload, 32);
impl_array_newtype!(CoinbasePayload, u8, 32);
impl_array_hexstring_fmt!(CoinbasePayload);
impl_byte_array_newtype!(CoinbasePayload, u8, 32);
impl_byte_array_serde!(CoinbasePayload);
// NOTE(review): "CONIBASE" is a typo for "COINBASE"; kept as-is since renaming
// a pub const would break external users — consider a deprecated alias.
pub const CONIBASE_PAYLOAD_ENCODED_SIZE: u32 = 32;

pub struct TokenTransferMemo(pub [u8; 34]); // same length as it is in stacks v1
impl_byte_array_message_codec!(TokenTransferMemo, 34);
impl_array_newtype!(TokenTransferMemo, u8, 34);
impl_array_hexstring_fmt!(TokenTransferMemo);
impl_byte_array_newtype!(TokenTransferMemo, u8, 34);
impl_byte_array_serde!(TokenTransferMemo);
pub const TOKEN_TRANSFER_MEMO_LENGTH: usize = 34; // same as it is in Stacks v1
/// The action a transaction performs.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum TransactionPayload {
    // (recipient, amount in microSTX, memo)
    TokenTransfer(PrincipalData, u64, TokenTransferMemo),
    ContractCall(TransactionContractCall),
    SmartContract(TransactionSmartContract),
    PoisonMicroblock(StacksMicroblockHeader, StacksMicroblockHeader), // the previous epoch leader sent two microblocks with the same sequence, and this is proof
    Coinbase(CoinbasePayload),
}

impl TransactionPayload {
    /// Static name of the payload kind (for logging/metrics-style uses).
    pub fn name(&self) -> &'static str {
        match self {
            TransactionPayload::TokenTransfer(..) => "TokenTransfer",
            TransactionPayload::ContractCall(..) => "ContractCall",
            TransactionPayload::SmartContract(..) => "SmartContract",
            TransactionPayload::PoisonMicroblock(..) => "PoisonMicroblock",
            TransactionPayload::Coinbase(..) => "Coinbase",
        }
    }
}

/// Wire-format discriminant for TransactionPayload variants.
#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Copy, Serialize, Deserialize)]
pub enum TransactionPayloadID {
    TokenTransfer = 0,
    SmartContract = 1,
    ContractCall = 2,
    PoisonMicroblock = 3,
    Coinbase = 4,
}
/// Encoding of an asset type identifier
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AssetInfo {
    pub contract_address: StacksAddress,
    pub contract_name: ContractName,
    pub asset_name: ClarityName,
}

/// numeric wire-format ID of an asset info type variant
#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Copy, Serialize, Deserialize)]
pub enum AssetInfoID {
    STX = 0,
    FungibleAsset = 1,
    NonfungibleAsset = 2,
}

impl AssetInfoID {
    /// Parse a wire-format byte; unknown values yield None.
    pub fn from_u8(b: u8) -> Option<AssetInfoID> {
        match b {
            0 => Some(AssetInfoID::STX),
            1 => Some(AssetInfoID::FungibleAsset),
            2 => Some(AssetInfoID::NonfungibleAsset),
            _ => None,
        }
    }
}
/// Comparison operator for fungible-token post-conditions: relates the amount
/// actually sent to the amount stated in the condition.
#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Copy, Serialize, Deserialize)]
pub enum FungibleConditionCode {
    SentEq = 0x01,
    SentGt = 0x02,
    SentGe = 0x03,
    SentLt = 0x04,
    SentLe = 0x05,
}
impl FungibleConditionCode {
    /// Parse a wire-format byte; unknown values yield None.
    pub fn from_u8(b: u8) -> Option<FungibleConditionCode> {
        match b {
            0x01 => Some(FungibleConditionCode::SentEq),
            0x02 => Some(FungibleConditionCode::SentGt),
            0x03 => Some(FungibleConditionCode::SentGe),
            0x04 => Some(FungibleConditionCode::SentLt),
            0x05 => Some(FungibleConditionCode::SentLe),
            _ => None,
        }
    }

    /// Evaluate this condition: compare the amount actually sent against the
    /// amount stated in the post-condition.
    pub fn check(&self, amount_sent_condition: u128, amount_sent: u128) -> bool {
        let ord = amount_sent.cmp(&amount_sent_condition);
        match *self {
            FungibleConditionCode::SentEq => ord == std::cmp::Ordering::Equal,
            FungibleConditionCode::SentGt => ord == std::cmp::Ordering::Greater,
            FungibleConditionCode::SentGe => ord != std::cmp::Ordering::Less,
            FungibleConditionCode::SentLt => ord == std::cmp::Ordering::Less,
            FungibleConditionCode::SentLe => ord != std::cmp::Ordering::Greater,
        }
    }
}
/// Condition for non-fungible-token post-conditions: whether the named NFT
/// value must (or must not) have been sent by the principal.
#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Copy, Serialize, Deserialize)]
pub enum NonfungibleConditionCode {
    Sent = 0x10,
    NotSent = 0x11,
}
impl NonfungibleConditionCode {
    /// Parse a wire-format byte; unknown values yield None.
    pub fn from_u8(b: u8) -> Option<NonfungibleConditionCode> {
        match b {
            0x10 => Some(NonfungibleConditionCode::Sent),
            0x11 => Some(NonfungibleConditionCode::NotSent),
            _ => None,
        }
    }

    /// True iff the given NFT value appears among the values sent
    /// (i.e., it is no longer owned by this principal).
    pub fn was_sent(nft_sent_condition: &Value, nfts_sent: &Vec<Value>) -> bool {
        // idiom: iterator `any` replaces the manual search loop with early return
        nfts_sent
            .iter()
            .any(|asset_sent| asset_sent == nft_sent_condition)
    }

    /// Evaluate this condition against the list of NFT values actually sent.
    pub fn check(&self, nft_sent_condition: &Value, nfts_sent: &Vec<Value>) -> bool {
        match *self {
            NonfungibleConditionCode::Sent => {
                NonfungibleConditionCode::was_sent(nft_sent_condition, nfts_sent)
            }
            NonfungibleConditionCode::NotSent => {
                !NonfungibleConditionCode::was_sent(nft_sent_condition, nfts_sent)
            }
        }
    }
}
/// Post-condition principal.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum PostConditionPrincipal {
    // the transaction's origin account (resolved at evaluation time)
    Origin,
    Standard(StacksAddress),
    Contract(StacksAddress, ContractName),
}
impl PostConditionPrincipal {
    /// Resolve to a concrete principal: `Origin` maps to the transaction's
    /// origin principal (passed in); the other variants carry their own
    /// address/contract identity.
    pub fn to_principal_data(&self, origin_principal: &PrincipalData) -> PrincipalData {
        match *self {
            PostConditionPrincipal::Origin => origin_principal.clone(),
            PostConditionPrincipal::Standard(ref addr) => {
                PrincipalData::Standard(StandardPrincipalData::from(addr.clone()))
            }
            PostConditionPrincipal::Contract(ref addr, ref contract_name) => {
                PrincipalData::Contract(QualifiedContractIdentifier::new(
                    StandardPrincipalData::from(addr.clone()),
                    contract_name.clone(),
                ))
            }
        }
    }
}
/// Wire-format discriminant for PostConditionPrincipal variants.
#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Copy, Serialize, Deserialize)]
pub enum PostConditionPrincipalID {
    Origin = 0x01,
    Standard = 0x02,
    Contract = 0x03,
}

/// Post-condition on a transaction
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum TransactionPostCondition {
    // (principal, comparison, amount in microSTX)
    STX(PostConditionPrincipal, FungibleConditionCode, u64),
    Fungible(
        PostConditionPrincipal,
        AssetInfo,
        FungibleConditionCode,
        u64,
    ),
    Nonfungible(
        PostConditionPrincipal,
        AssetInfo,
        Value,
        NonfungibleConditionCode,
    ),
}

/// Post-condition modes for unspecified assets
#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Copy, Serialize, Deserialize)]
pub enum TransactionPostConditionMode {
    Allow = 0x01, // allow any other changes not specified
    Deny = 0x02,  // deny any other changes not specified
}

/// Stacks transaction versions
#[repr(u8)]
#[derive(Debug, Clone, PartialEq, Copy, Serialize, Deserialize)]
pub enum TransactionVersion {
    Mainnet = 0x00,
    Testnet = 0x80,
}
/// A fully-formed Stacks transaction: versioning, authorization,
/// post-conditions, and the payload to execute.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct StacksTransaction {
    pub version: TransactionVersion,
    pub chain_id: u32,
    pub auth: TransactionAuth,
    pub anchor_mode: TransactionAnchorMode,
    pub post_condition_mode: TransactionPostConditionMode,
    pub post_conditions: Vec<TransactionPostCondition>,
    pub payload: TransactionPayload,
}

/// Incremental signer over a transaction: tracks the running sighash and
/// whether the origin condition has been fully signed.
#[derive(Debug, Clone, PartialEq)]
pub struct StacksTransactionSigner {
    pub tx: StacksTransaction,
    pub sighash: Txid,
    origin_done: bool,
    check_oversign: bool,
    check_overlap: bool,
}

/// A block that contains blockchain-anchored data
/// (corresponding to a LeaderBlockCommitOp)
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct StacksBlock {
    pub header: StacksBlockHeader,
    pub txs: Vec<StacksTransaction>,
}

/// A microblock that contains non-blockchain-anchored data,
/// but is tied to an on-chain block
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct StacksMicroblock {
    pub header: StacksMicroblockHeader,
    pub txs: Vec<StacksTransaction>,
}
// values a miner uses to produce the next block
pub const MINER_BLOCK_CONSENSUS_HASH: ConsensusHash = ConsensusHash([1u8; 20]);
pub const MINER_BLOCK_HEADER_HASH: BlockHeaderHash = BlockHeaderHash([1u8; 32]);

/// A structure for incrementally building up a block
#[derive(Clone)]
pub struct StacksBlockBuilder {
    pub chain_tip: StacksHeaderInfo,
    pub header: StacksBlockHeader,
    pub txs: Vec<StacksTransaction>,
    pub micro_txs: Vec<StacksTransaction>,
    pub total_anchored_fees: u64,
    pub total_confirmed_streamed_fees: u64,
    pub total_streamed_fees: u64,
    anchored_done: bool,
    bytes_so_far: u64,
    prev_microblock_header: StacksMicroblockHeader,
    miner_privkey: StacksPrivateKey,
    // NOTE(review): the exact roles of the three MinerReward values are not
    // visible in this file — confirm against the miner module before relying on them.
    miner_payouts: Option<(MinerReward, Vec<MinerReward>, MinerReward)>,
    parent_microblock_hash: Option<BlockHeaderHash>,
    miner_id: usize,
}

// maximum amount of data a leader can send during its epoch (2MB)
pub const MAX_EPOCH_SIZE: u32 = 2 * 1024 * 1024;

// maximum microblock size is 64KB, but note that the current leader has a space budget of
// $MAX_EPOCH_SIZE bytes (so the average microblock size needs to be 4kb if there are 256 of them)
pub const MAX_MICROBLOCK_SIZE: u32 = 65536;
#[cfg(test)]
pub mod test {
use chainstate::stacks::StacksPublicKey as PubKey;
use chainstate::stacks::*;
use core::*;
use net::codec::test::check_codec_and_corruption;
use net::codec::*;
use net::*;
use util::hash::*;
use util::log;
use vm::representations::{ClarityName, ContractName};
use super::*;
/// Make a representative of each kind of transaction we support
pub fn codec_all_transactions(
version: &TransactionVersion,
chain_id: u32,
anchor_mode: &TransactionAnchorMode,
post_condition_mode: &TransactionPostConditionMode,
) -> Vec<StacksTransaction> {
let addr = StacksAddress {
version: 1,
bytes: Hash160([0xff; 20]),
};
let asset_name = ClarityName::try_from("hello-asset").unwrap();
let asset_value = Value::buff_from(vec![0, 1, 2, 3]).unwrap();
let contract_name = ContractName::try_from("hello-world").unwrap();
let hello_contract_call = "hello contract call";
let hello_contract_name = "hello-contract-name";
let hello_contract_body = "hello contract code body";
let asset_info = AssetInfo {
contract_address: addr.clone(),
contract_name: contract_name.clone(),
asset_name: asset_name.clone(),
};
let mblock_header_1 = StacksMicroblockHeader {
version: 0x12,
sequence: 0x34,
prev_block: EMPTY_MICROBLOCK_PARENT_HASH.clone(),
tx_merkle_root: Sha512Trunc256Sum([1u8; 32]),
signature: MessageSignature([2u8; 65]),
};
let mblock_header_2 = StacksMicroblockHeader {
version: 0x12,
sequence: 0x34,
prev_block: EMPTY_MICROBLOCK_PARENT_HASH.clone(),
tx_merkle_root: Sha512Trunc256Sum([2u8; 32]),
signature: MessageSignature([3u8; 65]),
};
let spending_conditions = vec![
TransactionSpendingCondition::Singlesig(SinglesigSpendingCondition {
signer: Hash160([0x11; 20]),
hash_mode: SinglesigHashMode::P2PKH,
key_encoding: TransactionPublicKeyEncoding::Uncompressed,
nonce: 123,
tx_fee: 456,
signature: MessageSignature::from_raw(&vec![0xff; 65])
}),
TransactionSpendingCondition::Singlesig(SinglesigSpendingCondition {
signer: Hash160([0x11; 20]),
hash_mode: SinglesigHashMode::P2PKH,
key_encoding: TransactionPublicKeyEncoding::Compressed,
nonce: 234,
tx_fee: 567,
signature: MessageSignature::from_raw(&vec![0xff; 65])
}),
TransactionSpendingCondition::Multisig(MultisigSpendingCondition {
signer: Hash160([0x11; 20]),
hash_mode: MultisigHashMode::P2SH,
nonce: 345,
tx_fee: 678,
fields: vec![
TransactionAuthField::Signature(TransactionPublicKeyEncoding::Uncompressed, MessageSignature::from_raw(&vec![0xff; 65])),
TransactionAuthField::Signature(TransactionPublicKeyEncoding::Uncompressed, MessageSignature::from_raw(&vec![0xfe; 65])),
TransactionAuthField::PublicKey(PubKey::from_hex("04ef2340518b5867b23598a9cf74611f8b98064f7d55cdb8c107c67b5efcbc5c771f112f919b00a6c6c5f51f7c63e1762fe9fac9b66ec75a053db7f51f4a52712b").unwrap()),
],
signatures_required: 2
}),
TransactionSpendingCondition::Multisig(MultisigSpendingCondition {
signer: Hash160([0x11; 20]),
hash_mode: MultisigHashMode::P2SH,
nonce: 456,
tx_fee: 789,
fields: vec![
TransactionAuthField::Signature(TransactionPublicKeyEncoding::Compressed, MessageSignature::from_raw(&vec![0xff; 65])),
TransactionAuthField::Signature(TransactionPublicKeyEncoding::Compressed, MessageSignature::from_raw(&vec![0xfe; 65])),
TransactionAuthField::PublicKey(PubKey::from_hex("03ef2340518b5867b23598a9cf74611f8b98064f7d55cdb8c107c67b5efcbc5c77").unwrap())
],
signatures_required: 2
}),
TransactionSpendingCondition::Singlesig(SinglesigSpendingCondition {
signer: Hash160([0x11; 20]),
hash_mode: SinglesigHashMode::P2WPKH,
key_encoding: TransactionPublicKeyEncoding::Compressed,
nonce: 567,
tx_fee: 890,
signature: MessageSignature::from_raw(&vec![0xfe; 65]),
}),
TransactionSpendingCondition::Multisig(MultisigSpendingCondition {
signer: Hash160([0x11; 20]),
hash_mode: MultisigHashMode::P2WSH,
nonce: 678,
tx_fee: 901,
fields: vec![
TransactionAuthField::Signature(TransactionPublicKeyEncoding::Compressed, MessageSignature::from_raw(&vec![0xff; 65])),
TransactionAuthField::Signature(TransactionPublicKeyEncoding::Compressed, MessageSignature::from_raw(&vec![0xfe; 65])),
TransactionAuthField::PublicKey(PubKey::from_hex("03ef2340518b5867b23598a9cf74611f8b98064f7d55cdb8c107c67b5efcbc5c77").unwrap())
],
signatures_required: 2
})
];
let mut tx_auths = vec![];
for i in 0..spending_conditions.len() {
let spending_condition = &spending_conditions[i];
let next_spending_condition = &spending_conditions[(i + 1) % spending_conditions.len()];
tx_auths.push(TransactionAuth::Standard(spending_condition.clone()));
tx_auths.push(TransactionAuth::Sponsored(
spending_condition.clone(),
next_spending_condition.clone(),
));
}
let tx_post_condition_principals = vec![
PostConditionPrincipal::Origin,
PostConditionPrincipal::Standard(StacksAddress {
version: 1,
bytes: Hash160([1u8; 20]),
}),
PostConditionPrincipal::Contract(
StacksAddress {
version: 2,
bytes: Hash160([2u8; 20]),
},
ContractName::try_from("hello-world").unwrap(),
),
];
let mut tx_post_conditions = vec![];
for tx_pcp in tx_post_condition_principals {
tx_post_conditions.append(&mut vec![
vec![TransactionPostCondition::STX(
tx_pcp.clone(),
FungibleConditionCode::SentLt,
12345,
)],
vec![TransactionPostCondition::Fungible(
tx_pcp.clone(),
AssetInfo {
contract_address: addr.clone(),
contract_name: contract_name.clone(),
asset_name: asset_name.clone(),
},
FungibleConditionCode::SentGt,
23456,
)],
vec![TransactionPostCondition::Nonfungible(
tx_pcp.clone(),
AssetInfo {
contract_address: addr.clone(),
contract_name: contract_name.clone(),
asset_name: asset_name.clone(),
},
asset_value.clone(),
NonfungibleConditionCode::NotSent,
)],
vec![
TransactionPostCondition::STX(
tx_pcp.clone(),
FungibleConditionCode::SentLt,
12345,
),
TransactionPostCondition::Fungible(
tx_pcp.clone(),
AssetInfo {
contract_address: addr.clone(),
contract_name: contract_name.clone(),
asset_name: asset_name.clone(),
},
FungibleConditionCode::SentGt,
23456,
),
],
vec![
TransactionPostCondition::STX(
tx_pcp.clone(),
FungibleConditionCode::SentLt,
12345,
),
TransactionPostCondition::Nonfungible(
tx_pcp.clone(),
AssetInfo {
contract_address: addr.clone(),
contract_name: contract_name.clone(),
asset_name: asset_name.clone(),
},
asset_value.clone(),
NonfungibleConditionCode::NotSent,
),
],
vec![
TransactionPostCondition::Fungible(
tx_pcp.clone(),
AssetInfo {
contract_address: addr.clone(),
contract_name: contract_name.clone(),
asset_name: asset_name.clone(),
},
FungibleConditionCode::SentGt,
23456,
),
TransactionPostCondition::Nonfungible(
tx_pcp.clone(),
AssetInfo {
contract_address: addr.clone(),
contract_name: contract_name.clone(),
asset_name: asset_name.clone(),
},
asset_value.clone(),
NonfungibleConditionCode::NotSent,
),
],
vec![
TransactionPostCondition::STX(
tx_pcp.clone(),
FungibleConditionCode::SentLt,
12345,
),
TransactionPostCondition::Nonfungible(
tx_pcp.clone(),
AssetInfo {
contract_address: addr.clone(),
contract_name: contract_name.clone(),
asset_name: asset_name.clone(),
},
asset_value.clone(),
NonfungibleConditionCode::NotSent,
),
TransactionPostCondition::Fungible(
tx_pcp.clone(),
AssetInfo {
contract_address: addr.clone(),
contract_name: contract_name.clone(),
asset_name: asset_name.clone(),
},
FungibleConditionCode::SentGt,
23456,
),
],
]);
}
let stx_address = StacksAddress {
version: 1,
bytes: Hash160([0xff; 20]),
};
let tx_payloads = vec![
TransactionPayload::TokenTransfer(
stx_address.into(),
123,
TokenTransferMemo([0u8; 34]),
),
TransactionPayload::TokenTransfer(
PrincipalData::from(QualifiedContractIdentifier {
issuer: stx_address.into(),
name: "hello-contract-name".into(),
}),
123,
TokenTransferMemo([0u8; 34]),
),
TransactionPayload::ContractCall(TransactionContractCall {
address: StacksAddress {
version: 4,<|fim▁hole|> contract_name: ContractName::try_from("hello-contract-name").unwrap(),
function_name: ClarityName::try_from("hello-contract-call").unwrap(),
function_args: vec![Value::Int(0)],
}),
TransactionPayload::SmartContract(TransactionSmartContract {
name: ContractName::try_from(hello_contract_name).unwrap(),
code_body: StacksString::from_str(hello_contract_body).unwrap(),
}),
TransactionPayload::Coinbase(CoinbasePayload([0x12; 32])),
TransactionPayload::PoisonMicroblock(mblock_header_1, mblock_header_2),
];
// create all kinds of transactions
let mut all_txs = vec![];
for tx_auth in tx_auths.iter() {
for tx_post_condition in tx_post_conditions.iter() {
for tx_payload in tx_payloads.iter() {
match tx_payload {
// poison microblock and coinbase must be on-chain
TransactionPayload::Coinbase(_) => {
if *anchor_mode != TransactionAnchorMode::OnChainOnly {
continue;
}
}
TransactionPayload::PoisonMicroblock(_, _) => {
if *anchor_mode != TransactionAnchorMode::OnChainOnly {
continue;
}
}
_ => {}
}
let auth = tx_auth.clone();
let tx = StacksTransaction {
version: (*version).clone(),
chain_id: chain_id,
auth: auth,
anchor_mode: (*anchor_mode).clone(),
post_condition_mode: (*post_condition_mode).clone(),
post_conditions: tx_post_condition.clone(),
payload: tx_payload.clone(),
};
all_txs.push(tx);
}
}
}
all_txs
}
pub fn make_codec_test_block(num_txs: usize) -> StacksBlock {
let proof_bytes = hex_bytes("9275df67a68c8745c0ff97b48201ee6db447f7c93b23ae24cdc2400f52fdb08a1a6ac7ec71bf9c9c76e96ee4675ebff60625af28718501047bfd87b810c2d2139b73c23bd69de66360953a642c2a330a").unwrap();
let proof = VRFProof::from_bytes(&proof_bytes[..].to_vec()).unwrap();
let privk = StacksPrivateKey::from_hex(
"6d430bb91222408e7706c9001cfaeb91b08c2be6d5ac95779ab52c6b431950e001",
)
.unwrap();
let origin_auth = TransactionAuth::Standard(
TransactionSpendingCondition::new_singlesig_p2pkh(StacksPublicKey::from_private(
&privk,
))
.unwrap(),
);
let mut tx_coinbase = StacksTransaction::new(
TransactionVersion::Mainnet,
origin_auth.clone(),
TransactionPayload::Coinbase(CoinbasePayload([0u8; 32])),
);
tx_coinbase.anchor_mode = TransactionAnchorMode::OnChainOnly;
let mut all_txs = codec_all_transactions(
&TransactionVersion::Testnet,
0x80000000,
&TransactionAnchorMode::OnChainOnly,
&TransactionPostConditionMode::Allow,
);
// remove all coinbases, except for an initial coinbase
let mut txs_anchored = vec![];
txs_anchored.push(tx_coinbase);
for tx in all_txs.drain(..) {
match tx.payload {
TransactionPayload::Coinbase(_) => {
continue;
}
_ => {}
}
txs_anchored.push(tx);
if txs_anchored.len() >= num_txs {
break;
}
}
let txid_vecs = txs_anchored
.iter()
.map(|tx| tx.txid().as_bytes().to_vec())
.collect();
let merkle_tree = MerkleTree::<Sha512Trunc256Sum>::new(&txid_vecs);
let tx_merkle_root = merkle_tree.root();
let tr = tx_merkle_root.as_bytes().to_vec();
let work_score = StacksWorkScore {
burn: 123,
work: 456,
};
let header = StacksBlockHeader {
version: 0x01,
total_work: StacksWorkScore {
burn: 234,
work: 567,
},
proof: proof.clone(),
parent_block: BlockHeaderHash([5u8; 32]),
parent_microblock: BlockHeaderHash([6u8; 32]),
parent_microblock_sequence: 4,
tx_merkle_root: tx_merkle_root,
state_index_root: TrieHash([8u8; 32]),
microblock_pubkey_hash: Hash160([9u8; 20]),
};
StacksBlock {
header: header,
txs: txs_anchored,
}
}
}<|fim▁end|> | bytes: Hash160([0xfc; 20]),
}, |
<|file_name|>ez-relationlist-editview-tests.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) eZ Systems AS. All rights reserved.
* For full copyright and license information view LICENSE file distributed with this source code.
*/
YUI.add('ez-relationlist-editview-tests', function (Y) {
var viewTest,
registerTest,
universalDiscoveryRelationTest,
getFieldTest,
getEmptyFieldTest,
tapTest,
loadObjectRelationsTest,
initializerTest,
Assert = Y.Assert;
viewTest = new Y.Test.Case({
name: "eZ Relation list View test",
_getFieldDefinition: function (required) {
return {
isRequired: required
};
},
setUp: function () {
this.relatedContents = [];
this.fieldDefinitionIdentifier= "niceField";
this.fieldDefinition = {
fieldType: "ezobjectrelationlist",
identifier: this.fieldDefinitionIdentifier,
isRequired: false
};
this.field = {fieldValue: {destinationContentIds: [45, 42]}};
this.jsonContent = {};
this.jsonContentType = {};
this.jsonVersion = {};
this.loadingError = false;
this.content = new Y.Mock();
this.version = new Y.Mock();
this.contentType = new Y.Mock();
Y.Mock.expect(this.content, {
method: 'toJSON',
returns: this.jsonContent
});
Y.Mock.expect(this.version, {
method: 'toJSON',
returns: this.jsonVersion
});
Y.Mock.expect(this.contentType, {
method: 'toJSON',
returns: this.jsonContentType
});
this.view = new Y.eZ.RelationListEditView({
container: '.container',
field: this.field,
fieldDefinition: this.fieldDefinition,
content: this.content,
version: this.version,
contentType: this.contentType,
relatedContents: this.relatedContents,
translating: false,
});
},
tearDown: function () {
this.view.destroy();
delete this.view;
},
_testAvailableVariables: function (required, expectRequired) {
var fieldDefinition = this._getFieldDefinition(required),
that = this,
destContentToJSONArray = [];
this.view.set('fieldDefinition', fieldDefinition);
this.view.template = function (variables) {
Y.Assert.isObject(variables, "The template should receive some variables");
Y.Assert.areEqual(10, Y.Object.keys(variables).length, "The template should receive 10 variables");
Y.Assert.areSame(
that.jsonContent, variables.content,
"The content should be available in the field edit view template"
);
Y.Assert.areSame(
that.jsonVersion, variables.version,
"The version should be available in the field edit view template"
);
Y.Assert.areSame(
that.jsonContentType, variables.contentType,
"The contentType should be available in the field edit view template"
);
Y.Assert.areSame(
fieldDefinition, variables.fieldDefinition,
"The fieldDefinition should be available in the field edit view template"
);
Y.Assert.areSame(
that.field, variables.field,
"The field should be available in the field edit view template"
);
Y.Assert.isFalse(
variables.isNotTranslatable,
"The isNotTranslatable should be available in the field edit view template"
);
Y.Assert.areSame(
that.view.get('loadingError'), variables.loadingError,
"The field should be available in the field edit view template"
);
Y.Array.each(that.view.get('relatedContents'), function (destContent) {
destContentToJSONArray.push(destContent.toJSON());
});
for ( var i = 0; i<= variables.relatedContents.length; i++){
Y.Assert.areSame(
destContentToJSONArray[i],
variables.relatedContents[i],
"The field should be available in the field edit view template"
);
}
Y.Assert.areSame(expectRequired, variables.isRequired);
return '';
};
this.view.render();
},
"Test required field": function () {
this._testAvailableVariables(true, true);
},
"Test not required field": function () {
this._testAvailableVariables(false, false);
},
"Test validate no constraints": function () {
var fieldDefinition = this._getFieldDefinition(false);
this.view.set('fieldDefinition', fieldDefinition);
this.view.set('relatedContents', []);
this.view.render();
this.view.validate();
Y.Assert.isTrue(
this.view.isValid(),
"An empty relation is valid"
);
},
"Test validate required": function () {
var fieldDefinition = this._getFieldDefinition(true);
this.view.set('fieldDefinition', fieldDefinition);
this.view.set('relatedContents', []);
this.view.validate();
this.view.render();
Y.Assert.isFalse(
this.view.isValid(),
"An empty relation is NOT valid"
);
},
"Should render the view when the loadingError attribute changes": function () {
var templateCalled = false,
origTpl = this.view.template;
this.view.template = function (variables) {
templateCalled = true;
return origTpl.apply(this, arguments);
};
this.view.set('loadingError', true);
Y.Assert.isTrue(templateCalled, "The template has not been used");
},
"Should render the view when the destinationContent attribute changes": function () {
var templateCalled = false,
origTpl = this.view.template;
this.view.template = function (variables) {
templateCalled = true;
return origTpl.apply(this, arguments);
};
this.view.set('relatedContents', this.relatedContents);
Y.Assert.isTrue(templateCalled, "The template has not been used");
},
});
Y.Test.Runner.setName("eZ Relation list Edit View tests");
Y.Test.Runner.add(viewTest);
initializerTest = new Y.Test.Case({
name: "eZ Relation list initializing test",
_getFieldDefinition: function (required) {
return {
isRequired: required
};
},
setUp: function () {
this.fieldDefinitionIdentifier= "niceField";
this.fieldDefinition = {
fieldType: "ezobjectrelationlist",
identifier: this.fieldDefinitionIdentifier,
isRequired: false
};
this.field = {fieldValue: {destinationContentIds: [45, 42]}};
this.view = new Y.eZ.RelationListEditView({
field: this.field,
fieldDefinition: this.fieldDefinition,
relatedContents: this.relatedContents,
});
},
tearDown: function () {
this.view.destroy();
delete this.view;
},
"Should fill the destinationContentsIds attribute from the field": function () {
Y.Assert.isArray(
this.view.get('destinationContentsIds'),
"destinationContentsIds should be an array"
);
for (var i = 0; i <= this.view.get('destinationContentsIds').length; i++) {
Y.Assert.areSame(
this.view.get('field').fieldValue.destinationContentIds[i],<|fim▁hole|> this.view.get('destinationContentsIds')[i],
"The destinationContentId of the field value should be the same than the attribute"
);
}
},
});
Y.Test.Runner.add(initializerTest);
universalDiscoveryRelationTest = new Y.Test.Case({
name: "eZ Relation list universal discovery relation test",
_getFieldDefinition: function (required) {
return {
isRequired: required,
fieldSettings: {},
};
},
setUp: function () {
this.relatedContents = [];
this.fieldDefinitionIdentifier= "niceField";
this.fieldDefinition = {
fieldType: "ezobjectrelationlist",
identifier: this.fieldDefinitionIdentifier,
isRequired: false,
fieldSettings: {
selectionContentTypes: ['allowed_content_type_identifier']
}
};
this.field = {fieldValue: {destinationContentIds: [45, 42]}};
this.jsonContent = {};
this.jsonContentType = {};
this.jsonVersion = {};
this.content = new Y.Mock();
this.version = new Y.Mock();
this.contentType = new Y.Mock();
Y.Mock.expect(this.content, {
method: 'toJSON',
returns: this.jsonContent
});
Y.Mock.expect(this.version, {
method: 'toJSON',
returns: this.jsonVersion
});
Y.Mock.expect(this.contentType, {
method: 'toJSON',
returns: this.jsonContentType
});
this.view = new Y.eZ.RelationListEditView({
container: '.container',
field: this.field,
fieldDefinition: this.fieldDefinition,
content: this.content,
version: this.version,
contentType: this.contentType,
relatedContents: this.relatedContents,
});
},
tearDown: function () {
this.view.destroy();
delete this.view;
},
"Should validate when the universal discovery is canceled (empty relation)": function () {
var that = this,
fieldDefinition = this._getFieldDefinition(true);
this.view.set('fieldDefinition', fieldDefinition);
this.view._set('destinationContentsIds', null);
this.view.on('contentDiscover', function (e) {
that.resume(function () {
e.config.cancelDiscoverHandler.call(this);
Y.Assert.areSame(
this.view.get('errorStatus'),
'this.field.is.required domain=fieldedit',
'errorStatus should be true'
);
});
});
this.view.get('container').one('.ez-relation-discover').simulateGesture('tap');
this.wait();
},
"Should validate when the universal discovery is canceled": function () {
var that = this,
fieldDefinition = this._getFieldDefinition(false);
this.view.set('fieldDefinition', fieldDefinition);
this.view.on('contentDiscover', function (e) {
that.resume(function () {
e.config.cancelDiscoverHandler.call(this);
Y.Assert.isFalse(this.view.get('errorStatus'),'errorStatus should be false');
});
});
this.view.get('container').one('.ez-relation-discover').simulateGesture('tap');
this.wait();
},
"Should fill the relation with the universal discovery widget selection": function () {
var that = this,
contentInfoMock1 = new Y.Mock(),
contentInfoMock2 = new Y.Mock(),
fakeEventFacade = {selection: [{contentInfo: contentInfoMock1}, {contentInfo: contentInfoMock2}]},
contentIdsArray;
this.view._set('destinationContentsIds', [42, 45]);
contentIdsArray = Y.Array.dedupe(that.view.get('destinationContentsIds'));
Y.Mock.expect(contentInfoMock1, {
method: 'toJSON',
returns: {name: 'me', publishedDate: 'yesterday', lastModificationDate: 'tomorrow'}
});
Y.Mock.expect(contentInfoMock1, {
method: 'get',
args: ['contentId'],
returns: 42
});
Y.Mock.expect(contentInfoMock2, {
method: 'toJSON',
returns: {name: 'me', publishedDate: 'yesterday', lastModificationDate: 'tomorrow'}
});
Y.Mock.expect(contentInfoMock2, {
method: 'get',
args: ['contentId'],
returns: 51
});
this.view.on('contentDiscover', function (e) {
that.resume(function () {
Y.Array.each(fakeEventFacade.selection, function (selection) {
if ( that.view.get('destinationContentsIds').indexOf(selection.contentInfo.get('contentId')) == -1) {
contentIdsArray.push(selection.contentInfo.get('contentId'));
}
});
e.config.contentDiscoveredHandler.call(this, fakeEventFacade);
Y.ArrayAssert.itemsAreEqual(
contentIdsArray,
[42,45,51],
'destinationContentsIds should match the contentIds of the selected relation'
);
});
});
this.view.get('container').one('.ez-relation-discover').simulateGesture('tap');
this.wait();
},
"Should run the UniversalDiscoveryWidget": function () {
var that = this,
allowedContentType = new Y.Mock(),
notAllowedContentType = new Y.Mock();
Y.Mock.expect(allowedContentType, {
method: 'get',
args: ['identifier'],
returns: this.fieldDefinition.fieldSettings.selectionContentTypes[0]
});
Y.Mock.expect(notAllowedContentType, {
method: 'get',
args: ['identifier'],
returns: 'not_allowed_content_type_identifier'
});
this.view.on('contentDiscover', function (e) {
that.resume(function () {
Y.Assert.isObject(e.config, "contentDiscover config should be an object");
Y.Assert.isFunction(e.config.contentDiscoveredHandler, "config should have a function named contentDiscoveredHandler");
Y.Assert.isFunction(e.config.cancelDiscoverHandler, "config should have a function named cancelDiscoverHandler");
Y.Assert.isFunction(e.config.isSelectable, "config should have a function named isSelectable");
Y.Assert.isTrue(
e.config.isSelectable({contentType: allowedContentType}),
"isSelectable should return TRUE if selected content's content type is on allowed content types list"
);
Y.Assert.isFalse(
e.config.isSelectable({contentType: notAllowedContentType}),
"isSelectable should return FALSE if selected content's content type is not on allowed content types list"
);
Assert.isUndefined(
e.config.startingLocationId,
"The starting Location id parameter should not be set"
);
});
});
this.view.get('container').one('.ez-relation-discover').simulateGesture('tap');
this.wait();
},
"Should run the UniversalDiscoveryWidget starting at selectionDefaultLocation": function () {
var locationId = 'whatever/location/id';
this.fieldDefinition.fieldSettings.selectionContentTypes = [];
this.fieldDefinition.fieldSettings.selectionDefaultLocationHref = locationId;
this.view.on('contentDiscover', this.next(function (e) {
Assert.areEqual(
locationId,
e.config.startingLocationId,
"The starting Location id parameter should be set"
);
}, this));
this.view.get('container').one('.ez-relation-discover').simulateGesture('tap');
this.wait();
},
});
Y.Test.Runner.add(universalDiscoveryRelationTest);
tapTest = new Y.Test.Case({
name: "eZ Relation list tap test",
_getFieldDefinition: function (required) {
return {
isRequired: required
};
},
setUp: function () {
this.destinationContent1 = new Y.Mock();
this.destinationContent1ToJSON = {anythingJSONed: 'somethingJSONed'};
Y.Mock.expect(this.destinationContent1, {
method: 'toJSON',
returns: this.destinationContent1ToJSON
});
Y.Mock.expect(this.destinationContent1, {
method: 'get',
args: [Y.Mock.Value.String],
run: function (arg) {
if ( arg == 'contentId' ) {
return 45;
} else if ( arg == 'id') {
return "/api/ezp/v2/content/objects/45";
} else {
Y.Assert.fail('argument for get() not expected');
}
}
});
this.destinationContent2 = new Y.Mock();
this.destinationContent2ToJSON = {anythingJSONed2: 'somethingJSONed2'};
Y.Mock.expect(this.destinationContent2, {
method: 'toJSON',
returns: this.destinationContent2ToJSON
});
Y.Mock.expect(this.destinationContent2, {
method: 'get',
args: [Y.Mock.Value.String],
run: function (arg) {
if ( arg == 'contentId' ) {
return 42;
} else if ( arg == 'id') {
return "/api/ezp/v2/content/objects/42";
} else {
Y.Assert.fail('argument for get() not expected');
}
}
});
this.relatedContents = [this.destinationContent1, this.destinationContent2];
this.fieldDefinitionIdentifier= "niceField";
this.fieldDefinition = {
fieldType: "ezobjectrelationlist",
identifier: this.fieldDefinitionIdentifier,
isRequired: false,
fieldSettings: {},
};
this.field = {fieldValue: {destinationContentIds: [45, 42]}};
this.jsonContent = {};
this.jsonContentType = {};
this.jsonVersion = {};
this.content = new Y.Mock();
this.version = new Y.Mock();
this.contentType = new Y.Mock();
Y.Mock.expect(this.content, {
method: 'toJSON',
returns: this.jsonContent
});
Y.Mock.expect(this.version, {
method: 'toJSON',
returns: this.jsonVersion
});
Y.Mock.expect(this.contentType, {
method: 'toJSON',
returns: this.jsonContentType
});
this.view = new Y.eZ.RelationListEditView({
container: '.container',
field: this.field,
fieldDefinition: this.fieldDefinition,
content: this.content,
version: this.version,
contentType: this.contentType,
relatedContents: this.relatedContents,
});
},
tearDown: function () {
this.view.destroy();
delete this.view;
},
"Should prevent default behaviour of the tap event for select button": function () {
var that = this;
this.view.render();
this.view.get('container').once('tap', function (e) {
that.resume(function () {
Y.Assert.isTrue(
!!e.prevented,
"The tap event should have been prevented"
);
});
});
this.view.get('container').one('.ez-relation-discover').simulateGesture('tap');
this.wait();
},
"Should remove the relation related to the remove button when it is tapped": function () {
var that = this,
contentId = 42;
this.view.get('container').once('tap', function (e) {
that.resume(function () {
Y.Assert.isTrue(
!!e.prevented,
"The tap event should have been prevented"
);
Y.ArrayAssert.doesNotContain(
contentId,
that.view.get('destinationContentsIds'),
"The contentId of the relation removed should be deleted"
);
});
});
this.view.get('container').one('button[data-content-id="/api/ezp/v2/content/objects/'+ contentId +'"]').simulateGesture('tap');
this.wait();
},
"Should render the view and make the table disapear when we remove the last relation": function () {
var that = this,
contentId = 42;
this.view.set('relatedContents', [this.destinationContent2]);
this.view.render();
that.view.template = function () {
that.resume(function () {
Y.Assert.isNull(that.view.get('container').one('.ez-relationlist-contents'), 'The relation list table should have disapeared');
});
};
this.view.get('container').one('button[data-content-id="/api/ezp/v2/content/objects/'+ contentId +'"]').simulateGesture('tap');
this.wait();
},
"Should remove the table row of the relation when we tap on its remove button ": function () {
var that = this,
contentId = 42;
this.view.set('relatedContents', [this.destinationContent1, this.destinationContent2]);
this.view.render();
that.view.get('container').onceAfter(['webkitTransitionEnd', 'transitionend'], Y.bind(function () {
that.resume(function () {
Y.Assert.isNull(
that.view.get('container').one('tr[data-content-id="' + contentId + '"]'),
'The relation table row should have disapeared');
});
}, this));
this.view.get('container').one('button[data-content-id="/api/ezp/v2/content/objects/'+ contentId +'"]').simulateGesture('tap');
this.wait();
},
});
Y.Test.Runner.add(tapTest);
loadObjectRelationsTest = new Y.Test.Case({
name: "eZ Relations list loadObjectRelations event test",
_getFieldDefinition: function (required) {
return {
isRequired: required
};
},
setUp: function () {
this.fieldDefinitionIdentifier= "niceField";
this.fieldDefinition = {
fieldType: "ezobjectrelationlist",
identifier: this.fieldDefinitionIdentifier,
isRequired: false
};
this.field = {fieldValue: {destinationContentIds: [45, 42]}};
this.content = {};
this.view = new Y.eZ.RelationListEditView({
field: this.field,
fieldDefinition: this.fieldDefinition,
content: this.content,
});
},
tearDown: function () {
this.view.destroy();
delete this.view;
},
"Should fire the loadObjectRelations event": function () {
var loadContentEvent = false;
this.view.on('loadObjectRelations', Y.bind(function (e) {
Y.Assert.areSame(
this.fieldDefinitionIdentifier,
e.fieldDefinitionIdentifier,
"fieldDefinitionIdentifier is the same than the one in the field"
);
Y.Assert.areSame(
this.content,
e.content,
"The content should be provided in the event facade"
);
loadContentEvent = true;
}, this));
this.view.set('active', true);
Y.Assert.isTrue(loadContentEvent, "loadObjectRelations event should be fired when getting active");
},
"Should NOT fire the loadObjectRelations event if field is empty": function () {
var loadContentEvent = false,
that = this;
this.view.on('loadObjectRelations', function (e) {
Y.Assert.areSame(
that.fieldDefinitionIdentifier,
e.fieldDefinitionIdentifier,
"fieldDefinitionIdentifier is the same than the one in the field"
);
loadContentEvent = true;
});
this.view._set('destinationContentsIds', null);
this.view.set('active', true);
Y.Assert.isFalse(loadContentEvent, "loadContentEvent should NOT be called when changing active value");
},
});
Y.Test.Runner.add(loadObjectRelationsTest);
getFieldTest = new Y.Test.Case(
Y.merge(Y.eZ.Test.GetFieldTests, {
fieldDefinition: {isRequired: false},
ViewConstructor: Y.eZ.RelationListEditView,
value: {destinationContentsIds: [45, 42]},
newValue: [45, 42],
_setNewValue: function () {
this.view._set("destinationContentsIds", this.newValue);
},
_assertCorrectFieldValue: function (fieldValue, msg) {
Y.Assert.isObject(fieldValue, 'fieldValue should be an object');
Y.Assert.areEqual(this.newValue, fieldValue.destinationContentIds, msg);
},
})
);
Y.Test.Runner.add(getFieldTest);
getEmptyFieldTest = new Y.Test.Case(
Y.merge(Y.eZ.Test.GetFieldTests, {
fieldDefinition: {isRequired: false},
ViewConstructor: Y.eZ.RelationListEditView,
value: {destinationContentsIds: null},
newValue: null,
_setNewValue: function () {
this.view._set("destinationContentsIds", this.newValue);
},
_assertCorrectFieldValue: function (fieldValue, msg) {
Y.Assert.isObject(fieldValue, 'fieldValue should be an object');
Y.Assert.areEqual(this.newValue, fieldValue.destinationContentIds, msg);
},
})
);
Y.Test.Runner.add(getEmptyFieldTest);
registerTest = new Y.Test.Case(Y.eZ.EditViewRegisterTest);
registerTest.name = "Relation List Edit View registration test";
registerTest.viewType = Y.eZ.RelationListEditView;
registerTest.viewKey = "ezobjectrelationlist";
Y.Test.Runner.add(registerTest);
}, '', {requires: ['test', 'getfield-tests', 'node-event-simulate', 'editviewregister-tests', 'ez-relationlist-editview']});<|fim▁end|> | |
<|file_name|>validator.go<|end_file_name|><|fim▁begin|>package graphql
import (
"github.com/housinganywhere/graphql/language/kinds"
"github.com/housinganywhere/graphql/language/visitor"
"github.com/housinganywhere/graphql/gqlerrors"
"github.com/housinganywhere/graphql/language/ast"
)
type ValidationResult struct {
IsValid bool<|fim▁hole|>/**
* Implements the "Validation" section of the spec.
*
* Validation runs synchronously, returning an array of encountered errors, or
* an empty array if no errors were encountered and the document is valid.
*
* A list of specific validation rules may be provided. If not provided, the
* default list of rules defined by the GraphQL specification will be used.
*
* Each validation rules is a function which returns a visitor
* (see the language/visitor API). Visitor methods are expected to return
* GraphQLErrors, or Arrays of GraphQLErrors when invalid.
*/
func ValidateDocument(schema *Schema, astDoc *ast.Document, rules []ValidationRuleFn) (vr ValidationResult) {
if len(rules) == 0 {
rules = SpecifiedRules
}
vr.IsValid = false
if schema == nil {
vr.Errors = append(vr.Errors, gqlerrors.NewFormattedError("Must provide schema"))
return vr
}
if astDoc == nil {
vr.Errors = append(vr.Errors, gqlerrors.NewFormattedError("Must provide document"))
return vr
}
typeInfo := NewTypeInfo(&TypeInfoConfig{
Schema: schema,
})
vr.Errors = VisitUsingRules(schema, typeInfo, astDoc, rules)
if len(vr.Errors) == 0 {
vr.IsValid = true
}
return vr
}
// VisitUsingRules This uses a specialized visitor which runs multiple visitors in parallel,
// while maintaining the visitor skip and break API.
//
// @internal
// Had to expose it to unit test experimental customizable validation feature,
// but not meant for public consumption
func VisitUsingRules(schema *Schema, typeInfo *TypeInfo, astDoc *ast.Document, rules []ValidationRuleFn) []gqlerrors.FormattedError {
context := NewValidationContext(schema, astDoc, typeInfo)
visitors := []*visitor.VisitorOptions{}
for _, rule := range rules {
instance := rule(context)
visitors = append(visitors, instance.VisitorOpts)
}
// Visit the whole document with each instance of all provided rules.
visitor.Visit(astDoc, visitor.VisitWithTypeInfo(typeInfo, visitor.VisitInParallel(visitors...)), nil)
return context.Errors()
}
type HasSelectionSet interface {
GetKind() string
GetLoc() *ast.Location
GetSelectionSet() *ast.SelectionSet
}
var _ HasSelectionSet = (*ast.OperationDefinition)(nil)
var _ HasSelectionSet = (*ast.FragmentDefinition)(nil)
type VariableUsage struct {
Node *ast.Variable
Type Input
}
type ValidationContext struct {
schema *Schema
astDoc *ast.Document
typeInfo *TypeInfo
errors []gqlerrors.FormattedError
fragments map[string]*ast.FragmentDefinition
variableUsages map[HasSelectionSet][]*VariableUsage
recursiveVariableUsages map[*ast.OperationDefinition][]*VariableUsage
recursivelyReferencedFragments map[*ast.OperationDefinition][]*ast.FragmentDefinition
fragmentSpreads map[HasSelectionSet][]*ast.FragmentSpread
}
func NewValidationContext(schema *Schema, astDoc *ast.Document, typeInfo *TypeInfo) *ValidationContext {
return &ValidationContext{
schema: schema,
astDoc: astDoc,
typeInfo: typeInfo,
fragments: map[string]*ast.FragmentDefinition{},
variableUsages: map[HasSelectionSet][]*VariableUsage{},
recursiveVariableUsages: map[*ast.OperationDefinition][]*VariableUsage{},
recursivelyReferencedFragments: map[*ast.OperationDefinition][]*ast.FragmentDefinition{},
fragmentSpreads: map[HasSelectionSet][]*ast.FragmentSpread{},
}
}
func (ctx *ValidationContext) ReportError(err error) {
formattedErr := gqlerrors.FormatError(err)
ctx.errors = append(ctx.errors, formattedErr)
}
func (ctx *ValidationContext) Errors() []gqlerrors.FormattedError {
return ctx.errors
}
func (ctx *ValidationContext) Schema() *Schema {
return ctx.schema
}
func (ctx *ValidationContext) Document() *ast.Document {
return ctx.astDoc
}
func (ctx *ValidationContext) Fragment(name string) *ast.FragmentDefinition {
if len(ctx.fragments) == 0 {
if ctx.Document() == nil {
return nil
}
defs := ctx.Document().Definitions
fragments := map[string]*ast.FragmentDefinition{}
for _, def := range defs {
if def, ok := def.(*ast.FragmentDefinition); ok {
defName := ""
if def.Name != nil {
defName = def.Name.Value
}
fragments[defName] = def
}
}
ctx.fragments = fragments
}
f, _ := ctx.fragments[name]
return f
}
func (ctx *ValidationContext) FragmentSpreads(node HasSelectionSet) []*ast.FragmentSpread {
if spreads, ok := ctx.fragmentSpreads[node]; ok && spreads != nil {
return spreads
}
spreads := []*ast.FragmentSpread{}
setsToVisit := []*ast.SelectionSet{node.GetSelectionSet()}
for {
if len(setsToVisit) == 0 {
break
}
var set *ast.SelectionSet
// pop
set, setsToVisit = setsToVisit[len(setsToVisit)-1], setsToVisit[:len(setsToVisit)-1]
if set.Selections != nil {
for _, selection := range set.Selections {
switch selection := selection.(type) {
case *ast.FragmentSpread:
spreads = append(spreads, selection)
case *ast.Field:
if selection.SelectionSet != nil {
setsToVisit = append(setsToVisit, selection.SelectionSet)
}
case *ast.InlineFragment:
if selection.SelectionSet != nil {
setsToVisit = append(setsToVisit, selection.SelectionSet)
}
}
}
}
ctx.fragmentSpreads[node] = spreads
}
return spreads
}
func (ctx *ValidationContext) RecursivelyReferencedFragments(operation *ast.OperationDefinition) []*ast.FragmentDefinition {
if fragments, ok := ctx.recursivelyReferencedFragments[operation]; ok && fragments != nil {
return fragments
}
fragments := []*ast.FragmentDefinition{}
collectedNames := map[string]bool{}
nodesToVisit := []HasSelectionSet{operation}
for {
if len(nodesToVisit) == 0 {
break
}
var node HasSelectionSet
node, nodesToVisit = nodesToVisit[len(nodesToVisit)-1], nodesToVisit[:len(nodesToVisit)-1]
spreads := ctx.FragmentSpreads(node)
for _, spread := range spreads {
fragName := ""
if spread.Name != nil {
fragName = spread.Name.Value
}
if res, ok := collectedNames[fragName]; !ok || !res {
collectedNames[fragName] = true
fragment := ctx.Fragment(fragName)
if fragment != nil {
fragments = append(fragments, fragment)
nodesToVisit = append(nodesToVisit, fragment)
}
}
}
}
ctx.recursivelyReferencedFragments[operation] = fragments
return fragments
}
func (ctx *ValidationContext) VariableUsages(node HasSelectionSet) []*VariableUsage {
if usages, ok := ctx.variableUsages[node]; ok && usages != nil {
return usages
}
usages := []*VariableUsage{}
typeInfo := NewTypeInfo(&TypeInfoConfig{
Schema: ctx.schema,
})
visitor.Visit(node, visitor.VisitWithTypeInfo(typeInfo, &visitor.VisitorOptions{
KindFuncMap: map[string]visitor.NamedVisitFuncs{
kinds.VariableDefinition: visitor.NamedVisitFuncs{
Kind: func(p visitor.VisitFuncParams) (string, interface{}) {
return visitor.ActionSkip, nil
},
},
kinds.Variable: visitor.NamedVisitFuncs{
Kind: func(p visitor.VisitFuncParams) (string, interface{}) {
if node, ok := p.Node.(*ast.Variable); ok && node != nil {
usages = append(usages, &VariableUsage{
Node: node,
Type: typeInfo.InputType(),
})
}
return visitor.ActionNoChange, nil
},
},
},
}), nil)
ctx.variableUsages[node] = usages
return usages
}
func (ctx *ValidationContext) RecursiveVariableUsages(operation *ast.OperationDefinition) []*VariableUsage {
if usages, ok := ctx.recursiveVariableUsages[operation]; ok && usages != nil {
return usages
}
usages := ctx.VariableUsages(operation)
fragments := ctx.RecursivelyReferencedFragments(operation)
for _, fragment := range fragments {
fragmentUsages := ctx.VariableUsages(fragment)
usages = append(usages, fragmentUsages...)
}
ctx.recursiveVariableUsages[operation] = usages
return usages
}
func (ctx *ValidationContext) Type() Output {
return ctx.typeInfo.Type()
}
func (ctx *ValidationContext) ParentType() Composite {
return ctx.typeInfo.ParentType()
}
func (ctx *ValidationContext) InputType() Input {
return ctx.typeInfo.InputType()
}
func (ctx *ValidationContext) FieldDef() *FieldDefinition {
return ctx.typeInfo.FieldDef()
}
func (ctx *ValidationContext) Directive() *Directive {
return ctx.typeInfo.Directive()
}
func (ctx *ValidationContext) Argument() *Argument {
return ctx.typeInfo.Argument()
}<|fim▁end|> | Errors []gqlerrors.FormattedError
}
|
<|file_name|>GuiceIdentityGenerator.java<|end_file_name|><|fim▁begin|>/*
* Copyright © 2013-2021, The SeedStack authors <http://seedstack.org>
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package org.seedstack.business.fixtures.identity;
import javax.inject.Named;
import org.seedstack.business.domain.Entity;
import org.seedstack.business.util.SequenceGenerator;<|fim▁hole|> @Override
public <E extends Entity<Long>> Long generate(Class<E> entityClass) {
return (long) Math.random();
}
}<|fim▁end|> |
@Named("guice")
public class GuiceIdentityGenerator implements SequenceGenerator { |
<|file_name|>test_airpollution_manager.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import unittest
from pyowm.airpollutionapi30 import airpollution_client, airpollution_manager, coindex, so2index, ozone, no2index, airstatus
from pyowm.config import DEFAULT_CONFIG
from pyowm.constants import AIRPOLLUTION_API_VERSION
from pyowm.utils import timestamps
from tests.unit.airpollutionapi30.test_ozone import OZONE_JSON
from tests.unit.airpollutionapi30.test_coindex import COINDEX_JSON
from tests.unit.airpollutionapi30.test_no2index import NO2INDEX_JSON
from tests.unit.airpollutionapi30.test_so2index import SO2INDEX_JSON
from tests.unit.airpollutionapi30.test_airstatus import AIRSTATUS_JSON, AIRSTATUS_MULTIPLE_JSON
class TestAirPollutionManager(unittest.TestCase):
__test_instance = airpollution_manager.AirPollutionManager('fakeapikey', DEFAULT_CONFIG)
def mock_get_coi_returning_coindex_around_coords(self, params_dict):
return json.loads(COINDEX_JSON)
def mock_get_o3_returning_ozone_around_coords(self, params_dict):
return json.loads(OZONE_JSON)
def mock_get_no2_returning_no2index_around_coords(self, params_dict):
return json.loads(NO2INDEX_JSON)
def mock_get_air_pollution(self, params_dict):
return json.loads(AIRSTATUS_JSON)
def mock_get_forecast_air_pollution(self, params_dict):
return json.loads(AIRSTATUS_MULTIPLE_JSON)
def mock_get_historical_air_pollution(self, params_dict):
return json.loads(AIRSTATUS_MULTIPLE_JSON)
def mock_get_so2_returning_so2index_around_coords(self, params_dict):
return json.loads(SO2INDEX_JSON)
def test_instantiation_with_wrong_params(self):
self.assertRaises(AssertionError, airpollution_manager.AirPollutionManager, None, dict())
self.assertRaises(AssertionError, airpollution_manager.AirPollutionManager, 'apikey', None)
def test_get_uvindex_api_version(self):
result = self.__test_instance.airpollution_api_version()
self.assertIsInstance(result, tuple)
self.assertEqual(result, AIRPOLLUTION_API_VERSION)
def test_coindex_around_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_coi
airpollution_client.AirPollutionHttpClient.get_coi = \
self.mock_get_coi_returning_coindex_around_coords
result = self.__test_instance.coindex_around_coords(45, 9, interval='day')
airpollution_client.AirPollutionHttpClient.coi = ref_to_original
self.assertTrue(isinstance(result, coindex.COIndex))
self.assertIsNotNone(result.reference_time)
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(result.co_samples)
ref_to_original = airpollution_client.AirPollutionHttpClient.get_coi
airpollution_client.AirPollutionHttpClient.get_coi = \
self.mock_get_coi_returning_coindex_around_coords
result = self.__test_instance.coindex_around_coords(45, 9, interval=None)
airpollution_client.AirPollutionHttpClient.coi = ref_to_original
self.assertTrue(isinstance(result, coindex.COIndex))
self.assertEqual('year', result.interval)
def test_coindex_around_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
self.__test_instance, 200, 2.5)
def test_ozone_around_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_o3
airpollution_client.AirPollutionHttpClient.get_o3 = \
self.mock_get_o3_returning_ozone_around_coords
result = self.__test_instance.ozone_around_coords(45, 9, interval='day')
airpollution_client.AirPollutionHttpClient.o3 = ref_to_original
self.assertTrue(isinstance(result, ozone.Ozone))
self.assertIsNotNone(result.reference_time)
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(result.du_value)
ref_to_original = airpollution_client.AirPollutionHttpClient.get_o3
airpollution_client.AirPollutionHttpClient.get_o3 = \
self.mock_get_o3_returning_ozone_around_coords
result = self.__test_instance.ozone_around_coords(45, 9, interval=None)
airpollution_client.AirPollutionHttpClient.o3 = ref_to_original
self.assertTrue(isinstance(result, ozone.Ozone))
self.assertEqual('year', result.interval)
def test_ozone_around_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
self.__test_instance, 200, 2.5)
def test_no2index_around_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_no2
airpollution_client.AirPollutionHttpClient.get_no2 = \
self.mock_get_no2_returning_no2index_around_coords
result = self.__test_instance.no2index_around_coords(45, 9, interval='day')
airpollution_client.AirPollutionHttpClient.get_no2 = ref_to_original
self.assertTrue(isinstance(result, no2index.NO2Index))
self.assertIsNotNone(result.reference_time)
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(result.no2_samples)
ref_to_original = airpollution_client.AirPollutionHttpClient.get_no2
airpollution_client.AirPollutionHttpClient.get_no2 = \
self.mock_get_no2_returning_no2index_around_coords
result = self.__test_instance.no2index_around_coords(45, 9, interval=None)<|fim▁hole|>
def test_no2index_around_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
self.__test_instance, 200, 2.5)
def test_so2index_around_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_so2
airpollution_client.AirPollutionHttpClient.get_so2 = \
self.mock_get_so2_returning_so2index_around_coords
result = self.__test_instance.so2index_around_coords(45, 9, interval='day')
airpollution_client.AirPollutionHttpClient.get_so2 = ref_to_original
self.assertTrue(isinstance(result, so2index.SO2Index))
self.assertIsNotNone(result.reference_time())
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(result.so2_samples)
self.assertIsNotNone(result.interval)
ref_to_original = airpollution_client.AirPollutionHttpClient.get_so2
airpollution_client.AirPollutionHttpClient.get_so2 = \
self.mock_get_so2_returning_so2index_around_coords
result = self.__test_instance.so2index_around_coords(45, 9, interval=None)
airpollution_client.AirPollutionHttpClient.get_so2 = ref_to_original
self.assertTrue(isinstance(result, so2index.SO2Index))
self.assertEqual('year', result.interval)
def test_so2index_around_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.so2index_around_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.so2index_around_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.so2index_around_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.so2index_around_coords, \
self.__test_instance, 200, 2.5)
def test_air_quality_at_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_air_pollution
airpollution_client.AirPollutionHttpClient.get_air_pollution = \
self.mock_get_air_pollution
result = self.__test_instance.air_quality_at_coords(45, 9)
airpollution_client.AirPollutionHttpClient.get_air_pollution = ref_to_original
self.assertTrue(isinstance(result, airstatus.AirStatus))
self.assertIsNotNone(result.reference_time)
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(result.air_quality_data)
def test_air_quality_at_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_at_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_at_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_at_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_at_coords, \
self.__test_instance, 200, 2.5)
def test_air_quality_forecast_at_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_forecast_air_pollution
airpollution_client.AirPollutionHttpClient.get_forecast_air_pollution = \
self.mock_get_forecast_air_pollution
result = self.__test_instance.air_quality_forecast_at_coords(45, 9)
airpollution_client.AirPollutionHttpClient.get_forecast_air_pollution = ref_to_original
self.assertTrue(isinstance(result, list))
for item in result:
self.assertIsInstance(item, airstatus.AirStatus)
self.assertIsNotNone(item.reference_time)
self.assertIsNotNone(item.reception_time())
loc = item.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(item.air_quality_data)
def test_air_quality_forecast_at_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_forecast_at_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_forecast_at_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_forecast_at_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_forecast_at_coords, \
self.__test_instance, 200, 2.5)
def test_air_quality_history_at_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_historical_air_pollution
airpollution_client.AirPollutionHttpClient.get_historical_air_pollution = \
self.mock_get_historical_air_pollution
result = self.__test_instance.air_quality_history_at_coords(45, 9, 12345678)
airpollution_client.AirPollutionHttpClient.get_historical_air_pollution = ref_to_original
self.assertTrue(isinstance(result, list))
for item in result:
self.assertIsInstance(item, airstatus.AirStatus)
self.assertIsNotNone(item.reference_time)
self.assertIsNotNone(item.reception_time())
loc = item.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(item.air_quality_data)
def test_air_quality_history_at_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
self.__test_instance, 43.7, -200.0, 12345678, 12349999)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
self.__test_instance, 43.7, 200.0, 12345678, 12349999)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
self.__test_instance, -200, 2.5, 12345678, 12349999)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
self.__test_instance, 200, 2.5, 12345678, 12349999)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
self.__test_instance, 200, 2.5, 'test')
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
self.__test_instance, 200, 2.5, 'test', 'test2')
def test_air_quality_history_at_coords_clips_end_param_to_current_timestamp(self):
now = timestamps.now(timeformat='unix')
end = now + 99999999999
def assert_clipped(obj, params_dict):
self.assertEqual(params_dict['end'], now)
airpollution_client.AirPollutionHttpClient.get_historical_air_pollution = assert_clipped
_ = self.__test_instance.air_quality_history_at_coords(45, 9, 12345678, end=end)
def test_repr(self):
print(self.__test_instance)<|fim▁end|> | airpollution_client.AirPollutionHttpClient.get_no2 = ref_to_original
self.assertTrue(isinstance(result, no2index.NO2Index))
self.assertEqual('year', result.interval) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .tensor import *<|fim▁hole|><|fim▁end|> | from .math import * |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for ldstext project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""<|fim▁hole|>import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ldstext.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()<|fim▁end|> | |
<|file_name|>dialogflow_v3_generated_agents_export_agent_async.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ExportAgent
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflowcx
# [START dialogflow_v3_generated_Agents_ExportAgent_async]
from google.cloud import dialogflowcx_v3
async def sample_export_agent():
# Create a client
client = dialogflowcx_v3.AgentsAsyncClient()
# Initialize request argument(s)
request = dialogflowcx_v3.ExportAgentRequest(
name="name_value",
)
# Make the request
operation = client.export_agent(request=request)
<|fim▁hole|>
response = await operation.result()
# Handle the response
print(response)
# [END dialogflow_v3_generated_Agents_ExportAgent_async]<|fim▁end|> | print("Waiting for operation to complete...") |
<|file_name|>url.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import urlparse
def uc2utf8(input):
## argh! this feels wrong, but seems to be needed.
if type(input) == unicode:
return input.encode('utf-8')
else:
return input
class URL:
"""
This class is for wrapping URLs into objects. It's used
internally in the library, end users should not need to know
anything about this class. All methods that accept URLs can be
fed either with an URL object, a string or an urlparse.ParsedURL
object.
Addresses may be one out of three:
1) a path relative to the DAV-root, i.e. "someuser/calendar" may
refer to
"http://my.davical-server.example.com/pycaldav.php/someuser/calendar".
2) an absolute path, i.e. "/pycaldav.php/someuser/calendar"
3) a fully qualified URL,
i.e. "http://someuser:somepass@my.davical-server.example.com/pycaldav.php/someuser/calendar".
Remark that hostname, port, user, pass is typically given when
instantiating the DAVClient object and cannot be overridden later.
As of 2013-11, some methods in the pycaldav library expected strings
and some expected urlparse.ParseResult objects, some expected
fully qualified URLs and most expected absolute paths. The purpose
of this class is to ensure consistency and at the same time
maintaining backward compatibility. Basically, all methods should
accept any kind of URL.
"""
def __init__(self, url):
if isinstance(url, urlparse.ParseResult) or isinstance(url, urlparse.SplitResult):
self.url_parsed = url
self.url_raw = None
else:
self.url_raw = url
self.url_parsed = None
def __nonzero__(self):
if self.url_raw or self.url_parsed:
return True
else:
return False
def __ne__(self, other):
return not self == other
def __eq__(self, other):
if str(self) == str(other):
return True
## The URLs could have insignificant differences
me = self.canonical()
if hasattr(other, 'canonical'):
other = other.canonical()
return str(me) == str(other)
## TODO: better naming? Will return url if url is already an URL
## object, else will instantiate a new URL object
@classmethod
def objectify(self, url):
if url is None:
return None
if isinstance(url, URL):
return url
else:
return URL(url)
## To deal with all kind of methods/properties in the ParseResult
## class
def __getattr__(self, attr):
if self.url_parsed is None:
self.url_parsed = urlparse.urlparse(self.url_raw)
if hasattr(self.url_parsed, attr):
return getattr(self.url_parsed, attr)
else:
return getattr(self.__unicode__(), attr)
## returns the url in text format
def __str__(self):
return self.__unicode__().encode('utf-8')
## returns the url in text format
def __unicode__(self):
if self.url_raw is None:
self.url_raw = self.url_parsed.geturl()
if isinstance(self.url_raw, unicode):
return self.url_raw
else:<|fim▁hole|> def __repr__(self):
return "URL(%s)" % str(self)
def is_auth(self):
return self.username is not None
def unauth(self):
if not self.is_auth():
return self
return URL.objectify(urlparse.ParseResult(
self.scheme, '%s:%s' % (self.hostname, self.port),
self.path.replace('//', '/'), self.params, self.query, self.fragment))
def canonical(self):
"""
a canonical URL ... remove authentication details, make sure there
are no double slashes, and to make sure the URL is always the same,
run it through the urlparser
"""
url = self.unauth()
## this is actually already done in the unauth method ...
if '//' in url.path:
raise NotImplementedError("remove the double slashes")
## TODO: optimize - we're going to burn some CPU cycles here
if url.endswith('/'):
url = URL.objectify(str(url)[:-1])
## This looks like a noop - but it may have the side effect
## that urlparser be run (actually not - unauth ensures we
## have an urlparse.ParseResult object)
url.scheme
## make sure to delete the string version
url.url_raw = None
return url
def join(self, path):
"""
assumes this object is the base URL or base path. If the path
is relative, it should be appended to the base. If the path
is absolute, it should be added to the connection details of
self. If the path already contains connection details and the
connection details differ from self, raise an error.
"""
if not path:
return self
path = URL.objectify(path)
if (
(path.scheme and self.scheme and path.scheme != self.scheme)
or
(path.hostname and self.hostname and path.hostname != self.hostname)
or
(path.port and self.port and path.port != self.port)
):
raise ValueError("%s can't be joined with %s" % (self, path))
if path.path[0] == '/':
ret_path = uc2utf8(path.path)
else:
sep = "/"
if self.path.endswith("/"):
sep = ""
ret_path = "%s%s%s" % (self.path, sep, uc2utf8(path.path))
return URL(urlparse.ParseResult(
self.scheme or path.scheme, self.netloc or path.netloc, ret_path, path.params, path.query, path.fragment))
def make(url):
"""Backward compatibility"""
return URL.objectify(url)<|fim▁end|> | return unicode(self.url_raw, 'utf-8')
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from zope.interface import classImplements
from pyramid.config import Configurator
from clld.interfaces import ILanguage, IMapMarker, IValueSet, IValue
from clld.web.app import MapMarker
from clld.db.models.common import Parameter_files
# we must make sure custom models are known at database initialization!
from tsammalex import models
from tsammalex.interfaces import IEcoregion, IImage
# associate Parameter_files with the IImage interface to make the model work as resource.
classImplements(Parameter_files, IImage)<|fim▁hole|>
_ = lambda s: s
_('Parameter')
_('Parameters')
_('Source')
_('Sources')
_('Value')
_('Values')
class TsammalexMapMarker(MapMarker):
def get_color(self, ctx, req):
lineage = None
if ctx and isinstance(ctx, (tuple, list)):
ctx = ctx[0]
if ILanguage.providedBy(ctx):
lineage = ctx.lineage
elif IValueSet.providedBy(ctx):
lineage = ctx.language.lineage
elif IValue.providedBy(ctx):
lineage = ctx.valueset.language.lineage
if isinstance(ctx, str):
lineage = req.db.query(models.Lineage)\
.filter(models.Lineage.name == ctx).one()
return lineage.color if lineage else 'ff6600'
def __call__(self, ctx, req):
return req.static_url('tsammalex:static/icons/%s.png' % self.get_color(ctx, req))
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
config = Configurator(settings=settings)
config.include('clldmpg')
config.registry.registerUtility(TsammalexMapMarker(), IMapMarker)
config.registry.settings['home_comp'].append('contributors')
config.register_menu(
('dataset', dict(label='Home')),
'values',
'languages',
'parameters',
'ecoregions',
'sources',
'images',
#('contributors', dict(label='Contribute'))
('contribute', lambda ctx, req: (req.route_url('help'), 'Contribute!'))
)
config.register_resource('ecoregion', models.Ecoregion, IEcoregion, with_index=True)
config.register_resource('image', Parameter_files, IImage, with_index=True)
return config.make_wsgi_app()<|fim▁end|> | |
<|file_name|>bootstrap_js_tooltip_7.js<|end_file_name|><|fim▁begin|>/* ========================================================================
* Bootstrap: tooltip.js v3.2.0
* http://getbootstrap.com/javascript/#tooltip
* Inspired by the original jQuery.tipsy by Jason Frame
* ========================================================================
* Copyright 2011-2014 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// TOOLTIP PUBLIC CLASS DEFINITION<|fim▁hole|> var Tooltip = function (element, options) {
this.type =
this.options =
this.enabled =
this.timeout =
this.hoverState =
this.$element = null
this.init('tooltip', element, options)
}
Tooltip.VERSION = '3.2.0'
Tooltip.TRANSITION_DURATION = 150
Tooltip.DEFAULTS = {
animation: true,
placement: 'top',
selector: false,
template: '<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',
trigger: 'hover focus',
title: '',
delay: 0,
html: false,
container: false,
viewport: {
selector: 'body',
padding: 0
}
}
Tooltip.prototype.init = function (type, element, options) {
this.enabled = true
this.type = type
this.$element = $(element)
this.options = this.getOptions(options)
this.$viewport = this.options.viewport && $(this.options.viewport.selector || this.options.viewport)
var triggers = this.options.trigger.split(' ')
for (var i = triggers.length; i--;) {
var trigger = triggers[i]
if (trigger == 'click') {
this.$element.on('click.' + this.type, this.options.selector, $.proxy(this.toggle, this))
} else if (trigger != 'manual') {
var eventIn = trigger == 'hover' ? 'mouseenter' : 'focusin'
var eventOut = trigger == 'hover' ? 'mouseleave' : 'focusout'
this.$element.on(eventIn + '.' + this.type, this.options.selector, $.proxy(this.enter, this))
this.$element.on(eventOut + '.' + this.type, this.options.selector, $.proxy(this.leave, this))
}
}
this.options.selector ?
(this._options = $.extend({}, this.options, { trigger: 'manual', selector: '' })) :
this.fixTitle()
}
Tooltip.prototype.getDefaults = function () {
return Tooltip.DEFAULTS
}
Tooltip.prototype.getOptions = function (options) {
options = $.extend({}, this.getDefaults(), this.$element.data(), options)
if (options.delay && typeof options.delay == 'number') {
options.delay = {
show: options.delay,
hide: options.delay
}
}
return options
}
Tooltip.prototype.getDelegateOptions = function () {
var options = {}
var defaults = this.getDefaults()
this._options && $.each(this._options, function (key, value) {
if (defaults[key] != value) options[key] = value
})
return options
}
Tooltip.prototype.enter = function (obj) {
var self = obj instanceof this.constructor ?
obj : $(obj.currentTarget).data('bs.' + this.type)
if (self && self.$tip && self.$tip.is(':visible')) {
self.hoverState = 'in'
return
}
if (!self) {
self = new this.constructor(obj.currentTarget, this.getDelegateOptions())
$(obj.currentTarget).data('bs.' + this.type, self)
}
clearTimeout(self.timeout)
self.hoverState = 'in'
if (!self.options.delay || !self.options.delay.show) return self.show()
self.timeout = setTimeout(function () {
if (self.hoverState == 'in') self.show()
}, self.options.delay.show)
}
Tooltip.prototype.leave = function (obj) {
var self = obj instanceof this.constructor ?
obj : $(obj.currentTarget).data('bs.' + this.type)
if (!self) {
self = new this.constructor(obj.currentTarget, this.getDelegateOptions())
$(obj.currentTarget).data('bs.' + this.type, self)
}
clearTimeout(self.timeout)
self.hoverState = 'out'
if (!self.options.delay || !self.options.delay.hide) return self.hide()
self.timeout = setTimeout(function () {
if (self.hoverState == 'out') self.hide()
}, self.options.delay.hide)
}
Tooltip.prototype.show = function () {
var e = $.Event('show.bs.' + this.type)
if (this.hasContent() && this.enabled) {
this.$element.trigger(e)
var inDom = $.contains(this.$element[0].ownerDocument.documentElement, this.$element[0])
if (e.isDefaultPrevented() || !inDom) return
var that = this
var $tip = this.tip()
var tipId = this.getUID(this.type)
this.setContent()
$tip.attr('id', tipId)
this.$element.attr('aria-describedby', tipId)
if (this.options.animation) $tip.addClass('fade')
var placement = typeof this.options.placement == 'function' ?
this.options.placement.call(this, $tip[0], this.$element[0]) :
this.options.placement
var autoToken = /\s?auto?\s?/i
var autoPlace = autoToken.test(placement)
if (autoPlace) placement = placement.replace(autoToken, '') || 'top'
$tip
.detach()
.css({ top: 0, left: 0, display: 'block' })
.addClass(placement)
.data('bs.' + this.type, this)
this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element)
var pos = this.getPosition()
var actualWidth = $tip[0].offsetWidth
var actualHeight = $tip[0].offsetHeight
if (autoPlace) {
var orgPlacement = placement
var $container = this.options.container ? $(this.options.container) : this.$element.parent()
var containerDim = this.getPosition($container)
placement = placement == 'bottom' && pos.top + pos.height + actualHeight - containerDim.scroll > containerDim.height ? 'top' :
placement == 'top' && pos.top - containerDim.scroll - actualHeight < containerDim.top ? 'bottom' :
placement == 'right' && pos.right + actualWidth > containerDim.width ? 'left' :
placement == 'left' && pos.left - actualWidth < containerDim.left ? 'right' :
placement
$tip
.removeClass(orgPlacement)
.addClass(placement)
}
var calculatedOffset = this.getCalculatedOffset(placement, pos, actualWidth, actualHeight)
this.applyPlacement(calculatedOffset, placement)
var complete = function () {
that.$element.trigger('shown.bs.' + that.type)
that.hoverState = null
}
$.support.transition && this.$tip.hasClass('fade') ?
$tip
.one('bsTransitionEnd', complete)
.emulateTransitionEnd(Tooltip.TRANSITION_DURATION) :
complete()
}
}
Tooltip.prototype.applyPlacement = function (offset, placement) {
var $tip = this.tip()
var width = $tip[0].offsetWidth
var height = $tip[0].offsetHeight
// manually read margins because getBoundingClientRect includes difference
var marginTop = parseInt($tip.css('margin-top'), 10)
var marginLeft = parseInt($tip.css('margin-left'), 10)
// we must check for NaN for ie 8/9
if (isNaN(marginTop)) marginTop = 0
if (isNaN(marginLeft)) marginLeft = 0
offset.top = offset.top + marginTop
offset.left = offset.left + marginLeft
// $.fn.offset doesn't round pixel values
// so we use setOffset directly with our own function B-0
$.offset.setOffset($tip[0], $.extend({
using: function (props) {
$tip.css({
top: Math.round(props.top),
left: Math.round(props.left)
})
}
}, offset), 0)
$tip.addClass('in')
// check to see if placing tip in new offset caused the tip to resize itself
var actualWidth = $tip[0].offsetWidth
var actualHeight = $tip[0].offsetHeight
if (placement == 'top' && actualHeight != height) {
offset.top = offset.top + height - actualHeight
}
var delta = this.getViewportAdjustedDelta(placement, offset, actualWidth, actualHeight)
if (delta.left) offset.left += delta.left
else offset.top += delta.top
var isVertical = /top|bottom/.test(placement)
var arrowDelta = isVertical ? delta.left * 2 - width + actualWidth : delta.top * 2 - height + actualHeight
var arrowOffsetPosition = isVertical ? 'offsetWidth' : 'offsetHeight'
$tip.offset(offset)
this.replaceArrow(arrowDelta, $tip[0][arrowOffsetPosition], isVertical)
}
Tooltip.prototype.replaceArrow = function (delta, dimension, isHorizontal) {
this.arrow()
.css(isHorizontal ? 'left' : 'top', 50 * (1 - delta / dimension) + '%')
.css(isHorizontal ? 'top' : 'left', '')
}
Tooltip.prototype.setContent = function () {
var $tip = this.tip()
var title = this.getTitle()
$tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title)
$tip.removeClass('fade in top bottom left right')
}
Tooltip.prototype.hide = function (callback) {
var that = this
var $tip = this.tip()
var e = $.Event('hide.bs.' + this.type)
function complete() {
if (that.hoverState != 'in') $tip.detach()
that.$element
.removeAttr('aria-describedby')
.trigger('hidden.bs.' + that.type)
callback && callback()
}
this.$element.trigger(e)
if (e.isDefaultPrevented()) return
$tip.removeClass('in')
$.support.transition && this.$tip.hasClass('fade') ?
$tip
.one('bsTransitionEnd', complete)
.emulateTransitionEnd(Tooltip.TRANSITION_DURATION) :
complete()
this.hoverState = null
return this
}
Tooltip.prototype.fixTitle = function () {
var $e = this.$element
if ($e.attr('title') || typeof ($e.attr('data-original-title')) != 'string') {
$e.attr('data-original-title', $e.attr('title') || '').attr('title', '')
}
}
Tooltip.prototype.hasContent = function () {
return this.getTitle()
}
Tooltip.prototype.getPosition = function ($element) {
$element = $element || this.$element
var el = $element[0]
var isBody = el.tagName == 'BODY'
var elRect = el.getBoundingClientRect()
if (elRect.width == null) {
// width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093
elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top })
}
var elOffset = isBody ? { top: 0, left: 0 } : $element.offset()
var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() }
var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null
return $.extend({}, elRect, scroll, outerDims, elOffset)
}
Tooltip.prototype.getCalculatedOffset = function (placement, pos, actualWidth, actualHeight) {
return placement == 'bottom' ? { top: pos.top + pos.height, left: pos.left + pos.width / 2 - actualWidth / 2 } :
placement == 'top' ? { top: pos.top - actualHeight, left: pos.left + pos.width / 2 - actualWidth / 2 } :
placement == 'left' ? { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left - actualWidth } :
/* placement == 'right' */ { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left + pos.width }
}
Tooltip.prototype.getViewportAdjustedDelta = function (placement, pos, actualWidth, actualHeight) {
var delta = { top: 0, left: 0 }
if (!this.$viewport) return delta
var viewportPadding = this.options.viewport && this.options.viewport.padding || 0
var viewportDimensions = this.getPosition(this.$viewport)
if (/right|left/.test(placement)) {
var topEdgeOffset = pos.top - viewportPadding - viewportDimensions.scroll
var bottomEdgeOffset = pos.top + viewportPadding - viewportDimensions.scroll + actualHeight
if (topEdgeOffset < viewportDimensions.top) { // top overflow
delta.top = viewportDimensions.top - topEdgeOffset
} else if (bottomEdgeOffset > viewportDimensions.top + viewportDimensions.height) { // bottom overflow
delta.top = viewportDimensions.top + viewportDimensions.height - bottomEdgeOffset
}
} else {
var leftEdgeOffset = pos.left - viewportPadding
var rightEdgeOffset = pos.left + viewportPadding + actualWidth
if (leftEdgeOffset < viewportDimensions.left) { // left overflow
delta.left = viewportDimensions.left - leftEdgeOffset
} else if (rightEdgeOffset > viewportDimensions.width) { // right overflow
delta.left = viewportDimensions.left + viewportDimensions.width - rightEdgeOffset
}
}
return delta
}
Tooltip.prototype.getTitle = function () {
var title
var $e = this.$element
var o = this.options
title = $e.attr('data-original-title')
|| (typeof o.title == 'function' ? o.title.call($e[0]) : o.title)
return title
}
Tooltip.prototype.getUID = function (prefix) {
do prefix += ~~(Math.random() * 1000000)
while (document.getElementById(prefix))
return prefix
}
Tooltip.prototype.tip = function () {
return (this.$tip = this.$tip || $(this.options.template))
}
Tooltip.prototype.arrow = function () {
return (this.$arrow = this.$arrow || this.tip().find('.tooltip-arrow'))
}
Tooltip.prototype.enable = function () {
this.enabled = true
}
Tooltip.prototype.disable = function () {
this.enabled = false
}
Tooltip.prototype.toggleEnabled = function () {
this.enabled = !this.enabled
}
Tooltip.prototype.toggle = function (e) {
var self = this
if (e) {
self = $(e.currentTarget).data('bs.' + this.type)
if (!self) {
self = new this.constructor(e.currentTarget, this.getDelegateOptions())
$(e.currentTarget).data('bs.' + this.type, self)
}
}
self.tip().hasClass('in') ? self.leave(self) : self.enter(self)
}
Tooltip.prototype.destroy = function () {
var that = this
clearTimeout(this.timeout)
this.hide(function () {
that.$element.off('.' + that.type).removeData('bs.' + that.type)
})
}
// TOOLTIP PLUGIN DEFINITION
// =========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.tooltip')
var options = typeof option == 'object' && option
if (!data && option == 'destroy') return
if (!data) $this.data('bs.tooltip', (data = new Tooltip(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.tooltip
$.fn.tooltip = Plugin
$.fn.tooltip.Constructor = Tooltip
// TOOLTIP NO CONFLICT
// ===================
$.fn.tooltip.noConflict = function () {
$.fn.tooltip = old
return this
}
}(jQuery);<|fim▁end|> | // ===============================
|
<|file_name|>test_base.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from libcloud.utils.py3 import httplib
from io import BytesIO
from mock import Mock
from libcloud.utils.py3 import StringIO
from libcloud.utils.py3 import b
from libcloud.storage.base import StorageDriver
from libcloud.storage.base import DEFAULT_CONTENT_TYPE
from libcloud.test import unittest
from libcloud.test import MockHttp
<|fim▁hole|> body = 'ab'
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def root(self, method, url, body, headers):
body = 'ab'
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
class BaseStorageTests(unittest.TestCase):
def setUp(self):
self.send_called = 0
StorageDriver.connectionCls.conn_class = BaseMockRawResponse
self.driver1 = StorageDriver('username', 'key', host='localhost')
self.driver1.supports_chunked_encoding = True
self.driver2 = StorageDriver('username', 'key', host='localhost')
self.driver2.supports_chunked_encoding = False
self.driver1.strict_mode = False
self.driver1.strict_mode = False
def test__upload_object_iterator_must_have_next_method(self):
valid_iterators = [BytesIO(b('134')), StringIO('bar')]
invalid_iterators = ['foobar', '', False, True, 1, object()]
def upload_func(*args, **kwargs):
return True, 'barfoo', 100
kwargs = {'object_name': 'foo', 'content_type': 'foo/bar',
'upload_func': upload_func, 'upload_func_kwargs': {},
'request_path': '/', 'headers': {}}
for value in valid_iterators:
kwargs['stream'] = value
self.driver1._upload_object(**kwargs)
for value in invalid_iterators:
kwargs['stream'] = value
try:
self.driver1._upload_object(**kwargs)
except AttributeError:
pass
else:
self.fail('Exception was not thrown')
def test__get_hash_function(self):
self.driver1.hash_type = 'md5'
func = self.driver1._get_hash_function()
self.assertTrue(func)
self.driver1.hash_type = 'sha1'
func = self.driver1._get_hash_function()
self.assertTrue(func)
try:
self.driver1.hash_type = 'invalid-hash-function'
func = self.driver1._get_hash_function()
except RuntimeError:
pass
else:
self.fail('Invalid hash type but exception was not thrown')
def test_upload_no_content_type_supplied_or_detected(self):
iterator = StringIO()
upload_func = Mock()
upload_func.return_value = True, '', 0
# strict_mode is disabled, default content type should be used
self.driver1.connection = Mock()
self.driver1._upload_object(object_name='test',
content_type=None,
upload_func=upload_func,
upload_func_kwargs={},
request_path='/',
stream=iterator)
headers = self.driver1.connection.request.call_args[-1]['headers']
self.assertEqual(headers['Content-Type'], DEFAULT_CONTENT_TYPE)
# strict_mode is enabled, exception should be thrown
self.driver1.strict_mode = True
expected_msg = ('File content-type could not be guessed and no'
' content_type value is provided')
self.assertRaisesRegexp(AttributeError, expected_msg,
self.driver1._upload_object,
object_name='test',
content_type=None,
upload_func=upload_func,
upload_func_kwargs={},
request_path='/',
stream=iterator)
if __name__ == '__main__':
sys.exit(unittest.main())<|fim▁end|> | class BaseMockRawResponse(MockHttp):
def _(self, method, url, body, headers): |
<|file_name|>EnpassConversionHelper.js<|end_file_name|><|fim▁begin|>import * as randomMC from 'random-material-color';
import Localisation from '@js/Classes/Localisation';
import CustomFieldsHelper from '@js/Helper/Import/CustomFieldsHelper';
import ImportMappingHelper from '@js/Helper/Import/ImportMappingHelper';
export default class EnpassConversionHelper {
/**
*
* @param json
* @param options
* @returns {Promise<{data: {tags: Array, folders: Array, passwords: Array}, errors: Array}>}
*/
static async processJson(json, options) {
let data = JSON.parse(json);
if(!data.items) throw new Error('File does not implement Enpass 6 format');
if(!Array.isArray(data.folders)) data.folders = [];
let {tags, tagMap} = await this._processTags(data.folders),
folders = await this._processFolders(data.items),
{passwords, errors} = await this._processPasswords(data.items, tagMap, options);
return {
data: {tags, folders, passwords},
errors
};
}
/**
*
* @param data
* @returns {Promise<{tags: Array, tagMap}>}
* @private
*/
static async _processTags(data) {
let tags = [],
tagMap = {},
labelMap = await ImportMappingHelper.getTagLabelMapping();
for(let i = 0; i < data.length; i++) {
let tag = data[i],
id = tag.title.toLowerCase();
if(id === '') continue;
if(!labelMap.hasOwnProperty(id)) {
labelMap[id] = tag.uuid;
tagMap[tag.uuid] = tag.uuid;
tags.push({id: tag.uuid, label: tag.title, color: randomMC.getColor()});
} else {
tagMap[tag.uuid] = labelMap[id];
}
}
return {tags, tagMap};
}
/**
*
* @param data
* @returns {Promise<Array>}
* @private
*/
static async _processFolders(data) {
let folders = [],
categories = this._getCategoryLabels(),
labelMap = await ImportMappingHelper.getFolderLabelMapping();
for(let i = 0; i < data.length; i++) {
let folder = data[i].category,
label = folder.capitalize();
if(categories.hasOwnProperty(folder)) {
label = categories[folder];
}
let id = label.toLowerCase();
if(!labelMap.hasOwnProperty(id)) {
labelMap[id] = id;
folders.push({id, label});
}
data[i].category = labelMap[id];
}
return folders;
}
/**
*
* @param data
* @param tagMap
* @param options
* @returns {Promise<{passwords: Array, errors: Array}>}
* @private
*/
static async _processPasswords(data, tagMap, options) {
let passwords = [],
errors = [],
mapping = await ImportMappingHelper.getPasswordLabelMapping();
for(let i = 0; i < data.length; i++) {
let password = this._processPassword(data[i], mapping, tagMap, options.skipEmpty, errors);
passwords.push(password);
}
return {passwords, errors};
}
/**
*
* @param element
* @param mapping
* @param tagMap
* @param skipEmpty
* @param errors
* @returns {{customFields: Array, password: string, favorite: boolean, folder: string, label: string, notes: string}}
* @private
*/
static _processPassword(element, mapping, tagMap, skipEmpty, errors) {
let label = element.title;
if(element.hasOwnProperty('subtitle') && element.subtitle.length !== 0 &&
element.subtitle !== label &&
(!element.hasOwnProperty('template_type') || element.template_type !== 'login.default')) {
label = `${label} – ${element.subtitle}`;
}
let password = {
customFields: [],
password : 'password-missing-during-import',
favorite : element.favorite === 1,
folder : element.category,
notes : element.note,
label,
tags : []
};<|fim▁hole|>
if(element.hasOwnProperty('fields')) {
this._processPasswordFields(element, password, skipEmpty, errors);
}
if(element.hasOwnProperty('attachments')) {
this._logConversionError('"{label}" has files attached which can not be imported.', password, errors);
}
return password;
}
/**
*
* @param element
* @param password
* @param skipEmpty
* @param errors
* @private
*/
static _processPasswordFields(element, password, skipEmpty, errors) {
let commonFields = {password: false, username: false, url: false};
for(let i = 0; i < element.fields.length; i++) {
let field = element.fields[i];
if(field.type === 'section') continue;
if(skipEmpty && field.value === '') continue;
if(field.value !== '' && this._processIfCommonField(commonFields, field, password)) continue;
this._processCustomField(field, password, errors);
}
if(password.customFields.length === 0) delete password.customFields;
}
/**
*
* @param field
* @param errors
* @param password
* @private
*/
static _processCustomField(field, password, errors) {
let type = field.sensitive ? 'secret':field.type;
CustomFieldsHelper.createCustomField(password, errors, field.value, field.label, type);
}
/**
*
* @param baseFields
* @param field
* @param password
* @returns {boolean}
* @private
*/
static _processIfCommonField(baseFields, field, password) {
if(!baseFields.password && field.type === 'password') {
baseFields.password = true;
password.password = field.value;
password.edited = field.value_updated_at;
return true;
} else if(!baseFields.username && field.type === 'username') {
baseFields.username = true;
password.username = field.value;
return true;
} else if(!baseFields.url && field.type === 'url') {
baseFields.url = true;
password.url = field.value;
return true;
}
return false;
}
/**
*
* @param element
* @param password
* @param tagMap
* @private
*/
static _processPasswordTags(element, password, tagMap) {
if(element.hasOwnProperty('folders')) {
for(let i = 0; i < element.folders.length; i++) {
let id = element.folders[i].toLowerCase();
if(tagMap.hasOwnProperty(id)) password.tags.push(tagMap[id]);
}
}
}
/**
*
* @returns {{note: string, license: string, password: string, computer: string, identity: string, login: string, travel: string, creditcard: string, finance: string, misc: string}}
* @private
*/
static _getCategoryLabels() {
return {
login : Localisation.translate('Logins'),
creditcard: Localisation.translate('Credit Cards'),
identity : Localisation.translate('Identities'),
note : Localisation.translate('Notes'),
password : Localisation.translate('Passwords'),
finance : Localisation.translate('Finances'),
license : Localisation.translate('Licenses'),
travel : Localisation.translate('Travel'),
computer : Localisation.translate('Computers'),
misc : Localisation.translate('Miscellaneous')
};
}
/**
*
* @param text
* @param vars
* @param errors
* @private
*/
static _logConversionError(text, vars, errors) {
let message = Localisation.translate(text, vars);
errors.push(message);
console.error(message, vars);
}
}<|fim▁end|> |
ImportMappingHelper.checkPasswordDuplicate(mapping, password);
this._processPasswordTags(element, password, tagMap); |
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>const TelegramBot = require('node-telegram-bot-api');
const EnchancedTelegramTest = require('./EnhancedTelegramTest');
const { track, trackInline } = require('../src/analytics');
const { initHandlers } = require('../src/handlers');
function createBot() {<|fim▁hole|>}
function createTestServer(callback) {
const bot = createBot();
const analytics = {
track: (msg, name) => track(msg, name, callback),
trackInline: msg => trackInline(msg, callback)
};
initHandlers(bot, analytics);
return new EnchancedTelegramTest(bot);
}
module.exports = {
createBot,
createTestServer
};<|fim▁end|> | return new TelegramBot('0123456789abcdef', { webhook: true }); |
<|file_name|>BouncyBall.java<|end_file_name|><|fim▁begin|>package net.yottabyte.game;
import java.awt.*;
/**
* @author Jason Fagan
*/
public class BouncyBall {
private int x;
private int y;
private int vx;
private int vy;
private int radius;
private int drag;
private int mass;
public BouncyBall(int x, int y, int vx, int vy, int radius, int mass, int drag) {
this.x = x;
this.y = y;
this.vx = vx;
this.vy = vy;
this.radius = radius;
this.mass = mass;
this.drag = drag;
}
public void move(Rectangle rect)
{
x += vx;
y += vy;
hitWall(rect);
}
public void paint(Graphics2D g2d) {
g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
g2d.setColor(Color.WHITE);
g2d.fillOval(x, y, radius, radius);
}
public void hitWall(Rectangle rect)
{
if (x <= 0) {
x = 0;
vx = -(vx * drag);
} else if (x + radius >= rect.width) {
x = rect.width - radius;
vx = -(vx * drag);
}
if (y < 0) {
y = 0;
vy = -(vy * drag);
} else if (y + (radius * 2) >= rect.height) {
y = rect.height - (radius * 2);
vy = -(vy * drag);
}
}
// see http://en.wikipedia.org/wiki/Elastic_collision
public boolean hasCollidedWith(BouncyBall ball) {
int dx = Math.abs(getCenterX() - ball.getCenterX());
int dy = Math.abs(getCenterY() - ball.getCenterY());
double distance = Math.sqrt(dx * dx + dy * dy);
return distance <= radius;
}
public void handleCollision(BouncyBall ball) {
int dx = getCenterX() - ball.getCenterX();
int dy = getCenterY() - ball.getCenterY();
// Calculate collision angle
double ca = Math.atan2(dy, dx);
// Calculate force magnitudes
double mgt1 = Math.sqrt(vx * vx + vy * vy);
double mgt2 = Math.sqrt(ball.getVx() * ball.getVx() + ball.getVy() * ball.getVy());
// Calculate direction
double dir1 = Math.atan2(vy, vx);
double dir2 = Math.atan2(ball.getVy(), ball.getVx());
// Calculate new velocities
double vx1 = mgt1 * Math.cos(dir1 - ca);
double vy1 = mgt1 * Math.sin(dir1 - ca);
double vx2 = mgt2 * Math.cos(dir2 - ca);
double vy2 = mgt2 * Math.sin(dir2 - ca);
double vfx1 = ((mass - ball.getMass()) * vx1 + (ball.getMass() + ball.getMass()) * vx2) / (mass + ball.getMass());
double fvx2 = ((mass + mass) * vx1 + (ball.getMass() - mass) * vx2) / (mass + ball.getMass());
double fvy1 = vy1;
double fvy2 = vy2;
vx = (int) (Math.cos(ca) * vfx1 + Math.cos(ca + Math.PI / 2) * fvy1);
vx = (int) (Math.sin(ca) * vfx1 + Math.sin(ca + Math.PI / 2) * fvy1);
ball.setVx((int) (Math.cos(ca) * fvx2 + Math.cos(ca + Math.PI / 2) * fvy2));
ball.setVy((int) (Math.sin(ca) * fvx2 + Math.sin(ca + Math.PI / 2) * fvy2));
}
public int getCenterX() {
return x - radius / 2;
}
public int getCenterY() {
return y - radius / 2;
}
public int getX() {
return x;
}
public void setX(int x) {
this.x = x;
}
public int getY() {
return y;
}
public void setY(int y) {
this.y = y;
}
public int getVx() {
return vx;
}
public void setVx(int vx) {
this.vx = vx;
}
public int getVy() {
return vy;
}
public void setVy(int vy) {
this.vy = vy;
}
public int getRadius() {
return radius;
}
public void setRadius(int radius) {
this.radius = radius;
}
public int getDrag() {
return drag;<|fim▁hole|> this.drag = drag;
}
public int getMass() {
return mass;
}
public void setMass(int mass) {
this.mass = mass;
}
}<|fim▁end|> | }
public void setDrag(int drag) { |
<|file_name|>local_data.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
Task local data management
Allows storing arbitrary types inside task-local-storage (TLS), to be accessed
anywhere within a task, keyed by a global pointer parameterized over the type of
the TLS slot. Useful for dynamic variables, singletons, and interfacing with
foreign code with bad callback interfaces.
To declare a new key for storing local data of a particular type, use the
`local_data_key!` macro. This macro will expand to a `static` item appropriately
named and annotated. This name is then passed to the functions in this module to
modify/read the slot specified by the key.
```rust
use std::local_data;
local_data_key!(key_int: int)
local_data_key!(key_vector: ~[int])
local_data::set(key_int, 3);
local_data::get(key_int, |opt| assert_eq!(opt.map(|x| *x), Some(3)));
local_data::set(key_vector, ~[4]);
local_data::get(key_vector, |opt| assert_eq!(*opt.unwrap(), ~[4]));
```
*/
// Casting 'Arcane Sight' reveals an overwhelming aura of Transmutation
// magic.
use cast;
use libc;
use prelude::*;
use rt::task::{Task, LocalStorage};
use util;
/**
* Indexes a task-local data slot. This pointer is used for comparison to
* differentiate keys from one another. The actual type `T` is not used anywhere
* as a member of this type, except that it is parameterized with it to define
* the type of each key's value.
*
* The value of each Key is of the singleton enum KeyValue. These also have the
* same name as `Key` and their purpose is to take up space in the programs data
* sections to ensure that each value of the `Key` type points to a unique
* location.
*/
pub type Key<T> = &'static KeyValue<T>;
#[allow(missing_doc)]
pub enum KeyValue<T> { Key }
#[allow(missing_doc)]
trait LocalData {}
impl<T: 'static> LocalData for T {}
// The task-local-map stores all TLS information for the currently running task.
// It is stored as an owned pointer into the runtime, and it's only allocated
// when TLS is used for the first time. This map must be very carefully
// constructed because it has many mutable loans unsoundly handed out on it to
// the various invocations of TLS requests.
//
// One of the most important operations is loaning a value via `get` to a<|fim▁hole|>// invalidated because upon returning its loan state must be updated. Currently
// the TLS map is a vector, but this is possibly dangerous because the vector
// can be reallocated/moved when new values are pushed onto it.
//
// This problem currently isn't solved in a very elegant way. Inside the `get`
// function, it internally "invalidates" all references after the loan is
// finished and looks up into the vector again. In theory this will prevent
// pointers from being moved under our feet so long as LLVM doesn't go too crazy
// with the optimizations.
//
// n.b. If TLS is used heavily in future, this could be made more efficient with
// a proper map.
#[doc(hidden)]
pub type Map = ~[Option<(*libc::c_void, TLSValue, LoanState)>];
type TLSValue = ~LocalData;
// Gets the map from the runtime. Lazily initialises if not done so already.
unsafe fn get_local_map() -> &mut Map {
use rt::local::Local;
let task: *mut Task = Local::unsafe_borrow();
match &mut (*task).storage {
// If the at_exit function is already set, then we just need to take
// a loan out on the TLS map stored inside
&LocalStorage(Some(ref mut map_ptr)) => {
return map_ptr;
}
// If this is the first time we've accessed TLS, perform similar
// actions to the oldsched way of doing things.
&LocalStorage(ref mut slot) => {
*slot = Some(~[]);
match *slot {
Some(ref mut map_ptr) => { return map_ptr }
None => abort()
}
}
}
}
#[deriving(Eq)]
enum LoanState {
NoLoan, ImmLoan, MutLoan
}
impl LoanState {
fn describe(&self) -> &'static str {
match *self {
NoLoan => "no loan",
ImmLoan => "immutable",
MutLoan => "mutable"
}
}
}
fn key_to_key_value<T: 'static>(key: Key<T>) -> *libc::c_void {
unsafe { cast::transmute(key) }
}
/// Removes a task-local value from task-local storage. This will return
/// Some(value) if the key was present in TLS, otherwise it will return None.
///
/// A runtime assertion will be triggered it removal of TLS value is attempted
/// while the value is still loaned out via `get` or `get_mut`.
pub fn pop<T: 'static>(key: Key<T>) -> Option<T> {
let map = unsafe { get_local_map() };
let key_value = key_to_key_value(key);
for entry in map.mut_iter() {
match *entry {
Some((k, _, loan)) if k == key_value => {
if loan != NoLoan {
fail!("TLS value cannot be removed because it is currently \
borrowed as {}", loan.describe());
}
// Move the data out of the `entry` slot via util::replace.
// This is guaranteed to succeed because we already matched
// on `Some` above.
let data = match util::replace(entry, None) {
Some((_, data, _)) => data,
None => abort()
};
// Move `data` into transmute to get out the memory that it
// owns, we must free it manually later.
let (_vtable, alloc): (uint, ~T) = unsafe {
cast::transmute(data)
};
// Now that we own `alloc`, we can just move out of it as we
// would with any other data.
return Some(*alloc);
}
_ => {}
}
}
return None;
}
/// Retrieves a value from TLS. The closure provided is yielded `Some` of a
/// reference to the value located in TLS if one exists, or `None` if the key
/// provided is not present in TLS currently.
///
/// It is considered a runtime error to attempt to get a value which is already
/// on loan via the `get_mut` method provided.
pub fn get<T: 'static, U>(key: Key<T>, f: |Option<&T>| -> U) -> U {
get_with(key, ImmLoan, f)
}
/// Retrieves a mutable value from TLS. The closure provided is yielded `Some`
/// of a reference to the mutable value located in TLS if one exists, or `None`
/// if the key provided is not present in TLS currently.
///
/// It is considered a runtime error to attempt to get a value which is already
/// on loan via this or the `get` methods. This is similar to how it's a runtime
/// error to take two mutable loans on an `@mut` box.
pub fn get_mut<T: 'static, U>(key: Key<T>, f: |Option<&mut T>| -> U) -> U {
get_with(key, MutLoan, |x| {
match x {
None => f(None),
// We're violating a lot of compiler guarantees with this
// invocation of `transmute_mut`, but we're doing runtime checks to
// ensure that it's always valid (only one at a time).
//
// there is no need to be upset!
Some(x) => { f(Some(unsafe { cast::transmute_mut(x) })) }
}
})
}
// Common implementation of `get` and `get_mut`: looks up `key` in the
// task-local map, marks the slot with the requested loan `state`, yields a
// reference to the stored value to `f`, and clears the loan afterwards.
fn get_with<T: 'static,
            U>(
            key: Key<T>,
            state: LoanState,
            f: |Option<&T>| -> U)
            -> U {
    // This function must be extremely careful. Because TLS can store owned
    // values, and we must have some form of `get` function other than `pop`,
    // this function has to give a `&` reference back to the caller.
    //
    // One option is to return the reference, but this cannot be sound because
    // the actual lifetime of the object is not known. The slot in TLS could not
    // be modified until the object goes out of scope, but the TLS code cannot
    // know when this happens.
    //
    // For this reason, the reference is yielded to a specified closure. This
    // way the TLS code knows exactly what the lifetime of the yielded pointer
    // is, allowing callers to acquire references to owned data. This is also
    // sound so long as measures are taken to ensure that while a TLS slot is
    // loaned out to a caller, it's not modified recursively.
    let map = unsafe { get_local_map() };
    let key_value = key_to_key_value(key);

    // Linear scan for the slot whose key pointer matches.
    let pos = map.iter().position(|entry| {
        match *entry {
            Some((k, _, _)) if k == key_value => true, _ => false
        }
    });
    match pos {
        None => { return f(None); }
        Some(i) => {
            let ret;
            // Only clear the loan on exit if we were the ones who set it
            // (a nested immutable borrow leaves the outer loan in place).
            let mut return_loan = false;
            match map[i] {
                Some((_, ref data, ref mut loan)) => {
                    // Loan rules: any number of immutable loans may coexist,
                    // but a mutable loan must be exclusive.
                    match (state, *loan) {
                        (_, NoLoan) => {
                            *loan = state;
                            return_loan = true;
                        }
                        (ImmLoan, ImmLoan) => {}
                        (want, cur) => {
                            fail!("TLS slot cannot be borrowed as {} because \
                                   it is already borrowed as {}",
                                  want.describe(), cur.describe());
                        }
                    }

                    // data was created with `~T as ~LocalData`, so we extract
                    // pointer part of the trait, (as ~T), and then use
                    // compiler coercions to achieve a '&' pointer.
                    unsafe {
                        match *cast::transmute::<&TLSValue, &(uint, ~T)>(data){
                            (_vtable, ref alloc) => {
                                let value: &T = *alloc;
                                ret = f(Some(value));
                            }
                        }
                    }
                }
                _ => abort()
            }

            // n.b. 'data' and 'loans' are both invalid pointers at the point
            // 'f' returned because `f` could have appended more TLS items which
            // in turn relocated the vector. Hence we do another lookup here to
            // fixup the loans.
            if return_loan {
                match map[i] {
                    Some((_, _, ref mut loan)) => { *loan = NoLoan; }
                    None => abort()
                }
            }
            return ret;
        }
    }
}
/// Halts the process immediately. Used when the TLS map is found in an
/// impossible state (e.g. a slot we just matched has vanished).
fn abort() -> ! {
    unsafe { libc::abort() }
}
/// Inserts a value into task local storage. If the key is already present in
/// TLS, then the previous value is removed and replaced with the provided data.
///
/// It is considered a runtime error to attempt to set a key which is currently
/// on loan via the `get` or `get_mut` methods.
pub fn set<T: 'static>(key: Key<T>, data: T) {
    let map = unsafe { get_local_map() };
    let keyval = key_to_key_value(key);

    // When the task-local map is destroyed, all the data needs to be cleaned
    // up. For this reason we can't do some clever tricks to store '~T' as a
    // '*c_void' or something like that. To solve the problem, we cast
    // everything to a trait (LocalData) which is then stored inside the map.
    // Upon destruction of the map, all the objects will be destroyed and the
    // traits have enough information about them to destroy themselves.
    let data = ~data as ~LocalData:;

    // Returns the index to write the new entry into: the existing slot for
    // this key if present (failing if it is on loan), otherwise any free
    // (`None`) slot, otherwise `None` meaning "append".
    fn insertion_position(map: &mut Map,
                          key: *libc::c_void) -> Option<uint> {
        // First see if the map contains this key already
        let curspot = map.iter().position(|entry| {
            match *entry {
                Some((ekey, _, loan)) if key == ekey => {
                    if loan != NoLoan {
                        fail!("TLS value cannot be overwritten because it is
                               already borrowed as {}", loan.describe())
                    }
                    true
                }
                _ => false,
            }
        });
        // If it doesn't contain the key, just find a slot that's None
        match curspot {
            Some(i) => Some(i),
            None => map.iter().position(|entry| entry.is_none())
        }
    }

    // The type of the local data map must ascribe to Send, so we do the
    // transmute here to add the Send bound back on. This doesn't actually
    // matter because TLS will always own the data (until its moved out) and
    // we're not actually sending it to other schedulers or anything.
    let data: ~LocalData = unsafe { cast::transmute(data) };
    match insertion_position(map, keyval) {
        Some(i) => { map[i] = Some((keyval, data, NoLoan)); }
        None => { map.push(Some((keyval, data, NoLoan))); }
    }
}
/// Modifies a task-local value by temporarily removing it from task-local
/// storage and then re-inserting if `Some` is returned from the closure.
///
/// This function will have the same runtime errors as generated from `pop` and
/// `set` (the key must not currently be on loan).
pub fn modify<T: 'static>(key: Key<T>, f: |Option<T>| -> Option<T>) {
    // `pop` moves the old value (if any) out; returning `None` from `f`
    // leaves the key unset.
    match f(pop(key)) {
        Some(next) => { set(key, next); }
        None => {}
    }
}
// Unit tests exercising set/get/get_mut/pop/modify, per-task isolation of the
// map, and the runtime loan-state errors (`#[should_fail]` cases).
#[cfg(test)]
mod tests {
    use prelude::*;
    use super::*;
    use task;

    #[test]
    fn test_tls_multitask() {
        static my_key: Key<~str> = &Key;
        set(my_key, ~"parent data");
        do task::spawn {
            // TLS shouldn't carry over.
            assert!(get(my_key, |k| k.map(|k| (*k).clone())).is_none());
            set(my_key, ~"child data");
            assert!(get(my_key, |k| k.map(|k| (*k).clone())).unwrap() ==
                    ~"child data");
            // should be cleaned up for us
        }
        // Must work multiple times
        assert!(get(my_key, |k| k.map(|k| (*k).clone())).unwrap() == ~"parent data");
        assert!(get(my_key, |k| k.map(|k| (*k).clone())).unwrap() == ~"parent data");
        assert!(get(my_key, |k| k.map(|k| (*k).clone())).unwrap() == ~"parent data");
    }

    #[test]
    fn test_tls_overwrite() {
        static my_key: Key<~str> = &Key;
        set(my_key, ~"first data");
        set(my_key, ~"next data"); // Shouldn't leak.
        assert!(get(my_key, |k| k.map(|k| (*k).clone())).unwrap() == ~"next data");
    }

    #[test]
    fn test_tls_pop() {
        static my_key: Key<~str> = &Key;
        set(my_key, ~"weasel");
        assert!(pop(my_key).unwrap() == ~"weasel");
        // Pop must remove the data from the map.
        assert!(pop(my_key).is_none());
    }

    #[test]
    fn test_tls_modify() {
        static my_key: Key<~str> = &Key;
        // First call: key absent, closure inserts the initial value.
        modify(my_key, |data| {
            match data {
                Some(ref val) => fail!("unwelcome value: {}", *val),
                None => Some(~"first data")
            }
        });
        // Second call: closure observes and replaces the stored value.
        modify(my_key, |data| {
            match data {
                Some(~"first data") => Some(~"next data"),
                Some(ref val) => fail!("wrong value: {}", *val),
                None => fail!("missing value")
            }
        });
        assert!(pop(my_key).unwrap() == ~"next data");
    }

    #[test]
    fn test_tls_crust_automorestack_memorial_bug() {
        // This might result in a stack-canary clobber if the runtime fails to
        // set sp_limit to 0 when calling the cleanup extern - it might
        // automatically jump over to the rust stack, which causes next_c_sp
        // to get recorded as something within a rust stack segment. Then a
        // subsequent upcall (esp. for logging, think vsnprintf) would run on
        // a stack smaller than 1 MB.
        static my_key: Key<~str> = &Key;
        do task::spawn {
            set(my_key, ~"hax");
        }
    }

    #[test]
    fn test_tls_multiple_types() {
        static str_key: Key<~str> = &Key;
        static box_key: Key<@()> = &Key;
        static int_key: Key<int> = &Key;
        do task::spawn {
            set(str_key, ~"string data");
            set(box_key, @());
            set(int_key, 42);
        }
    }

    #[test]
    #[allow(dead_code)]
    fn test_tls_overwrite_multiple_types() {
        static str_key: Key<~str> = &Key;
        static box_key: Key<@()> = &Key;
        static int_key: Key<int> = &Key;
        do task::spawn {
            set(str_key, ~"string data");
            set(str_key, ~"string data 2");
            set(box_key, @());
            set(box_key, @());
            set(int_key, 42);
            // This could cause a segfault if overwriting-destruction is done
            // with the crazy polymorphic transmute rather than the provided
            // finaliser.
            set(int_key, 31337);
        }
    }

    #[test]
    #[should_fail]
    fn test_tls_cleanup_on_failure() {
        static str_key: Key<~str> = &Key;
        static box_key: Key<@()> = &Key;
        static int_key: Key<int> = &Key;
        set(str_key, ~"parent data");
        set(box_key, @());
        do task::spawn {
            // spawn_linked
            set(str_key, ~"string data");
            set(box_key, @());
            set(int_key, 42);
            fail!();
        }
        // Not quite nondeterministic.
        set(int_key, 31337);
        fail!();
    }

    #[test]
    fn test_static_pointer() {
        static key: Key<&'static int> = &Key;
        static VALUE: int = 0;
        let v: &'static int = &VALUE;
        set(key, v);
    }

    #[test]
    fn test_owned() {
        // Nested immutable loans on the same key are allowed.
        static key: Key<~int> = &Key;
        set(key, ~1);

        get(key, |v| {
            get(key, |v| {
                get(key, |v| {
                    assert_eq!(**v.unwrap(), 1);
                });
                assert_eq!(**v.unwrap(), 1);
            });
            assert_eq!(**v.unwrap(), 1);
        });
        set(key, ~2);
        get(key, |v| {
            assert_eq!(**v.unwrap(), 2);
        })
    }

    #[test]
    fn test_get_mut() {
        static key: Key<int> = &Key;
        set(key, 1);

        get_mut(key, |v| {
            *v.unwrap() = 2;
        });

        get(key, |v| {
            assert_eq!(*v.unwrap(), 2);
        })
    }

    #[test]
    fn test_same_key_type() {
        // Distinct statics of the same type must map to distinct slots.
        static key1: Key<int> = &Key;
        static key2: Key<int> = &Key;
        static key3: Key<int> = &Key;
        static key4: Key<int> = &Key;
        static key5: Key<int> = &Key;

        set(key1, 1);
        set(key2, 2);
        set(key3, 3);
        set(key4, 4);
        set(key5, 5);

        get(key1, |x| assert_eq!(*x.unwrap(), 1));
        get(key2, |x| assert_eq!(*x.unwrap(), 2));
        get(key3, |x| assert_eq!(*x.unwrap(), 3));
        get(key4, |x| assert_eq!(*x.unwrap(), 4));
        get(key5, |x| assert_eq!(*x.unwrap(), 5));
    }

    #[test]
    #[should_fail]
    fn test_nested_get_set1() {
        static key: Key<int> = &Key;
        set(key, 4);
        get(key, |_| {
            set(key, 4);
        })
    }

    #[test]
    #[should_fail]
    fn test_nested_get_mut2() {
        static key: Key<int> = &Key;
        set(key, 4);
        get(key, |_| {
            get_mut(key, |_| {})
        })
    }

    #[test]
    #[should_fail]
    fn test_nested_get_mut3() {
        static key: Key<int> = &Key;
        set(key, 4);
        get_mut(key, |_| {
            get(key, |_| {})
        })
    }

    #[test]
    #[should_fail]
    fn test_nested_get_mut4() {
        static key: Key<int> = &Key;
        set(key, 4);
        get_mut(key, |_| {
            get_mut(key, |_| {})
        })
    }
}
<|file_name|>new-speaker-test.js<|end_file_name|><|fim▁begin|>import { test } from 'ember-qunit';
import moduleFor from 'open-event-frontend/tests/helpers/unit-helper';
<|fim▁hole|>moduleFor('controller:public/cfs/new-speaker', 'Unit | Controller | public/cfs/new speaker', []);
// Smoke test: the controller can be looked up and instantiated.
test('it exists', function(assert) {
  let controller = this.subject();
  assert.ok(controller);
});
<|file_name|>common.rs<|end_file_name|><|fim▁begin|>use byteorder::ByteOrder;
/// Timestamp resolution of the pcap
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum TsResolution {
    /// Timestamps carry microsecond precision (classic pcap).
    MicroSecond,
    /// Timestamps carry nanosecond precision.
    NanoSecond
}
/// Endianness of the pcap
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Endianness {
    /// Multi-byte fields are stored most-significant byte first.
    Big,
    /// Multi-byte fields are stored least-significant byte first.
    Little
}
impl Endianness {
    /// Returns `true` when the pcap is little-endian.
    pub fn is_little(self) -> bool {
        !self.is_big()
    }

    /// Returns `true` when the pcap is big-endian.
    pub fn is_big(self) -> bool {
        match self {
            Endianness::Big => true,
            Endianness::Little => false
        }
    }

    /// Returns the `Endianness` corresponding to the `ByteOrder` type `B`,
    /// detected by writing the integer 1 and inspecting its byte layout.
    pub fn new<B: ByteOrder>() -> Self {
        match B::read_u32(&[0, 0, 0, 1]) {
            1 => Endianness::Big,
            _ => Endianness::Little,
        }
    }
}
/// Data link type
///
/// The link-layer header type specifies the first protocol of the packet.
///
/// See [http://www.tcpdump.org/linktypes.html](http://www.tcpdump.org/linktypes.html)
#[allow(non_camel_case_types)]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum DataLink {
NULL,
ETHERNET,
AX25,
IEEE802_5,
ARCNET_BSD,
SLIP,
PPP,
FDDI,
PPP_HDLC,
PPP_ETHER,
ATM_RFC1483,
RAW,
C_HDLC,
IEEE802_11,
FRELAY,
LOOP,
LINUX_SLL,
LTALK,
PFLOG,
IEEE802_11_PRISM,
IP_OVER_FC,
SUNATM,
IEEE802_11_RADIOTAP,
ARCNET_LINUX,
APPLE_IP_OVER_IEEE1394,
MTP2_WITH_PHDR,
MTP2,
MTP3,
SCCP,
DOCSIS,
LINUX_IRDA,
USER0,
USER1,
USER2,
USER3,
USER4,
USER5,
USER6,
USER7,
USER8,
USER9,
USER10,
USER11,
USER12,
USER13,
USER14,
USER15,
IEEE802_11_AVS,
BACNET_MS_TP,
PPP_PPPD,
GPRS_LLC,
GPF_T,
GPF_F,
LINUX_LAPD,
BLUETOOTH_HCI_H4,
USB_LINUX,
PPI,
IEEE802_15_4,
SITA,
ERF,
BLUETOOTH_HCI_H4_WITH_PHDR,
AX25_KISS,
LAPD,
PPP_WITH_DIR,
C_HDLC_WITH_DIR,
FRELAY_WITH_DIR,
IPMB_LINUX,
IEEE802_15_4_NONASK_PHY,
USB_LINUX_MMAPPED,
FC_2,
FC_2_WITH_FRAME_DELIMS,
IPNET,
CAN_SOCKETCAN,
IPV4,
IPV6,
IEEE802_15_4_NOFCS,
DBUS,
DVB_CI,
MUX27010,
STANAG_5066_D_PDU,
NFLOG,
NETANALYZER,
NETANALYZER_TRANSPARENT,
IPOIB,
MPEG_2_TS,
NG40,
NFC_LLCP,
INFINIBAND,
SCTP,
USBPCAP,
RTAC_SERIAL,
BLUETOOTH_LE_LL,<|fim▁hole|> BLUETOOTH_LE_LL_WITH_PHDR,
PROFIBUS_DL,
PKTAP,
EPON,
IPMI_HPM_2,
ZWAVE_R1_R2,
ZWAVE_R3,
WATTSTOPPER_DLM,
ISO_14443,
RDS,
USB_DARWIN,
SDLC,
Unknown(u32)
}
impl From<u32> for DataLink {
fn from(n: u32) -> DataLink {
match n {
0 => DataLink::NULL,
1 => DataLink::ETHERNET,
3 => DataLink::AX25,
6 => DataLink::IEEE802_5,
7 => DataLink::ARCNET_BSD,
8 => DataLink::SLIP,
9 => DataLink::PPP,
10 => DataLink::FDDI,
50 => DataLink::PPP_HDLC,
51 => DataLink::PPP_ETHER,
100 => DataLink::ATM_RFC1483,
101 => DataLink::RAW,
104 => DataLink::C_HDLC,
105 => DataLink::IEEE802_11,
107 => DataLink::FRELAY,
108 => DataLink::LOOP,
113 => DataLink::LINUX_SLL,
114 => DataLink::LTALK,
117 => DataLink::PFLOG,
119 => DataLink::IEEE802_11_PRISM,
122 => DataLink::IP_OVER_FC,
123 => DataLink::SUNATM,
127 => DataLink::IEEE802_11_RADIOTAP,
129 => DataLink::ARCNET_LINUX,
138 => DataLink::APPLE_IP_OVER_IEEE1394,
139 => DataLink::MTP2_WITH_PHDR,
140 => DataLink::MTP2,
141 => DataLink::MTP3,
142 => DataLink::SCCP,
143 => DataLink::DOCSIS,
144 => DataLink::LINUX_IRDA,
147 => DataLink::USER0,
148 => DataLink::USER1,
149 => DataLink::USER2,
150 => DataLink::USER3,
151 => DataLink::USER4,
152 => DataLink::USER5,
153 => DataLink::USER6,
154 => DataLink::USER7,
155 => DataLink::USER8,
156 => DataLink::USER9,
157 => DataLink::USER10,
158 => DataLink::USER11,
159 => DataLink::USER12,
160 => DataLink::USER13,
161 => DataLink::USER14,
162 => DataLink::USER15,
163 => DataLink::IEEE802_11_AVS,
165 => DataLink::BACNET_MS_TP,
166 => DataLink::PPP_PPPD,
169 => DataLink::GPRS_LLC,
170 => DataLink::GPF_T,
171 => DataLink::GPF_F,
177 => DataLink::LINUX_LAPD,
187 => DataLink::BLUETOOTH_HCI_H4,
189 => DataLink::USB_LINUX,
192 => DataLink::PPI,
195 => DataLink::IEEE802_15_4,
196 => DataLink::SITA,
197 => DataLink::ERF,
201 => DataLink::BLUETOOTH_HCI_H4_WITH_PHDR,
202 => DataLink::AX25_KISS,
203 => DataLink::LAPD,
204 => DataLink::PPP_WITH_DIR,
205 => DataLink::C_HDLC_WITH_DIR,
206 => DataLink::FRELAY_WITH_DIR,
209 => DataLink::IPMB_LINUX,
215 => DataLink::IEEE802_15_4_NONASK_PHY,
220 => DataLink::USB_LINUX_MMAPPED,
224 => DataLink::FC_2,
225 => DataLink::FC_2_WITH_FRAME_DELIMS,
226 => DataLink::IPNET,
227 => DataLink::CAN_SOCKETCAN,
228 => DataLink::IPV4,
229 => DataLink::IPV6,
230 => DataLink::IEEE802_15_4_NOFCS,
231 => DataLink::DBUS,
235 => DataLink::DVB_CI,
236 => DataLink::MUX27010,
237 => DataLink::STANAG_5066_D_PDU,
239 => DataLink::NFLOG,
240 => DataLink::NETANALYZER,
241 => DataLink::NETANALYZER_TRANSPARENT,
242 => DataLink::IPOIB,
243 => DataLink::MPEG_2_TS,
244 => DataLink::NG40,
245 => DataLink::NFC_LLCP,
247 => DataLink::INFINIBAND,
248 => DataLink::SCTP,
249 => DataLink::USBPCAP,
250 => DataLink::RTAC_SERIAL,
251 => DataLink::BLUETOOTH_LE_LL,
253 => DataLink::NETLINK,
254 => DataLink::BLUETOOTH_LINUX_MONITOR,
255 => DataLink::BLUETOOTH_BREDR_BB,
256 => DataLink::BLUETOOTH_LE_LL_WITH_PHDR,
257 => DataLink::PROFIBUS_DL,
258 => DataLink::PKTAP,
259 => DataLink::EPON,
260 => DataLink::IPMI_HPM_2,
261 => DataLink::ZWAVE_R1_R2,
262 => DataLink::ZWAVE_R3,
263 => DataLink::WATTSTOPPER_DLM,
264 => DataLink::ISO_14443,
265 => DataLink::RDS,
266 => DataLink::USB_DARWIN,
268 => DataLink::SDLC,
_ => DataLink::Unknown(n)
}
}
}
impl From<DataLink> for u32 {
    // Inverse of `From<u32> for DataLink`; the two tables must stay in sync
    // so that `u32::from(DataLink::from(n)) == n` for every assigned value.
    fn from(link: DataLink) -> u32 {
        match link {
            DataLink::NULL => 0,
            DataLink::ETHERNET => 1,
            DataLink::AX25 => 3,
            DataLink::IEEE802_5 => 6,
            DataLink::ARCNET_BSD => 7,
            DataLink::SLIP => 8,
            DataLink::PPP => 9,
            DataLink::FDDI => 10,
            DataLink::PPP_HDLC => 50,
            DataLink::PPP_ETHER => 51,
            DataLink::ATM_RFC1483 => 100,
            DataLink::RAW => 101,
            DataLink::C_HDLC => 104,
            DataLink::IEEE802_11 => 105,
            DataLink::FRELAY => 107,
            DataLink::LOOP => 108,
            DataLink::LINUX_SLL => 113,
            DataLink::LTALK => 114,
            DataLink::PFLOG => 117,
            DataLink::IEEE802_11_PRISM => 119,
            DataLink::IP_OVER_FC => 122,
            DataLink::SUNATM => 123,
            DataLink::IEEE802_11_RADIOTAP => 127,
            DataLink::ARCNET_LINUX => 129,
            DataLink::APPLE_IP_OVER_IEEE1394 => 138,
            DataLink::MTP2_WITH_PHDR => 139,
            DataLink::MTP2 => 140,
            DataLink::MTP3 => 141,
            DataLink::SCCP => 142,
            DataLink::DOCSIS => 143,
            DataLink::LINUX_IRDA => 144,
            DataLink::USER0 => 147,
            DataLink::USER1 => 148,
            DataLink::USER2 => 149,
            DataLink::USER3 => 150,
            DataLink::USER4 => 151,
            DataLink::USER5 => 152,
            DataLink::USER6 => 153,
            DataLink::USER7 => 154,
            DataLink::USER8 => 155,
            DataLink::USER9 => 156,
            DataLink::USER10 => 157,
            DataLink::USER11 => 158,
            DataLink::USER12 => 159,
            DataLink::USER13 => 160,
            DataLink::USER14 => 161,
            DataLink::USER15 => 162,
            DataLink::IEEE802_11_AVS => 163,
            DataLink::BACNET_MS_TP => 165,
            DataLink::PPP_PPPD => 166,
            DataLink::GPRS_LLC => 169,
            DataLink::GPF_T => 170,
            DataLink::GPF_F => 171,
            DataLink::LINUX_LAPD => 177,
            DataLink::BLUETOOTH_HCI_H4 => 187,
            DataLink::USB_LINUX => 189,
            DataLink::PPI => 192,
            DataLink::IEEE802_15_4 => 195,
            DataLink::SITA => 196,
            DataLink::ERF => 197,
            DataLink::BLUETOOTH_HCI_H4_WITH_PHDR => 201,
            DataLink::AX25_KISS => 202,
            DataLink::LAPD => 203,
            DataLink::PPP_WITH_DIR => 204,
            DataLink::C_HDLC_WITH_DIR => 205,
            DataLink::FRELAY_WITH_DIR => 206,
            DataLink::IPMB_LINUX => 209,
            DataLink::IEEE802_15_4_NONASK_PHY => 215,
            DataLink::USB_LINUX_MMAPPED => 220,
            DataLink::FC_2 => 224,
            DataLink::FC_2_WITH_FRAME_DELIMS => 225,
            DataLink::IPNET => 226,
            DataLink::CAN_SOCKETCAN => 227,
            DataLink::IPV4 => 228,
            DataLink::IPV6 => 229,
            DataLink::IEEE802_15_4_NOFCS => 230,
            DataLink::DBUS => 231,
            DataLink::DVB_CI => 235,
            DataLink::MUX27010 => 236,
            DataLink::STANAG_5066_D_PDU => 237,
            DataLink::NFLOG => 239,
            DataLink::NETANALYZER => 240,
            DataLink::NETANALYZER_TRANSPARENT => 241,
            DataLink::IPOIB => 242,
            DataLink::MPEG_2_TS => 243,
            DataLink::NG40 => 244,
            DataLink::NFC_LLCP => 245,
            DataLink::INFINIBAND => 247,
            DataLink::SCTP => 248,
            DataLink::USBPCAP => 249,
            DataLink::RTAC_SERIAL => 250,
            DataLink::BLUETOOTH_LE_LL => 251,
            DataLink::NETLINK => 253,
            DataLink::BLUETOOTH_LINUX_MONITOR => 254,
            DataLink::BLUETOOTH_BREDR_BB => 255,
            DataLink::BLUETOOTH_LE_LL_WITH_PHDR => 256,
            DataLink::PROFIBUS_DL => 257,
            DataLink::PKTAP => 258,
            DataLink::EPON => 259,
            DataLink::IPMI_HPM_2 => 260,
            DataLink::ZWAVE_R1_R2 => 261,
            DataLink::ZWAVE_R3 => 262,
            DataLink::WATTSTOPPER_DLM => 263,
            DataLink::ISO_14443 => 264,
            DataLink::RDS => 265,
            DataLink::USB_DARWIN => 266,
            DataLink::SDLC => 268,
            DataLink::Unknown(n) => n
        }
    }
}
BLUETOOTH_LINUX_MONITOR,
BLUETOOTH_BREDR_BB, |
<|file_name|>expected.js<|end_file_name|><|fim▁begin|>"use strict";
var _foob, _foob$test;
var _toArray = function (arr) { return Array.isArray(arr) ? arr : Array.from(arr); };
<|fim▁hole|>(_foob = foob).add.apply(_foob, [foo, bar].concat(_toArray(numbers)));
(_foob$test = foob.test).add.apply(_foob$test, [foo, bar].concat(_toArray(numbers)));<|fim▁end|> | |
<|file_name|>matching_layer.py<|end_file_name|><|fim▁begin|>"""An implementation of Matching Layer."""
import typing
import tensorflow as tf
from keras.engine import Layer
class MatchingLayer(Layer):
"""
Layer that computes a matching matrix between samples in two tensors.
:param normalize: Whether to L2-normalize samples along the
dot product axis before taking the dot product.
If set to True, then the output of the dot product
is the cosine proximity between the two samples.
:param matching_type: the similarity function for matching
:param kwargs: Standard layer keyword arguments.
Examples:
>>> import matchzoo as mz
>>> layer = mz.layers.MatchingLayer(matching_type='dot',
... normalize=True)
>>> num_batch, left_len, right_len, num_dim = 5, 3, 2, 10
>>> layer.build([[num_batch, left_len, num_dim],
... [num_batch, right_len, num_dim]])
"""
def __init__(self, normalize: bool = False,
matching_type: str = 'dot', **kwargs):
""":class:`MatchingLayer` constructor."""
super().__init__(**kwargs)
self._normalize = normalize
self._validate_matching_type(matching_type)
self._matching_type = matching_type
self._shape1 = None
self._shape2 = None
@classmethod
def _validate_matching_type(cls, matching_type: str = 'dot'):
valid_matching_type = ['dot', 'mul', 'plus', 'minus', 'concat']
if matching_type not in valid_matching_type:
raise ValueError(f"{matching_type} is not a valid matching type, "
f"{valid_matching_type} expected.")
def build(self, input_shape: list):
"""
Build the layer.
:param input_shape: the shapes of the input tensors,
for MatchingLayer we need tow input tensors.
"""
# Used purely for shape validation.
if not isinstance(input_shape, list) or len(input_shape) != 2:
raise ValueError('A `MatchingLayer` layer should be called '
'on a list of 2 inputs.')
self._shape1 = input_shape[0]
self._shape2 = input_shape[1]
for idx in 0, 2:
if self._shape1[idx] != self._shape2[idx]:
raise ValueError(
'Incompatible dimensions: '
f'{self._shape1[idx]} != {self._shape2[idx]}.'
f'Layer shapes: {self._shape1}, {self._shape2}.'
)
def call(self, inputs: list, **kwargs) -> typing.Any:
"""
The computation logic of MatchingLayer.
:param inputs: two input tensors.
"""
x1 = inputs[0]
x2 = inputs[1]
if self._matching_type == 'dot':
if self._normalize:<|fim▁hole|> if self._matching_type == 'mul':
def func(x, y):
return x * y
elif self._matching_type == 'plus':
def func(x, y):
return x + y
elif self._matching_type == 'minus':
def func(x, y):
return x - y
elif self._matching_type == 'concat':
def func(x, y):
return tf.concat([x, y], axis=3)
else:
raise ValueError(f"Invalid matching type."
f"{self._matching_type} received."
f"Mut be in `dot`, `mul`, `plus`, "
f"`minus` and `concat`.")
x1_exp = tf.stack([x1] * self._shape2[1], 2)
x2_exp = tf.stack([x2] * self._shape1[1], 1)
return func(x1_exp, x2_exp)
def compute_output_shape(self, input_shape: list) -> tuple:
"""
Calculate the layer output shape.
:param input_shape: the shapes of the input tensors,
for MatchingLayer we need tow input tensors.
"""
if not isinstance(input_shape, list) or len(input_shape) != 2:
raise ValueError('A `MatchingLayer` layer should be called '
'on a list of 2 inputs.')
shape1 = list(input_shape[0])
shape2 = list(input_shape[1])
if len(shape1) != 3 or len(shape2) != 3:
raise ValueError('A `MatchingLayer` layer should be called '
'on 2 inputs with 3 dimensions.')
if shape1[0] != shape2[0] or shape1[2] != shape2[2]:
raise ValueError('A `MatchingLayer` layer should be called '
'on 2 inputs with same 0,2 dimensions.')
if self._matching_type in ['mul', 'plus', 'minus']:
return shape1[0], shape1[1], shape2[1], shape1[2]
elif self._matching_type == 'dot':
return shape1[0], shape1[1], shape2[1], 1
elif self._matching_type == 'concat':
return shape1[0], shape1[1], shape2[1], shape1[2] + shape2[2]
else:
raise ValueError(f"Invalid `matching_type`."
f"{self._matching_type} received."
f"Must be in `mul`, `plus`, `minus` "
f"`dot` and `concat`.")
def get_config(self) -> dict:
"""Get the config dict of MatchingLayer."""
config = {
'normalize': self._normalize,
'matching_type': self._matching_type,
}
base_config = super(MatchingLayer, self).get_config()
return dict(list(base_config.items()) + list(config.items()))<|fim▁end|> | x1 = tf.math.l2_normalize(x1, axis=2)
x2 = tf.math.l2_normalize(x2, axis=2)
return tf.expand_dims(tf.einsum('abd,acd->abc', x1, x2), 3)
else: |
<|file_name|>ieq30pro.py<|end_file_name|><|fim▁begin|>import re
import time
from astropy import units as u
from astropy.coordinates import SkyCoord
from panoptes.utils.time import current_time
from panoptes.utils import error as error
from panoptes.pocs.mount.serial import AbstractSerialMount
class Mount(AbstractSerialMount):
    """
    Mount class for iOptron mounts. Overrides the base `initialize` method
    and provides some helper methods to convert coordinates.
    """

    def __init__(self, *args, **kwargs):
        """Create the mount and set up the iOptron parsing tables."""
        super(Mount, self).__init__(*args, **kwargs)
        self.logger.info('Creating iOptron mount')

        # Regexp to match the iOptron RA/Dec format: signed Dec in
        # 0.01 arcsec units, followed by RA in milliseconds, each 8 digits.
        self._ra_format = r'(?P<ra_millisecond>\d{8})'
        self._dec_format = r'(?P<dec_sign>[\+\-])(?P<dec_arcsec>\d{8})'
        self._coords_format = re.compile(self._dec_format + self._ra_format)

        # Last raw status string received from the mount.
        self._raw_status = None
        # One digit per status field, per the iOptron RS-232 command set.
        self._status_format = re.compile(
            '(?P<gps>[0-2]{1})' +
            '(?P<state>[0-7]{1})' +
            '(?P<tracking>[0-4]{1})' +
            '(?P<movement_speed>[1-9]{1})' +
            '(?P<time_source>[1-3]{1})' +
            '(?P<hemisphere>[01]{1})'
        )

        # Human-readable translations for each status digit.
        self._status_lookup = {
            'gps': {
                '0': 'Off',
                '1': 'On',
                '2': 'Data Extracted'
            },
            'state': {
                '0': 'Stopped - Not at Zero Position',
                '1': 'Tracking (PEC disabled)',
                '2': 'Slewing',
                '3': 'Guiding',
                '4': 'Meridian Flipping',
                '5': 'Tracking (PEC enabled)',
                '6': 'Parked',
                '7': 'Stopped - Zero Position'
            },
            'tracking': {
                '0': 'Sidereal',
                '1': 'Lunar',
                '2': 'Solar',
                '3': 'King',
                '4': 'Custom'
            },
            'movement_speed': {
                '1': '1x sidereal',
                '2': '2x sidereal',
                '3': '8x sidereal',
                '4': '16x sidereal',
                '5': '64x sidereal',
                '6': '128x sidereal',
                '7': '256x sidereal',
                '8': '512x sidereal',
                '9': 'Max sidereal',
            },
            'time_source': {
                '1': 'RS-232',
                '2': 'Hand Controller',
                '3': 'GPS'
            },
            'hemisphere': {
                '0': 'Southern',
                '1': 'Northern'
            }
        }

        self.logger.info('Mount created')

    ################################################################################################
    # Properties
    ################################################################################################

    @property
    def is_home(self):
        """ bool: Mount home status. """
        self._is_home = 'Stopped - Zero Position' in self.status.get('state', '')

        return self._is_home

    @property
    def is_tracking(self):
        """ bool: Mount tracking status. """
        self._is_tracking = 'Tracking' in self.status.get('state', '')

        return self._is_tracking

    @property
    def is_slewing(self):
        """ bool: Mount slewing status. """
        self._is_slewing = 'Slewing' in self.status.get('state', '')

        return self._is_slewing

    ################################################################################################
    # Public Methods
    ################################################################################################

    def initialize(self, set_rates=True, unpark=False, *arg, **kwargs):
        """ Initialize the connection with the mount and setup for location.

        iOptron mounts are initialized by sending the following two commands
        to the mount:

        * Version
        * MountInfo

        If the mount is successfully initialized, the `_setup_location_for_mount` method
        is also called.

        Args:
            set_rates (bool): Whether to also apply the initial tracking and
                guide rates after a successful handshake.
            unpark (bool): Present for interface compatibility; not used here.

        Returns:
            bool: Returns the value from `self.is_initialized`.
        """
        if not self.is_connected:
            self.logger.info(f'Connecting to mount {__name__}')
            self.connect()

        if self.is_connected and not self.is_initialized:
            self.logger.info(f'Initializing {__name__} mount')

            # We trick the mount into thinking it's initialized while we
            # initialize otherwise the `query` method will test
            # to see if initialized and be put into loop.
            self._is_initialized = True

            actual_version = self.query('version')
            actual_mount_info = self.query('mount_info')

            expected_version = self.commands.get('version').get('response')
            expected_mount_info = self.commands.get('mount_info').get('response')
            self._is_initialized = False

            # Test our init procedure for iOptron
            if actual_version != expected_version or actual_mount_info != expected_mount_info:
                self.logger.debug(f'{actual_version} != {expected_version}')
                self.logger.debug(f'{actual_mount_info} != {expected_mount_info}')
                raise error.MountNotFound('Problem initializing mount')
            else:
                self._is_initialized = True
                self._setup_location_for_mount()
                if set_rates:
                    self._set_initial_rates()

        self.logger.info(f'Mount initialized: {self.is_initialized}')

        return self.is_initialized

    def park(self,
             ra_direction='west',
             ra_seconds=11.,
             dec_direction='south',
             dec_seconds=15.,
             *args, **kwargs):
        """Slews to the park position and parks the mount.

        This will first move the mount to the home position, then move the RA axis
        in the direction specified at 0.9x sidereal rate (the fastest) for the number
        of seconds requested. Then move the Dec axis in a similar manner. This should
        be adjusted for the particular parking position desired.

        Note:
            When mount is parked no movement commands will be accepted.

        Args:
            ra_direction (str, optional): The direction to move the RA axis from
                the home position. Defaults to 'west' for northern hemisphere.
            ra_seconds (float, optional): The number of seconds at fastest move
                speed to move the RA axis from the home position.
            dec_direction (str, optional): The direction to move the Dec axis
                from the home position. Defaults to 'south' for northern hemisphere.
            dec_seconds (float, optional): The number of seconds at the fastest
                move speed to move the Dec axis from the home position.

        Returns:
            bool: indicating success
        """
        if self.is_parked:
            self.logger.info('Mount is parked')
            return self._is_parked

        if self.slew_to_home(blocking=True):
            # The mount is currently not parking in correct position so we manually move it there.
            self.query('set_button_moving_rate', 9)
            self.move_direction(direction=ra_direction, seconds=ra_seconds)
            while self.is_slewing:
                self.logger.debug('Slewing RA axis to park position...')
                time.sleep(3)
            self.move_direction(direction=dec_direction, seconds=dec_seconds)
            while self.is_slewing:
                self.logger.debug('Slewing Dec axis to park position...')
                time.sleep(3)

        self._is_parked = True
        self.logger.debug(f'Mount parked: {self.is_parked}')

        return self._is_parked

    ################################################################################################
    # Private Methods
    ################################################################################################

    def _set_initial_rates(self):
        """Apply sidereal tracking, max button-move rate, and guide rates."""
        # Make sure we start at sidereal
        self.set_tracking_rate()

        self.logger.debug('Setting manual moving rate to max')
        self.query('set_button_moving_rate', 9)
        self.logger.debug(f'Mount guide rate: {self.query("get_guide_rate")}')
        self.query('set_guide_rate', '9090')
        # Guide rate is returned as four digits: RA then Dec, each in
        # hundredths of sidereal rate.
        guide_rate = self.query('get_guide_rate')
        self.ra_guide_rate = int(guide_rate[0:2]) / 100
        self.dec_guide_rate = int(guide_rate[2:]) / 100
        self.logger.debug(f'Mount guide rate: {self.ra_guide_rate} {self.dec_guide_rate}')

    def _setup_location_for_mount(self):
        """
        Sets the mount up to the current location. Mount must be initialized first.

        This uses mount.location (an astropy.coords.EarthLocation) to set
        most of the params and the rest is read from a config file. Users
        should not call this directly.

        Includes:

        * Latitude               set_lat
        * Longitude              set_long
        * Daylight Savings       disable_daylight_savings
        * Universal Time Offset  set_gmt_offset
        * Current Date           set_local_date
        * Current Time           set_local_time
        """
        assert self.is_initialized, self.logger.warning('Mount has not been initialized')
        assert self.location is not None, self.logger.warning(
            'Please set a location before attempting setup')

        self.logger.info('Setting up mount for location')

        # Location
        # Adjust the lat/long for format expected by iOptron (signed arcseconds)
        lat = '{:+07.0f}'.format(self.location.lat.to(u.arcsecond).value)
        lon = '{:+07.0f}'.format(self.location.lon.to(u.arcsecond).value)

        self.query('set_long', lon)
        self.query('set_lat', lat)

        # Time
        self.query('disable_daylight_savings')

        gmt_offset = self.get_config('location.gmt_offset', default=0)
        self.query('set_gmt_offset', gmt_offset)

        now = current_time() + gmt_offset * u.minute

        self.query('set_local_time', now.datetime.strftime("%H%M%S"))
        self.query('set_local_date', now.datetime.strftime("%y%m%d"))

    def _mount_coord_to_skycoord(self, mount_coords):
        """
        Converts between iOptron RA/Dec format and a SkyCoord.

        Args:
            mount_coords (str): Coordinates as returned by mount, i.e. a signed
                eight digit Dec (0.01 arcsec units) followed by an eight digit
                RA (milliseconds).

        Returns:
            astropy.SkyCoord: Mount coordinates as astropy SkyCoord, or `None`
                if the string does not match the expected format.
        """
        coords_match = self._coords_format.fullmatch(mount_coords)

        coords = None

        if coords_match is not None:
            # The regex groups are strings; convert to int before attaching
            # astropy units.
            ra = (int(coords_match.group('ra_millisecond')) * u.millisecond).to(u.hour)
            dec = (int(coords_match.group('dec_arcsec')) * u.centiarcsecond).to(u.arcsec)

            dec_sign = coords_match.group('dec_sign')
            if dec_sign == '-':
                dec = dec * -1

            coords = SkyCoord(ra=ra, dec=dec, frame='icrs', unit=(u.hour, u.arcsecond))
        else:
            self.logger.warning('Cannot create SkyCoord from mount coordinates')

        return coords

    def _skycoord_to_mount_coord(self, coords):
        """
        Converts between SkyCoord and the iOptron RA/Dec wire format.

        `
        TTTTTTTT(T) 0.01 arc-seconds
        XXXXX(XXX) milliseconds

        Command: ":SrXXXXXXXX#"
        Defines the commanded right ascension, RA. Slew, calibrate and
        park commands operate on the most recently defined right ascension.

        Command: ":SdsTTTTTTTT#"
        Defines the commanded declination, Dec. Slew, calibrate and
        park commands operate on the most recently defined declination.
        `

        @param coords astropy.coordinates.SkyCoord

        @retval A tuple of RA/Dec coordinates
        """
        # RA in milliseconds
        ra_ms = (coords.ra.hour * u.hour).to(u.millisecond)
        mount_ra = f'{ra_ms.value:08.0f}'
        self.logger.debug(f'RA (ms): {ra_ms}')

        dec_dms = (coords.dec.degree * u.degree).to(u.centiarcsecond)
        self.logger.debug(f'Dec (centiarcsec): {dec_dms}')
        # `=+` forces the sign to be emitted before the zero padding.
        mount_dec = f'{dec_dms.value:=+08.0f}'

        mount_coords = (mount_ra, mount_dec)

        return mount_coords

    @property
    # NOTE(review): exposing this as a `@property` means merely reading the
    # attribute sends a command to the mount -- unusual, but kept as-is
    # because callers may rely on attribute-style access.
    def _set_zero_position(self):
        """ Sets the current position as the zero position.

        The iOptron allows you to set the current position directly, so
        we simply call the iOptron command.
        """
        self.logger.info('Setting zero position')
        return self.query('set_zero_position')
<|file_name|>subscriber_notifee.go<|end_file_name|><|fim▁begin|>package dht
import (
"context"
"fmt"
"github.com/libp2p/go-libp2p-core/event"
"github.com/libp2p/go-libp2p-core/network"
"github.com/libp2p/go-libp2p-core/peer"
"github.com/libp2p/go-eventbus"
"github.com/jbenet/goprocess"
ma "github.com/multiformats/go-multiaddr"
)
// subscriberNotifee implements network.Notifee and also manages the subscriber to the event bus. We consume peer
// identification events to trigger inclusion in the routing table, and we consume Disconnected events to eject peers
// from it.
type subscriberNotifee struct {
dht *IpfsDHT
subs event.Subscription
}
func newSubscriberNotifiee(dht *IpfsDHT) (*subscriberNotifee, error) {
bufSize := eventbus.BufSize(256)
evts := []interface{}{
// register for event bus notifications of when peers successfully complete identification in order to update
// the routing table
new(event.EvtPeerIdentificationCompleted),
// register for event bus protocol ID changes in order to update the routing table
new(event.EvtPeerProtocolsUpdated),
// register for event bus notifications for when our local address/addresses change so we can
// advertise those to the network
new(event.EvtLocalAddressesUpdated),
}
// register for event bus local routability changes in order to trigger switching between client and server modes
// only register for events if the DHT is operating in ModeAuto
if dht.auto == ModeAuto || dht.auto == ModeAutoServer {
evts = append(evts, new(event.EvtLocalReachabilityChanged))
}
subs, err := dht.host.EventBus().Subscribe(evts, bufSize)
if err != nil {
return nil, fmt.Errorf("dht could not subscribe to eventbus events; err: %s", err)
}
nn := &subscriberNotifee{
dht: dht,
subs: subs,
}
// register for network notifications
dht.host.Network().Notify(nn)
return nn, nil
}
func (nn *subscriberNotifee) subscribe(proc goprocess.Process) {
dht := nn.dht
defer dht.host.Network().StopNotify(nn)
defer nn.subs.Close()
for {
select {
case e, more := <-nn.subs.Out():
if !more {
return<|fim▁hole|> // when our address changes, we should proactively tell our closest peers about it so
// we become discoverable quickly. The Identify protocol will push a signed peer record
// with our new address to all peers we are connected to. However, we might not necessarily be connected
// to our closet peers & so in the true spirit of Zen, searching for ourself in the network really is the best way
// to to forge connections with those matter.
if dht.autoRefresh || dht.testAddressUpdateProcessing {
dht.rtRefreshManager.RefreshNoWait()
}
case event.EvtPeerProtocolsUpdated:
handlePeerChangeEvent(dht, evt.Peer)
case event.EvtPeerIdentificationCompleted:
handlePeerChangeEvent(dht, evt.Peer)
case event.EvtLocalReachabilityChanged:
if dht.auto == ModeAuto || dht.auto == ModeAutoServer {
handleLocalReachabilityChangedEvent(dht, evt)
} else {
// something has gone really wrong if we get an event we did not subscribe to
logger.Errorf("received LocalReachabilityChanged event that was not subscribed to")
}
default:
// something has gone really wrong if we get an event for another type
logger.Errorf("got wrong type from subscription: %T", e)
}
case <-proc.Closing():
return
}
}
}
func handlePeerChangeEvent(dht *IpfsDHT, p peer.ID) {
valid, err := dht.validRTPeer(p)
if err != nil {
logger.Errorf("could not check peerstore for protocol support: err: %s", err)
return
} else if valid {
dht.peerFound(dht.ctx, p, false)
dht.fixRTIfNeeded()
} else {
dht.peerStoppedDHT(dht.ctx, p)
}
}
func handleLocalReachabilityChangedEvent(dht *IpfsDHT, e event.EvtLocalReachabilityChanged) {
var target mode
switch e.Reachability {
case network.ReachabilityPrivate:
target = modeClient
case network.ReachabilityUnknown:
if dht.auto == ModeAutoServer {
target = modeServer
} else {
target = modeClient
}
case network.ReachabilityPublic:
target = modeServer
}
logger.Infof("processed event %T; performing dht mode switch", e)
err := dht.setMode(target)
// NOTE: the mode will be printed out as a decimal.
if err == nil {
logger.Infow("switched DHT mode successfully", "mode", target)
} else {
logger.Errorw("switching DHT mode failed", "mode", target, "error", err)
}
}
// validRTPeer returns true if the peer supports the DHT protocol and false otherwise. Supporting the DHT protocol means
// supporting the primary protocols, we do not want to add peers that are speaking obsolete secondary protocols to our
// routing table
func (dht *IpfsDHT) validRTPeer(p peer.ID) (bool, error) {
b, err := dht.peerstore.FirstSupportedProtocol(p, dht.protocolsStrs...)
if len(b) == 0 || err != nil {
return false, err
}
return dht.routingTablePeerFilter == nil || dht.routingTablePeerFilter(dht, p), nil
}
type disconnector interface {
OnDisconnect(ctx context.Context, p peer.ID)
}
func (nn *subscriberNotifee) Disconnected(n network.Network, v network.Conn) {
dht := nn.dht
ms, ok := dht.msgSender.(disconnector)
if !ok {
return
}
select {
case <-dht.Process().Closing():
return
default:
}
p := v.RemotePeer()
// Lock and check to see if we're still connected. We lock to make sure
// we don't concurrently process a connect event.
dht.plk.Lock()
defer dht.plk.Unlock()
if dht.host.Network().Connectedness(p) == network.Connected {
// We're still connected.
return
}
ms.OnDisconnect(dht.Context(), p)
}
func (nn *subscriberNotifee) Connected(network.Network, network.Conn) {}
func (nn *subscriberNotifee) OpenedStream(network.Network, network.Stream) {}
func (nn *subscriberNotifee) ClosedStream(network.Network, network.Stream) {}
func (nn *subscriberNotifee) Listen(network.Network, ma.Multiaddr) {}
func (nn *subscriberNotifee) ListenClose(network.Network, ma.Multiaddr) {}<|fim▁end|> | }
switch evt := e.(type) {
case event.EvtLocalAddressesUpdated: |
<|file_name|>visualization.py<|end_file_name|><|fim▁begin|>"""
Plotting (requires matplotlib)
"""
from colorsys import hsv_to_rgb, hls_to_rgb
from libmp import NoConvergence
class VisualizationMethods(object):
plot_ignore = (ValueError, ArithmeticError, ZeroDivisionError, NoConvergence)
def plot(ctx, f, xlim=[-5,5], ylim=None, points=200, file=None, dpi=None,
singularities=[], axes=None):
r"""
Shows a simple 2D plot of a function `f(x)` or list of functions
`[f_0(x), f_1(x), \ldots, f_n(x)]` over a given interval
specified by *xlim*. Some examples::
plot(lambda x: exp(x)*li(x), [1, 4])
plot([cos, sin], [-4, 4])
plot([fresnels, fresnelc], [-4, 4])
plot([sqrt, cbrt], [-4, 4])
plot(lambda t: zeta(0.5+t*j), [-20, 20])
plot([floor, ceil, abs, sign], [-5, 5])
Points where the function raises a numerical exception or
returns an infinite value are removed from the graph.
Singularities can also be excluded explicitly
as follows (useful for removing erroneous vertical lines)::
plot(cot, ylim=[-5, 5]) # bad
plot(cot, ylim=[-5, 5], singularities=[-pi, 0, pi]) # good
For parts where the function assumes complex values, the
real part is plotted with dashes and the imaginary part
is plotted with dots.
.. note :: This function requires matplotlib (pylab).
"""
if file:
axes = None
fig = None
if not axes:
import pylab
fig = pylab.figure()
axes = fig.add_subplot(111)
if not isinstance(f, (tuple, list)):
f = [f]
a, b = xlim
colors = ['b', 'r', 'g', 'm', 'k']
for n, func in enumerate(f):
x = ctx.arange(a, b, (b-a)/float(points))
segments = []
segment = []
in_complex = False
for i in xrange(len(x)):
try:
if i != 0:
for sing in singularities:
if x[i-1] <= sing and x[i] >= sing:
raise ValueError
v = func(x[i])
if ctx.isnan(v) or abs(v) > 1e300:
raise ValueError
if hasattr(v, "imag") and v.imag:
re = float(v.real)
im = float(v.imag)
if not in_complex:
in_complex = True
segments.append(segment)
segment = []
segment.append((float(x[i]), re, im))
else:
if in_complex:
in_complex = False
segments.append(segment)
segment = []
segment.append((float(x[i]), v))
except ctx.plot_ignore:
if segment:
segments.append(segment)
segment = []
if segment:
segments.append(segment)
for segment in segments:
x = [s[0] for s in segment]
y = [s[1] for s in segment]
if not x:
continue
c = colors[n % len(colors)]
if len(segment[0]) == 3:
z = [s[2] for s in segment]
axes.plot(x, y, '--'+c, linewidth=3)
axes.plot(x, z, ':'+c, linewidth=3)
else:
axes.plot(x, y, c, linewidth=3)
axes.set_xlim(map(float, xlim))
if ylim:<|fim▁hole|> axes.grid(True)
if fig:
if file:
pylab.savefig(file, dpi=dpi)
else:
pylab.show()
def default_color_function(ctx, z):
if ctx.isinf(z):
return (1.0, 1.0, 1.0)
if ctx.isnan(z):
return (0.5, 0.5, 0.5)
pi = 3.1415926535898
a = (float(ctx.arg(z)) + ctx.pi) / (2*ctx.pi)
a = (a + 0.5) % 1.0
b = 1.0 - float(1/(1.0+abs(z)**0.3))
return hls_to_rgb(a, b, 0.8)
def cplot(ctx, f, re=[-5,5], im=[-5,5], points=2000, color=None,
verbose=False, file=None, dpi=None, axes=None):
"""
Plots the given complex-valued function *f* over a rectangular part
of the complex plane specified by the pairs of intervals *re* and *im*.
For example::
cplot(lambda z: z, [-2, 2], [-10, 10])
cplot(exp)
cplot(zeta, [0, 1], [0, 50])
By default, the complex argument (phase) is shown as color (hue) and
the magnitude is show as brightness. You can also supply a
custom color function (*color*). This function should take a
complex number as input and return an RGB 3-tuple containing
floats in the range 0.0-1.0.
To obtain a sharp image, the number of points may need to be
increased to 100,000 or thereabout. Since evaluating the
function that many times is likely to be slow, the 'verbose'
option is useful to display progress.
.. note :: This function requires matplotlib (pylab).
"""
if color is None:
color = ctx.default_color_function
import pylab
if file:
axes = None
fig = None
if not axes:
fig = pylab.figure()
axes = fig.add_subplot(111)
rea, reb = re
ima, imb = im
dre = reb - rea
dim = imb - ima
M = int(ctx.sqrt(points*dre/dim)+1)
N = int(ctx.sqrt(points*dim/dre)+1)
x = pylab.linspace(rea, reb, M)
y = pylab.linspace(ima, imb, N)
# Note: we have to be careful to get the right rotation.
# Test with these plots:
# cplot(lambda z: z if z.real < 0 else 0)
# cplot(lambda z: z if z.imag < 0 else 0)
w = pylab.zeros((N, M, 3))
for n in xrange(N):
for m in xrange(M):
z = ctx.mpc(x[m], y[n])
try:
v = color(f(z))
except ctx.plot_ignore:
v = (0.5, 0.5, 0.5)
w[n,m] = v
if verbose:
print n, "of", N
axes.imshow(w, extent=(rea, reb, ima, imb), origin='lower')
axes.set_xlabel('Re(z)')
axes.set_ylabel('Im(z)')
if fig:
if file:
pylab.savefig(file, dpi=dpi)
else:
pylab.show()
def splot(ctx, f, u=[-5,5], v=[-5,5], points=100, keep_aspect=True, \
wireframe=False, file=None, dpi=None, axes=None):
"""
Plots the surface defined by `f`.
If `f` returns a single component, then this plots the surface
defined by `z = f(x,y)` over the rectangular domain with
`x = u` and `y = v`.
If `f` returns three components, then this plots the parametric
surface `x, y, z = f(u,v)` over the pairs of intervals `u` and `v`.
For example, to plot a simple function::
>>> from mpmath import *
>>> f = lambda x, y: sin(x+y)*cos(y)
>>> splot(f, [-pi,pi], [-pi,pi]) # doctest: +SKIP
Plotting a donut::
>>> r, R = 1, 2.5
>>> f = lambda u, v: [r*cos(u), (R+r*sin(u))*cos(v), (R+r*sin(u))*sin(v)]
>>> splot(f, [0, 2*pi], [0, 2*pi]) # doctest: +SKIP
.. note :: This function requires matplotlib (pylab) 0.98.5.3 or higher.
"""
import pylab
import mpl_toolkits.mplot3d as mplot3d
if file:
axes = None
fig = None
if not axes:
fig = pylab.figure()
axes = mplot3d.axes3d.Axes3D(fig)
ua, ub = u
va, vb = v
du = ub - ua
dv = vb - va
if not isinstance(points, (list, tuple)):
points = [points, points]
M, N = points
u = pylab.linspace(ua, ub, M)
v = pylab.linspace(va, vb, N)
x, y, z = [pylab.zeros((M, N)) for i in xrange(3)]
xab, yab, zab = [[0, 0] for i in xrange(3)]
for n in xrange(N):
for m in xrange(M):
fdata = f(ctx.convert(u[m]), ctx.convert(v[n]))
try:
x[m,n], y[m,n], z[m,n] = fdata
except TypeError:
x[m,n], y[m,n], z[m,n] = u[m], v[n], fdata
for c, cab in [(x[m,n], xab), (y[m,n], yab), (z[m,n], zab)]:
if c < cab[0]:
cab[0] = c
if c > cab[1]:
cab[1] = c
if wireframe:
axes.plot_wireframe(x, y, z, rstride=4, cstride=4)
else:
axes.plot_surface(x, y, z, rstride=4, cstride=4)
axes.set_xlabel('x')
axes.set_ylabel('y')
axes.set_zlabel('z')
if keep_aspect:
dx, dy, dz = [cab[1] - cab[0] for cab in [xab, yab, zab]]
maxd = max(dx, dy, dz)
if dx < maxd:
delta = maxd - dx
axes.set_xlim3d(xab[0] - delta / 2.0, xab[1] + delta / 2.0)
if dy < maxd:
delta = maxd - dy
axes.set_ylim3d(yab[0] - delta / 2.0, yab[1] + delta / 2.0)
if dz < maxd:
delta = maxd - dz
axes.set_zlim3d(zab[0] - delta / 2.0, zab[1] + delta / 2.0)
if fig:
if file:
pylab.savefig(file, dpi=dpi)
else:
pylab.show()
VisualizationMethods.plot = plot
VisualizationMethods.default_color_function = default_color_function
VisualizationMethods.cplot = cplot
VisualizationMethods.splot = splot<|fim▁end|> | axes.set_ylim(map(float, ylim))
axes.set_xlabel('x')
axes.set_ylabel('f(x)') |
<|file_name|>automate_scrape.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python3
'''
automate_scrape.py - runs throught the online verision of the text book, 'Automate the boring stuff with python' and pulls out all of the projects and stores them in a file.
'''
import requests, os, bs4, sys
<|fim▁hole|> '''
print('Downloading page {0}'.format(web_page))
res = requests.get(web_page)
res.raise_for_status()
soup = bs4.BeautifulSoup(res.text)
chapter_loop_and_write(soup, chapter_num, no_project_count, no_chapter_projects)
def chapter_loop_and_write(downloaded_page, chapter_num, no_project_count, no_chapter_projects):
'''
Takes a downloaded web page and pulls out the practice projects and writes them to a file. It then moves to the next chapter, gets that url. Counts the number of chapters where practice projects could not be found and stores those chapters in a list.
'''
soup = downloaded_page
projects = soup.find_all('div', {'class': "book", 'title' : 'Practice Projects'})
if projects == []:
print('Could not find Projects.')
no_project_count += 1
no_chapter_projects.append('Chapter' + str(chapter_num))
else:
with open('automateProjects.txt', 'a') as f:
for el in projects:
f.write(el.get_text())
print('Writing text to file')
f.close()
chapter_num += 1
if chapter_num == 19:
print('\n{0} chapters where Practice Projects could not be found'.format(no_project_count))
print('Here is the list of those chapters:\n{0}'.format(no_chapter_projects))
print('='*20 + 'Done' + '='*20 +'\n')
return
next_link = soup.find('a', href='/chapter' + str(chapter_num))
web_page = 'http://automatetheboringstuff.com' + next_link.get('href') +'/'
page_download(web_page, chapter_num, no_project_count, no_chapter_projects)
def main():
'''
main
'''
web_page = 'https://automatetheboringstuff.com/chapter1/'
page_download(web_page, 1, 0, [])
if __name__ == '__main__':
sys.exit(main())<|fim▁end|> | def page_download(web_page, chapter_num, no_project_count, no_chapter_projects):
'''
Downloads the web page. Keeps the varibales, chapter_num, no_project_count, no_chapter_projects to be then used in chapter_loop_and_write. |
<|file_name|>section_test.js<|end_file_name|><|fim▁begin|>// ==========================================================================
// Project: Brochurno
// Copyright: @2011 Jason Dooley
// ==========================================================================
/*globals Brochurno module test ok equals same stop start */
module("Brochurno.SectionView");
// TODO: Replace with real unit test for Brochurno.SectionView
test("test description", function() {
var expected = "test";
var result = "test";
equals(result, expected, "test should equal test");<|fim▁hole|>});<|fim▁end|> | |
<|file_name|>encoder.go<|end_file_name|><|fim▁begin|>package sdp
func (s Session) appendAttributes(attrs Attributes) Session {
for _, v := range attrs {
if v.Value == blank {
s = s.AddFlag(v.Key)
} else {
s = s.AddAttribute(v.Key, v.Value)
}
}
return s
}
// Append encodes message to Session and returns result.
//
// See RFC 4566 Section 5.
func (m *Message) Append(s Session) Session {
s = s.AddVersion(m.Version)
s = s.AddOrigin(m.Origin)
s = s.AddSessionName(m.Name)
if len(m.Info) > 0 {
s = s.AddSessionInfo(m.Info)
}
if len(m.URI) > 0 {
s = s.AddURI(m.URI)<|fim▁hole|> if len(m.Email) > 0 {
s = s.AddEmail(m.Email)
}
if len(m.Phone) > 0 {
s = s.AddPhone(m.Phone)
}
if !m.Connection.Blank() {
s = s.AddConnectionData(m.Connection)
}
for t, v := range m.Bandwidths {
s = s.AddBandwidth(t, v)
}
// One or more time descriptions ("t=" and "r=" lines)
for _, t := range m.Timing {
s = s.AddTiming(t.Start, t.End)
if len(t.Offsets) > 0 {
s = s.AddRepeatTimesCompact(t.Repeat, t.Active, t.Offsets...)
}
}
if len(m.TZAdjustments) > 0 {
s = s.AddTimeZones(m.TZAdjustments...)
}
if !m.Encryption.Blank() {
s = s.AddEncryption(m.Encryption)
}
s = s.appendAttributes(m.Attributes)
for i := range m.Medias {
s = s.AddMediaDescription(m.Medias[i].Description)
if len(m.Medias[i].Title) > 0 {
s = s.AddSessionInfo(m.Medias[i].Title)
}
if !m.Medias[i].Connection.Blank() {
s = s.AddConnectionData(m.Medias[i].Connection)
}
for t, v := range m.Medias[i].Bandwidths {
s = s.AddBandwidth(t, v)
}
if !m.Medias[i].Encryption.Blank() {
s = s.AddEncryption(m.Medias[i].Encryption)
}
s = s.appendAttributes(m.Medias[i].Attributes)
}
return s
}<|fim▁end|> | } |
<|file_name|>type-alias-impl-trait.rs<|end_file_name|><|fim▁begin|>// run-pass
#![allow(dead_code)]
#![allow(unused_assignments)]
#![allow(unused_variables)]
#![feature(type_alias_impl_trait)]
fn main() {
assert_eq!(foo().to_string(), "foo");
assert_eq!(bar1().to_string(), "bar1");
assert_eq!(bar2().to_string(), "bar2");
let mut x = bar1();
x = bar2();
assert_eq!(my_iter(42u8).collect::<Vec<u8>>(), vec![42u8]);
}
// single definition
type Foo = impl std::fmt::Display;
fn foo() -> Foo {
"foo"
}
// two definitions
type Bar = impl std::fmt::Display;
fn bar1() -> Bar {
"bar1"
}
fn bar2() -> Bar {
"bar2"
}
type MyIter<T> = impl Iterator<Item = T>;
fn my_iter<T>(t: T) -> MyIter<T> {
std::iter::once(t)
}
fn my_iter2<T>(t: T) -> MyIter<T> {
std::iter::once(t)
}
// param names should not have an effect!
fn my_iter3<U>(u: U) -> MyIter<U> {
std::iter::once(u)
}
// param position should not have an effect!
fn my_iter4<U, V>(_: U, v: V) -> MyIter<V> {
std::iter::once(v)
}
// param names should not have an effect!
type MyOtherIter<T> = impl Iterator<Item = T>;
fn my_other_iter<U>(u: U) -> MyOtherIter<U> {
std::iter::once(u)
}
trait Trait {}<|fim▁hole|>
fn generic_bound<'a, T: Trait + 'a>(t: T) -> GenericBound<'a, T> {
t
}
mod pass_through {
pub type Passthrough<T: 'static> = impl Sized + 'static;
fn define_passthrough<T: 'static>(t: T) -> Passthrough<T> {
t
}
}
fn use_passthrough(x: pass_through::Passthrough<u32>) -> pass_through::Passthrough<u32> {
x
}<|fim▁end|> | type GenericBound<'a, T: Trait + 'a> = impl Sized + 'a; |
<|file_name|>draft_utils.py<|end_file_name|><|fim▁begin|>from .models import Framework
from .utils import get_json_from_request, json_has_required_keys, \
json_has_matching_id
from .validation import get_validation_errors
def validate_and_return_draft_request(draft_id=0):
json_payload = get_json_from_request()
json_has_required_keys(json_payload, ['services'])
if draft_id:
json_has_matching_id(json_payload['services'], draft_id)
return json_payload['services']
<|fim▁hole|>def get_draft_validation_errors(draft_json, lot,
framework_id=0, slug=None, required=None):
if not slug and not framework_id:
raise Exception('Validation requires either framework_id or slug')
if not slug:
framework = Framework.query.filter(
Framework.id == framework_id
).first()
slug = framework.slug
errs = get_validation_errors(
"services-{0}-{1}".format(slug, lot.lower()),
draft_json,
enforce_required=False,
required_fields=required
)
return errs<|fim▁end|> | |
<|file_name|>math.js<|end_file_name|><|fim▁begin|>function mathGame(){
var game = new Phaser.Game(window.innerWidth, window.innerHeight, Phaser.auto, 'math', {
preload: onPreload,
create: onCreate,
// resize:onResize
});
WebFontConfig = {
active: function() { game.time.events.add(Phaser.Timer.SECOND, createText, this); },
google: {
families: ['Fredericka the Great']
}
};
var sumsArray = [];
var questionText;
var randomSum;
var timeTween;
var numberTimer;
var buttonMask;
var replay;
var score=0;
var scoreText;
var isGameOver = false;
var topScore;
var numbersArray = [-3,-2,-1,1,2,3];
function buildThrees(initialNummber,currentIndex,limit,currentString){
for(var i=0;i<numbersArray.length;i++){
var sum = initialNummber+numbersArray[i];
var outputString = currentString+(numbersArray[i]<0?"":"+")+numbersArray[i];
if(sum>0 && sum<4 && currentIndex==limit){
sumsArray[limit][sum-1].push(outputString);
}
if(currentIndex<limit){
buildThrees(sum,currentIndex+1,limit,outputString);
}
}
}
function onPreload() {
// responsiveScale();
game.load.script('webfont', '//ajax.googleapis.com/ajax/libs/webfont/1.4.7/webfont.js');
game.load.image("timebar", "/images/math/timebar.png");
game.load.image("buttonmask", "/images/math/buttonmask.png");
game.load.spritesheet("buttons", "/images/math/buttons.png",400,50);
game.load.spritesheet('myguy', '/images/math/dance.png', 70, 120);
game.load.image("background", "/images/math/board2.png");
game.load.image("replay", "images/math/replay.png");
game.load.image("home", "images/home.png");
}
function onCreate() {
topScore = localStorage.getItem("topScore")==null?0:localStorage.getItem("topScore");
// game.stage.backgroundColor = "#cccccc";
chalkBoard = game.add.sprite(1100, 850,"background");
chalkBoard.x = 0;
chalkBoard.y = 0;
chalkBoard.height = game.height;
chalkBoard.width = game.width;
game.stage.disableVisibilityChange = true;
gameOverSprite = this.game.add.sprite(600, 300, 'myguy');
gameOverSprite.visible = false;
gameOverSprite.frame = 0;
gameOverSprite.animations.add('left', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13], 10, true);
replay = game.add.button(game.width*.6, game.height*.1,"replay",replay,this);
replay.visable = false;
home = game.add.button(game.width*.75, game.height*.1, 'home', function onClick(){window.location.href ="/home"});
home.scale.setTo(0.2,0.2);
for(var i=1;i<5;i++){
sumsArray[i]=[[],[],[]];
for(var j=1;j<=3;j++){
buildThrees(j,1,i,j);
}
}
questionText = game.add.text(game.width*.5,game.height*.3,"-");
questionText.anchor.set(0.5);
scoreText = game.add.text(game.width*.1,game.height*.10,"-");
for(var i=0;i<3;i++){
var numberButton = game.add.button(game.width*.3,game.height*.4+i*75,"buttons",checkAnswer,this).frame=i;
}
numberTimer = game.add.sprite(game.width*.3,game.height*.4,"timebar");
nextNumber();
}
function createText() {
questionText.font = 'Fredericka the Great';
questionText.fontSize = 37;
questionText.addColor('#edf0f3',0);
scoreText.font = 'Fredericka the Great';
scoreText.fontSize = 37;
scoreText.addColor('#edf0f3',0);
};
function gameOver(gameOverString){
// game.stage.backgroundColor = "#ff0000";
console.log(gameOverString)
questionText.text = "Wrong Answer!";
questionText.addColor('#ff471a',0);
isGameOver = true;
localStorage.setItem("topScore",Math.max(score,topScore));
numberTimer.destroy();<|fim▁hole|> // gameOverSprite.animations.play('left');
}
function checkAnswer(button){
var correctAnswer;
if(!isGameOver){
if(button.frame==randomSum){
score+=Math.floor((buttonMask.x+350)/4);
nextNumber();
}
else{
if(score>0) {
timeTween.stop();
}
correctAnswer = randomSum;
gameOver(correctAnswer);
}
}
}
function replay(){
$("#math").html("");
mathGame();
}
function nextNumber(){
scoreText.text = "Score: "+score.toString()+"\nBest Score: "+topScore.toString();
if(buttonMask){
buttonMask.destroy();
game.tweens.removeAll();
}
buttonMask = game.add.graphics(game.width*.3,game.height*.4);
buttonMask.beginFill(0xffffff);
buttonMask.drawRect(0, 0, 400, 200);
buttonMask.endFill();
numberTimer.mask = buttonMask;
if(score>0){
timeTween=game.add.tween(buttonMask);
timeTween.to({
x: -350
}, 9000, "Linear",true);
timeTween.onComplete.addOnce(function(){
gameOver("?");
}, this);
}
randomSum = game.rnd.between(0,2);
questionText.text = sumsArray[Math.min(Math.round((score-100)/400)+1,4)][randomSum][game.rnd.between(0,sumsArray[Math.min(Math.round((score-100)/400)+1,4)][randomSum].length-1)];
}
}
// }<|fim▁end|> | buttonMask.destroy();
replay.visible = true;
// gameOverSprite.visible = true; |
<|file_name|>svg-container.component.ts<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {
AfterContentInit,
ChangeDetectionStrategy,
Component,
ElementRef,
EventEmitter,
Input,
OnInit,
Output,
ViewChild
} from '@angular/core';
import * as d3 from 'd3';
import { select, Selection } from 'd3-selection';
import { zoom, ZoomBehavior } from 'd3-zoom';
import { SafeAny } from 'interfaces';
@Component({
selector: 'flink-svg-container',
templateUrl: './svg-container.component.html',
styleUrls: ['./svg-container.component.less'],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class SvgContainerComponent implements OnInit, AfterContentInit {
zoom = 1;
width: number;
height: number;
transform = 'translate(0, 0) scale(1)';
containerTransform = { x: 0, y: 0, k: 1 };
svgSelect: Selection<SafeAny, SafeAny, SafeAny, SafeAny>;
zoomController: ZoomBehavior<SafeAny, SafeAny>;
@ViewChild('svgContainer', { static: true }) svgContainer: ElementRef<SVGAElement>;<|fim▁hole|> @Input() nzMinZoom = 0.1;
@Output() clickBgEvent: EventEmitter<MouseEvent> = new EventEmitter();
@Output() zoomEvent: EventEmitter<number> = new EventEmitter();
@Output() transformEvent: EventEmitter<{ x: number; y: number; scale: number }> = new EventEmitter();
/**
* Zoom to spec level
*
* @param zoomLevel
*/
zoomTo(zoomLevel: number): void {
this.svgSelect
.transition()
.duration(0)
.call(this.zoomController.scaleTo, zoomLevel);
}
/**
* Set transform position
*
* @param transform
* @param animate
*/
setPositionByTransform(transform: { x: number; y: number; k: number }, animate = false): void {
this.svgSelect
.transition()
.duration(animate ? 500 : 0)
.call(this.zoomController.transform, transform);
}
constructor(private el: ElementRef) {}
ngOnInit(): void {
this.svgSelect = select(this.svgContainer.nativeElement);
this.zoomController = zoom()
.scaleExtent([this.nzMinZoom, this.nzMaxZoom])
.on('zoom', () => {
this.containerTransform = d3.event.transform;
this.zoom = this.containerTransform.k;
if (!isNaN(this.containerTransform.x)) {
this.transform = `translate(${this.containerTransform.x} ,${this.containerTransform.y})scale(${this.containerTransform.k})`;
}
this.zoomEvent.emit(this.zoom);
this.transformEvent.emit(this.containerTransform as SafeAny);
});
this.svgSelect.call(this.zoomController).on('wheel.zoom', null);
}
ngAfterContentInit(): void {
const hostElem = this.el.nativeElement;
if (hostElem.parentNode !== null) {
const dims = hostElem.parentNode.getBoundingClientRect();
this.width = dims.width;
this.height = dims.height;
this.zoomTo(this.zoom);
}
}
}<|fim▁end|> | @ViewChild('svgInner', { static: true }) svgInner: ElementRef<SVGAElement>;
@Input() nzMaxZoom = 5; |
<|file_name|>BrightContrastFilter.rs<|end_file_name|><|fim▁begin|>#pragma version(1)
#pragma rs java_package_name(cn.louispeng.imagefilter.renderscript)
// 高亮对比度特效
#include "Clamp.rsh"
// set from the java SDK level
rs_allocation gIn;
rs_allocation gOut;
rs_script gScript;
// The brightness factor.
// Should be in the range [-1.0f, 1.0f].
float gBrightnessFactor = 0.25f;
// The contrast factor.
// Should be in the range [-1.0f, 1.0f].
float gContrastFactor = 0.5f;
// magic factor
static float ContrastFactor1;
void filter() {
ContrastFactor1 = (1.0f + gContrastFactor) * (1.0f + gContrastFactor);
rsForEach(gScript, gIn, gOut, 0, 0); // for each element of the input allocation,
// call root() method on gScript
}
void root(const uchar4 *v_in, uchar4 *v_out, const void *usrData, uint32_t x, uint32_t y) {
float4 f4 = rsUnpackColor8888(*v_in); // extract RGBA values, see rs_core.rsh<|fim▁hole|> if (gBrightnessFactor != 0.0f) {
// Add brightness
f3 = f3 + gBrightnessFactor;
f3 = FClamp01Float3(f3);
}
// Modifiy contrast (multiplication)
if (ContrastFactor1 != 1.0f){
// Transform to range [-0.5f, 0.5f]
f3 = f3 - 0.5f;
// Multiply contrast factor
f3 = f3 * ContrastFactor1;
// Transform back to range [0.0f, 1.0f]
f3 = f3 + 0.5f;
f3 = FClamp01Float3(f3);
}
*v_out = rsPackColorTo8888(f3);
}<|fim▁end|> |
float3 f3 = f4.rgb;
// Modify brightness (addition) |
<|file_name|>point.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2006 Paolo Capriotti <p.capriotti@gmail.com>
(c) 2006 Maurizio Monge <maurizio.monge@kdemail.net>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
*/
#include <cmath>
#include <QDebug>
#include <QString>
#include "common.h"
#include "point.h"
Point::Point(int x, int y)
: x(x), y(y) {
}
Point::Point(const QPoint& p)
: x(p.x()), y(p.y()) {
}
Point::Point() {
}
Point::Point(const QString& str, int ysize) {
x = y = -1;
int length = str.length();
if(length == 0)
return;
if(str[0].isLetter()) {
char c = str[0].toAscii();
if(c >= 'a' && c <= 'z')
x = c-'a';
else if(c >= 'A' && c <= 'Z')
x = c-'A';
if(length>1)
y = ysize - str.mid(1).toInt();
}
else
y = ysize - str.toInt();
}
QString Point::row(int ysize) const {
if (y != -1)
return QString::number(ysize - y);
else
return QString();
}
QString Point::numcol(int xsize) const {
if (x != -1)
return QString::number(xsize - x);
else
return QString();
}
QString Point::col() const {
if (x != -1) {
if(x >= 26)
return QChar(static_cast<char>(x - 26 + 'A'));
else
return QChar(static_cast<char>(x + 'a'));
}
else
return QString();
}
QString Point::alpharow() const {
if (y != -1) {
if(y >= 26)
return QChar(static_cast<char>(y - 26 + 'A'));
else
return QChar(static_cast<char>(y + 'a'));
}
else
return QString();
}
QString Point::toString(int ysize) const {
return col() + row(ysize);
}
Point Point::operator+(const Point& other) const {
return Point(x + other.x, y + other.y);
}
Point Point::operator+=(const Point& other) {
return *this = *this + other;
}
Point Point::operator-() const {
return Point(-x, -y);
}
Point Point::operator-(const Point& other) const {
return Point(x - other.x, y - other.y);
}
Point Point::operator*(int n) const {
return Point(x * n, y * n);
}
Point Point::operator/(int n) const {
return Point(x / n, y / n);
}
Point Point::div(int n) const {
return Point(x >= 0 ? x / n : x / n - 1,
y >= 0 ? y / n : y / n - 1);
}
bool Point::operator==(const Point& other) const {
return x == other.x && y == other.y;
}
bool Point::operator!=(const Point& other) const {
return !(*this == other);
}
bool Point::operator<(const Point& other) const {
return y < other.y || (y == other.y && x < other.x);
}
bool Point::operator<=(const Point& other) const {
return y <= other.y || (y == other.y && x <= other.x);
}
bool Point::resembles(const Point& other) const {
return (other.x == -1 || x == other.x) &&
(other.y == -1 || y == other.y);
}
Point::operator QPoint() const {
return QPoint(x,y);
}
Point Point::normalizeInfinity() const {
return Point(
normalizeInfinityHelper(x),
normalizeInfinityHelper(y)
);
}
double Point::norm() const {
return sqrt((double)(x*x + y*y));
}
int Point::normalizeInfinityHelper(int n) const {
if (n == 0)
return 0;
else
return n > 0 ? 1 : -1;
}
QDebug operator<<(QDebug dbg, const Point& p) {
dbg << "(" << (p.x == -1 ? QString("?") : QString::number(p.x))<|fim▁hole|><|fim▁end|> | << ", " << (p.y == -1 ? QString("?") : QString::number(p.y)) << ")";
return dbg;
} |
<|file_name|>database.py<|end_file_name|><|fim▁begin|>from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
engine = create_engine('sqlite:////db/cities.sqlite', convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,<|fim▁hole|>
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
Base.metadata.create_all(bind=engine)<|fim▁end|> | autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property() |
<|file_name|>UserOverview.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2020 Graylog, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the Server Side Public License, version 1,
* as published by MongoDB, Inc.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Server Side Public License for more details.
*
* You should have received a copy of the Server Side Public License
* along with this program. If not, see
* <http://www.mongodb.com/licensing/server-side-public-license>.
*/
import { $PropertyType } from 'utility-types';
import * as Immutable from 'immutable';
export type AccountStatus = 'enabled' | 'disabled' | 'deleted';
export type UserOverviewJSON = {
id: string;
username: string;
full_name: string;
email: string;
external_user: boolean | null | undefined;
roles: Array<string>;
read_only: boolean | null | undefined;
session_active: boolean | null | undefined;
client_address: string;
last_activity: string | null | undefined;
enabled: boolean;
auth_service_id: string;
auth_service_uid: string;
account_status: AccountStatus;
};
type InternalState = {
id: string;
username: string;
fullName: string;
email: string;
roles: Immutable.Set<string>;
readOnly: boolean;
external: boolean;
sessionActive: boolean;
clientAddress: string;
lastActivity: string | null | undefined;
enabled: boolean;
authServiceId: string;
authServiceUid: string;
accountStatus: AccountStatus;
};
export default class UserOverview {
_value: InternalState;
constructor(
id: $PropertyType<InternalState, 'id'>,
username: $PropertyType<InternalState, 'username'>,
fullName: $PropertyType<InternalState, 'fullName'>,
email: $PropertyType<InternalState, 'email'>,
roles: $PropertyType<InternalState, 'roles'>,
readOnly: $PropertyType<InternalState, 'readOnly'>,
external: $PropertyType<InternalState, 'external'>,
sessionActive: $PropertyType<InternalState, 'sessionActive'>,
clientAddress: $PropertyType<InternalState, 'clientAddress'>,
lastActivity: $PropertyType<InternalState, 'lastActivity'>,
enabled: $PropertyType<InternalState, 'enabled'>,
authServiceId: $PropertyType<InternalState, 'authServiceId'>,
authServiceUid: $PropertyType<InternalState, 'authServiceUid'>,
accountStatus: $PropertyType<InternalState, 'accountStatus'>,
) {
this._value = {
id,
username,
fullName,
email,
roles,
readOnly,
external,
sessionActive,
clientAddress,
lastActivity,
enabled,
authServiceId,
authServiceUid,
accountStatus,
};
}
get id() {
return this._value.id;
}
get username() {
return this._value.username;
}
get name() {
return this._value.username;
}
get fullName() {
return this._value.fullName;
}
get description() {
return this._value.fullName;
}
get email() {
return this._value.email;
}
get roles() {
return this._value.roles;
}
get readOnly() {
return this._value.readOnly;
}
get external() {
return this._value.external;
}
get sessionActive() {
return this._value.sessionActive;
}
get clientAddress() {
return this._value.clientAddress;
}
get lastActivity() {
return this._value.lastActivity;
}
get enabled() {
return this._value.enabled;
}
get authServiceId() {
return this._value.authServiceId;
}
get authServiceUid() {
return this._value.authServiceUid;
}
get accountStatus() {
return this._value.accountStatus;
}
toBuilder() {
const {
id,
username,
fullName,
email,
roles,
readOnly,
external,
sessionActive,
clientAddress,
lastActivity,
enabled,
authServiceId,
authServiceUid,
accountStatus,
} = this._value;
// eslint-disable-next-line no-use-before-define,@typescript-eslint/no-use-before-define
return new Builder(Immutable.Map({
id,
username,
fullName,
email,
roles,
readOnly,
external,
sessionActive,
clientAddress,
lastActivity,
enabled,
authServiceId,
authServiceUid,
accountStatus,
}));
}
static create(
id: $PropertyType<InternalState, 'id'>,
username: $PropertyType<InternalState, 'username'>,
fullName: $PropertyType<InternalState, 'fullName'>,
email: $PropertyType<InternalState, 'email'>,
roles: $PropertyType<InternalState, 'roles'>,
readOnly: $PropertyType<InternalState, 'readOnly'>,
external: $PropertyType<InternalState, 'external'>,
sessionActive: $PropertyType<InternalState, 'sessionActive'>,
clientAddress: $PropertyType<InternalState, 'clientAddress'>,
lastActivity: $PropertyType<InternalState, 'lastActivity'>,
enabled: $PropertyType<InternalState, 'enabled'>,
authServiceId: $PropertyType<InternalState, 'authServiceId'>,
authServiceUid: $PropertyType<InternalState, 'authServiceUid'>,
accountStatus: $PropertyType<InternalState, 'accountStatus'>,
) {
return new UserOverview(
id,
username,
fullName,
email,
roles,
readOnly,
external,
sessionActive,
clientAddress,
lastActivity,
enabled,
authServiceId,
authServiceUid,
accountStatus,
);
}
toJSON(): UserOverviewJSON {
const {
id,
username,
fullName,
email,
roles,
readOnly,
external,
sessionActive,
clientAddress,
lastActivity,
enabled,
authServiceId,
authServiceUid,
accountStatus,
} = this._value;
return {
id,
username,
full_name: fullName,
email,
roles: roles.toArray(),
read_only: readOnly,
external_user: external,
session_active: sessionActive,
client_address: clientAddress,
last_activity: lastActivity,
enabled,
auth_service_id: authServiceId,
auth_service_uid: authServiceUid,
account_status: accountStatus,
};
}
static fromJSON(value: UserOverviewJSON) {
const {
id,
username,
full_name: fullName,
email,
roles,
read_only: readOnly,
external_user: external,
session_active: sessionActive,
client_address: clientAddress,
last_activity: lastActivity,
enabled,
auth_service_id: authServiceId,
auth_service_uid: authServiceUid,
account_status: accountStatus,
} = value;<|fim▁hole|> return UserOverview.create(id,
username,
fullName,
email,
Immutable.Set(roles),
readOnly ?? false,
external ?? false,
sessionActive ?? false,
clientAddress,
lastActivity,
enabled,
authServiceId,
authServiceUid,
accountStatus);
}
// eslint-disable-next-line no-use-before-define
static builder(): Builder {
// eslint-disable-next-line no-use-before-define,@typescript-eslint/no-use-before-define
return new Builder();
}
}
type BuilderState = Immutable.Map<string, any>;
class Builder {
value: BuilderState;
constructor(value: BuilderState = Immutable.Map()) {
this.value = value;
}
id(value: $PropertyType<InternalState, 'id'>) {
return new Builder(this.value.set('id', value));
}
username(value: $PropertyType<InternalState, 'username'>) {
return new Builder(this.value.set('username', value));
}
fullName(value: $PropertyType<InternalState, 'fullName'>) {
return new Builder(this.value.set('fullName', value));
}
email(value: $PropertyType<InternalState, 'email'>) {
return new Builder(this.value.set('email', value));
}
roles(value: $PropertyType<InternalState, 'roles'>) {
return new Builder(this.value.set('roles', value));
}
readOnly(value: $PropertyType<InternalState, 'readOnly'>) {
return new Builder(this.value.set('readOnly', value));
}
external(value: $PropertyType<InternalState, 'external'>) {
return new Builder(this.value.set('external', value));
}
sessionActive(value: $PropertyType<InternalState, 'sessionActive'>) {
return new Builder(this.value.set('sessionActive', value));
}
clientAddress(value: $PropertyType<InternalState, 'clientAddress'>) {
return new Builder(this.value.set('clientAddress', value));
}
lastActivity(value: $PropertyType<InternalState, 'lastActivity'>) {
return new Builder(this.value.set('lastActivity', value));
}
enabled(value: $PropertyType<InternalState, 'enabled'>) {
return new Builder(this.value.set('enabled', value));
}
authServiceId(value: $PropertyType<InternalState, 'authServiceId'>) {
return new Builder(this.value.set('authServiceId', value));
}
authServiceUid(value: $PropertyType<InternalState, 'authServiceUid'>) {
return new Builder(this.value.set('authServiceUid', value));
}
accountStatus(value: $PropertyType<InternalState, 'accountStatus'>) {
return new Builder(this.value.set('accountStatus', value));
}
build() {
const {
id,
username,
fullName,
email,
roles,
readOnly,
external,
sessionActive,
clientAddress,
lastActivity,
enabled,
authServiceId,
authServiceUid,
accountStatus,
} = this.value.toObject();
return new UserOverview(
id,
username,
fullName,
email,
roles,
readOnly,
external,
sessionActive,
clientAddress,
lastActivity,
enabled,
authServiceId,
authServiceUid,
accountStatus,
);
}
}<|fim▁end|> | |
<|file_name|>posts.client.routes.tests.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
describe('Posts Route Tests', function () {
// Initialize global variables
var $scope,
PostsService;
//We can start by loading the main application module
beforeEach(module(ApplicationConfiguration.applicationModuleName));
// The injector ignores leading and trailing underscores here (i.e. _$httpBackend_).
// This allows us to inject a service but then attach it to a variable
// with the same name as the service.
beforeEach(inject(function ($rootScope, _PostsService_) {
// Set a new global scope
$scope = $rootScope.$new();
PostsService = _PostsService_;
}));
describe('Route Config', function () {
describe('Main Route', function () {
var mainstate;
beforeEach(inject(function ($state) {
mainstate = $state.get('posts');
}));
it('Should have the correct URL', function () {
expect(mainstate.url).toEqual('/posts');
});
it('Should be abstract', function () {
expect(mainstate.abstract).toBe(true);
});
it('Should have template', function () {
expect(mainstate.template).toBe('<ui-view/>');
});
});
describe('View Route', function () {
var viewstate,
PostsController,
mockPost;
beforeEach(inject(function ($controller, $state, $templateCache) {
viewstate = $state.get('posts.view');
$templateCache.put('modules/posts/client/views/view-post.client.view.html', '');
// create mock Post
mockPost = new PostsService({
_id: '525a8422f6d0f87f0e407a33',
name: 'Post Name'
});
//Initialize Controller
PostsController = $controller('PostsController as vm', {
$scope: $scope,
postResolve: mockPost
});
}));
it('Should have the correct URL', function () {
expect(viewstate.url).toEqual('/:postId');
});
it('Should have a resolve function', function () {
expect(typeof viewstate.resolve).toEqual('object');
expect(typeof viewstate.resolve.postResolve).toEqual('function');
});
it('should respond to URL', inject(function ($state) {
expect($state.href(viewstate, {
postId: 1
})).toEqual('/posts/1');<|fim▁hole|>
it('should attach an Post to the controller scope', function () {
expect($scope.vm.post._id).toBe(mockPost._id);
});
it('Should not be abstract', function () {
expect(viewstate.abstract).toBe(undefined);
});
it('Should have templateUrl', function () {
expect(viewstate.templateUrl).toBe('modules/posts/client/views/view-post.client.view.html');
});
});
describe('Create Route', function () {
var createstate,
PostsController,
mockPost;
beforeEach(inject(function ($controller, $state, $templateCache) {
createstate = $state.get('posts.create');
$templateCache.put('modules/posts/client/views/form-post.client.view.html', '');
// create mock Post
mockPost = new PostsService();
//Initialize Controller
PostsController = $controller('PostsController as vm', {
$scope: $scope,
postResolve: mockPost
});
}));
it('Should have the correct URL', function () {
expect(createstate.url).toEqual('/create');
});
it('Should have a resolve function', function () {
expect(typeof createstate.resolve).toEqual('object');
expect(typeof createstate.resolve.postResolve).toEqual('function');
});
it('should respond to URL', inject(function ($state) {
expect($state.href(createstate)).toEqual('/posts/create');
}));
it('should attach an Post to the controller scope', function () {
expect($scope.vm.post._id).toBe(mockPost._id);
expect($scope.vm.post._id).toBe(undefined);
});
it('Should not be abstract', function () {
expect(createstate.abstract).toBe(undefined);
});
it('Should have templateUrl', function () {
expect(createstate.templateUrl).toBe('modules/posts/client/views/form-post.client.view.html');
});
});
describe('Edit Route', function () {
var editstate,
PostsController,
mockPost;
beforeEach(inject(function ($controller, $state, $templateCache) {
editstate = $state.get('posts.edit');
$templateCache.put('modules/posts/client/views/form-post.client.view.html', '');
// create mock Post
mockPost = new PostsService({
_id: '525a8422f6d0f87f0e407a33',
name: 'Post Name'
});
//Initialize Controller
PostsController = $controller('PostsController as vm', {
$scope: $scope,
postResolve: mockPost
});
}));
it('Should have the correct URL', function () {
expect(editstate.url).toEqual('/:postId/edit');
});
it('Should have a resolve function', function () {
expect(typeof editstate.resolve).toEqual('object');
expect(typeof editstate.resolve.postResolve).toEqual('function');
});
it('should respond to URL', inject(function ($state) {
expect($state.href(editstate, {
postId: 1
})).toEqual('/posts/1/edit');
}));
it('should attach an Post to the controller scope', function () {
expect($scope.vm.post._id).toBe(mockPost._id);
});
it('Should not be abstract', function () {
expect(editstate.abstract).toBe(undefined);
});
it('Should have templateUrl', function () {
expect(editstate.templateUrl).toBe('modules/posts/client/views/form-post.client.view.html');
});
xit('Should go to unauthorized route', function () {
});
});
});
});
})();<|fim▁end|> | })); |
<|file_name|>MobileStepper.js<|end_file_name|><|fim▁begin|>import _objectWithoutPropertiesLoose from "@babel/runtime/helpers/esm/objectWithoutPropertiesLoose";
import _extends from "@babel/runtime/helpers/esm/extends";
import * as React from 'react';
import PropTypes from 'prop-types';
import clsx from 'clsx';
import { integerPropType, deepmerge } from '@material-ui/utils';
import { unstable_composeClasses as composeClasses } from '@material-ui/unstyled';
import Paper from '../Paper';
import capitalize from '../utils/capitalize';
import LinearProgress from '../LinearProgress';
import useThemeProps from '../styles/useThemeProps';
import experimentalStyled from '../styles/experimentalStyled';
import mobileStepperClasses, { getMobileStepperUtilityClass } from './mobileStepperClasses';
import { jsxs as _jsxs } from "react/jsx-runtime";
import { jsx as _jsx } from "react/jsx-runtime";
const overridesResolver = (props, styles) => {
const {
styleProps
} = props;
return deepmerge(_extends({}, styles[`position${capitalize(styleProps.position)}`], {
[`& .${mobileStepperClasses.dots}`]: styles.dots,
[`& .${mobileStepperClasses.dot}`]: _extends({}, styles.dot, styleProps.dotActive && styles.dotActive),
[`& .${mobileStepperClasses.dotActive}`]: styles.dotActive,
[`& .${mobileStepperClasses.progress}`]: styles.progress
}), styles.root || {});
};
const useUtilityClasses = styleProps => {
const {
classes,
position
} = styleProps;
const slots = {
root: ['root', `position${capitalize(position)}`],
dots: ['dots'],
dot: ['dot'],
dotActive: ['dotActive'],
progress: ['progress']
};
return composeClasses(slots, getMobileStepperUtilityClass, classes);
};
const MobileStepperRoot = experimentalStyled(Paper, {}, {
name: 'MuiMobileStepper',
slot: 'Root',
overridesResolver
})(({
theme,
styleProps
}) => _extends({
/* Styles applied to the root element. */
display: 'flex',
flexDirection: 'row',
justifyContent: 'space-between',
alignItems: 'center',
background: theme.palette.background.default,
padding: 8
}, styleProps.position === 'bottom' && {
position: 'fixed',
bottom: 0,
left: 0,
right: 0,
zIndex: theme.zIndex.mobileStepper
}, styleProps.position === 'top' && {
position: 'fixed',
top: 0,
left: 0,
right: 0,
zIndex: theme.zIndex.mobileStepper
}));
const MobileStepperDots = experimentalStyled('div', {}, {
name: 'MuiMobileStepper',
slot: 'Dots'
})(({
styleProps
}) => _extends({}, styleProps.variant === 'dots' && {
display: 'flex',
flexDirection: 'row'
}));
const MobileStepperDot = experimentalStyled('div', {}, {
name: 'MuiMobileStepper',<|fim▁hole|> theme,
styleProps
}) => _extends({}, styleProps.variant === 'dots' && _extends({
transition: theme.transitions.create('background-color', {
duration: theme.transitions.duration.shortest
}),
backgroundColor: theme.palette.action.disabled,
borderRadius: '50%',
width: 8,
height: 8,
margin: '0 2px'
}, styleProps.dotActive && {
backgroundColor: theme.palette.primary.main
})));
const MobileStepperProgress = experimentalStyled(LinearProgress, {}, {
name: 'MuiMobileStepper',
slot: 'Progress'
})(({
styleProps
}) => _extends({}, styleProps.variant === 'progress' && {
width: '50%'
}));
const MobileStepper = /*#__PURE__*/React.forwardRef(function MobileStepper(inProps, ref) {
const props = useThemeProps({
props: inProps,
name: 'MuiMobileStepper'
});
const {
activeStep = 0,
backButton,
className,
LinearProgressProps,
nextButton,
position = 'bottom',
steps,
variant = 'dots'
} = props,
other = _objectWithoutPropertiesLoose(props, ["activeStep", "backButton", "className", "LinearProgressProps", "nextButton", "position", "steps", "variant"]);
const styleProps = _extends({}, props, {
activeStep,
position,
variant
});
const classes = useUtilityClasses(styleProps);
return /*#__PURE__*/_jsxs(MobileStepperRoot, _extends({
square: true,
elevation: 0,
className: clsx(classes.root, className),
ref: ref,
styleProps: styleProps
}, other, {
children: [backButton, variant === 'text' && /*#__PURE__*/_jsxs(React.Fragment, {
children: [activeStep + 1, " / ", steps]
}), variant === 'dots' && /*#__PURE__*/_jsx(MobileStepperDots, {
styleProps: styleProps,
className: classes.dots,
children: [...new Array(steps)].map((_, index) => /*#__PURE__*/_jsx(MobileStepperDot, {
className: clsx(classes.dot, index === activeStep && classes.dotActive),
styleProps: _extends({}, styleProps, {
dotActive: index === activeStep
})
}, index))
}), variant === 'progress' && /*#__PURE__*/_jsx(MobileStepperProgress, _extends({
styleProps: styleProps,
className: classes.progress,
variant: "determinate",
value: Math.ceil(activeStep / (steps - 1) * 100)
}, LinearProgressProps)), nextButton]
}));
});
process.env.NODE_ENV !== "production" ? MobileStepper.propTypes
/* remove-proptypes */
= {
// ----------------------------- Warning --------------------------------
// | These PropTypes are generated from the TypeScript type definitions |
// | To update them edit the d.ts file and run "yarn proptypes" |
// ----------------------------------------------------------------------
/**
* Set the active step (zero based index).
* Defines which dot is highlighted when the variant is 'dots'.
* @default 0
*/
activeStep: integerPropType,
/**
* A back button element. For instance, it can be a `Button` or an `IconButton`.
*/
backButton: PropTypes.node,
/**
* Override or extend the styles applied to the component.
*/
classes: PropTypes.object,
/**
* @ignore
*/
className: PropTypes.string,
/**
* Props applied to the `LinearProgress` element.
*/
LinearProgressProps: PropTypes.object,
/**
* A next button element. For instance, it can be a `Button` or an `IconButton`.
*/
nextButton: PropTypes.node,
/**
* Set the positioning type.
* @default 'bottom'
*/
position: PropTypes.oneOf(['bottom', 'static', 'top']),
/**
* The total steps.
*/
steps: integerPropType.isRequired,
/**
* The system prop that allows defining system overrides as well as additional CSS styles.
*/
sx: PropTypes.object,
/**
* The variant to use.
* @default 'dots'
*/
variant: PropTypes.oneOf(['dots', 'progress', 'text'])
} : void 0;
export default MobileStepper;<|fim▁end|> | slot: 'Dot'
})(({ |
<|file_name|>jquery-selective.es.js<|end_file_name|><|fim▁begin|>/**
* jQuery Selective v0.3.5
* https://github.com/amazingSurge/jquery-selective
*
* Copyright (c) amazingSurge
* Released under the LGPL-3.0 license
*/
import $$1 from 'jquery';
/*eslint no-empty-function: "off"*/
var DEFAULTS = {
namespace: 'selective',
buildFromHtml: true,
closeOnSelect: false,
local: null,
selected: null,
withSearch: false,
searchType: null, //'change' or 'keyup'
ajax: {
work: false,
url: null,
quietMills: null,
loadMore: false,
pageSize: null
},
query: function() {}, //function(api, search_text, page) {},
tpl: {
frame: function() {
return `<div class="${this.namespace}"><div class="${this.namespace}-trigger">${this.options.tpl.triggerButton.call(this)}<div class="${this.namespace}-trigger-dropdown"><div class="${this.namespace}-list-wrap">${this.options.tpl.list.call(this)}</div></div></div>${this.options.tpl.items.call(this)}</div>`;
},
search: function() {
return `<input class="${this.namespace}-search" type="text" placeholder="Search...">`;
},
select: function() {
return `<select class="${this.namespace}-select" name="${this.namespace}" multiple="multiple"></select>`;
},
optionValue: function(data) {
if('name' in data) {
return data.name;
}
return data;
},
option: function(content) {
return `<option value="${this.options.tpl.optionValue.call(this)}">${content}</option>`;
},
items: function() {
return `<ul class="${this.namespace}-items"></ul>`;
},
item: function(content) {
return `<li class="${this.namespace}-item">${content}${this.options.tpl.itemRemove.call(this)}</li>`;
},
itemRemove: function() {
return `<span class="${this.namespace}-remove">x</span>`;
},
triggerButton: function() {
return `<div class="${this.namespace}-trigger-button">Add</div>`;
},
list: function() {
return `<ul class="${this.namespace}-list"></ul>`;
},
listItem: function(content) {
return `<li class="${this.namespace}-list-item">${content}</li>`;
}
},
onBeforeShow: null,
onAfterShow: null,
onBeforeHide: null,
onAfterHide: null,
onBeforeSearch: null,
onAfterSearch: null,
onBeforeSelected: null,
onAfterSelected: null,
onBeforeUnselect: null,
onAfterUnselect: null,
onBeforeItemRemove: null,
onAfterItemRemove: null,
onBeforeItemAdd: null,
onAfterItemAdd: null
};
class Options {
constructor(instance) {
this.instance = instance;
}
getOptions() {
this.instance.$options = this.instance.$select.find('option');
return this.instance.$options;
}
select(opt) {
$(opt).prop('selected', true);
return this.instance;
}
unselect(opt) {
$(opt).prop('selected', false);
return this.instance;
}
add(data) {
/*eslint consistent-return: "off"*/
if (this.instance.options.buildFromHtml === false &&
this.instance.getItem('option', this.instance.$select, this.instance.options.tpl.optionValue(data)) === undefined) {
const $option = $(this.instance.options.tpl.option.call(this.instance, data));
this.instance.setIndex($option, data);
this.instance.$select.append($option);
return $option;
}
}
remove(opt) {
$(opt).remove();
return this.instance;
}
}
class List {
constructor(instance) {
this.instance = instance;
}
build(data) {
const $list = $('<ul></ul>');
const $options = this.instance._options.getOptions();
if (this.instance.options.buildFromHtml === true) {
if ($options.length !== 0) {
$.each($options, (i, n) => {
const $li = $(this.instance.options.tpl.listItem.call(this.instance, n.text));
const $n = $(n);
this.instance.setIndex($li, $n);
if ($n.attr('selected') !== undefined) {
this.instance.select($li);
}
$list.append($li);
});
}
} else if (data !== null) {
$.each(data, i => {
const $li = $(this.instance.options.tpl.listItem.call(this.instance, data[i]));
this.instance.setIndex($li, data[i]);
$list.append($li);
});
if ($options.length !== 0) {
$.each($options, (i, n) => {
const $n = $(n);
const li = this.instance.getItem('li', $list, this.instance.options.tpl.optionValue($n.data('selective_index')));
if (li !== undefined) {
this.instance._list.select(li);
}
});
}
}
this.instance.$list.append($list.children('li'));
return this.instance;
}
buildSearch() {
if (this.instance.options.withSearch === true) {
this.instance.$triggerDropdown.prepend(this.instance.options.tpl.search.call(this.instance));
this.instance.$search = this.instance.$triggerDropdown.find(`.${this.instance.namespace}-search`);
}
return this.instance;
}
select(obj) {
this.instance._trigger("beforeSelected");
$(obj).addClass(`${this.instance.namespace}-selected`);
this.instance._trigger("afterSelected");
return this.instance;
}
unselect(obj) {
this.instance._trigger("beforeUnselected");
$(obj).removeClass(`${this.instance.namespace}-selected`);
this.instance._trigger("afterUnselected");
return this.instance;
}
click() {
const that = this;
this.instance.$list.on('click', 'li', function() {
const $this = $(this);
if (!$this.hasClass(`${that.instance.namespace}-selected`)) {
that.instance.select($this);
}
});
}
filter(val) {
$.expr[':'].Contains = (a, i, m) => jQuery(a).text().toUpperCase().includes(m[3].toUpperCase());
if (val) {
this.instance.$list.find(`li:not(:Contains(${val}))`).slideUp();
this.instance.$list.find(`li:Contains(${val})`).slideDown();
} else {
this.instance.$list.children('li').slideDown();
}
return this.instance;
}
loadMore() {
const pageMax = this.instance.options.ajax.pageSize || 9999;
this.instance.$listWrap.on('scroll.selective', () => {
if (pageMax > this.instance.page) {
const listHeight = this.instance.$list.outerHeight(true);
const wrapHeight = this.instance.$listWrap.outerHeight();
const wrapScrollTop = this.instance.$listWrap.scrollTop();
const below = listHeight - wrapHeight - wrapScrollTop;
if (below === 0) {
this.instance.options.query(this.instance, this.instance.$search.val(), ++this.instance.page);
}
}
});
return this.instance;
}
loadMoreRemove() {
this.instance.$listWrap.off('scroll.selective');
return this.instance;
}
}
class Search {
constructor(instance) {
this.instance = instance;
}
change() {
this.instance.$search.change(() => {
this.instance._trigger("beforeSearch");
if (this.instance.options.buildFromHtml === true) {
this.instance._list.filter(this.instance.$search.val());
} else if (this.instance.$search.val() !== '') {
this.instance.page = 1;
this.instance.options.query(this.instance, this.instance.$search.val(), this.instance.page);
} else {
this.instance.update(this.instance.options.local);
}
this.instance._trigger("afterSearch");
});
}
keyup() {
const quietMills = this.instance.options.ajax.quietMills || 1000;
let oldValue = '';
let currentValue = '';
let timeout;
this.instance.$search.on('keyup', e => {
this.instance._trigger("beforeSearch");
currentValue = this.instance.$search.val();
if (this.instance.options.buildFromHtml === true) {
if (currentValue !== oldValue) {
this.instance._list.filter(currentValue);
}
} else if (currentValue !== oldValue || e.keyCode === 13) {
window.clearTimeout(timeout);
timeout = window.setTimeout(() => {
if (currentValue !== '') {
this.instance.page = 1;
this.instance.options.query(this.instance, currentValue, this.instance.page);
} else {
this.instance.update(this.instance.options.local);
}
}, quietMills);
}
oldValue = currentValue;
this.instance._trigger("afterSearch");
});
}
bind(type) {
if (type === 'change') {
this.change();
} else if (type === 'keyup') {
this.keyup();
}
}
}
class Items {
constructor(instance) {
this.instance = instance;
}
withDefaults(data) {
if (data !== null) {
$.each(data, i => {
this.instance._options.add(data[i]);
this.instance._options.select(this.instance.getItem('option', this.instance.$select, this.instance.options.tpl.optionValue(data[i])));
this.instance._items.add(data[i]);
});
}
}
add(data, content) {
let $item;
let fill;
if (this.instance.options.buildFromHtml === true) {
fill = content;
} else {
fill = data;
}
$item = $(this.instance.options.tpl.item.call(this.instance, fill));
this.instance.setIndex($item, data);
this.instance.$items.append($item);
}
remove(obj) {
obj = $(obj);
let $li;
let $option;
if (this.instance.options.buildFromHtml === true) {
this.instance._list.unselect(obj.data('selective_index'));
this.instance._options.unselect(obj.data('selective_index').data('selective_index'));
} else {
$li = this.instance.getItem('li', this.instance.$list, this.instance.options.tpl.optionValue(obj.data('selective_index')));
if ($li !== undefined) {
this.instance._list.unselect($li);
}
$option = this.instance.getItem('option', this.instance.$select, this.instance.options.tpl.optionValue(obj.data('selective_index')));
this.instance._options.unselect($option)._options.remove($option);
}
obj.remove();
return this.instance;
}
click() {
const that = this;
this.instance.$items.on('click', `.${this.instance.namespace}-remove`, function() {
const $this = $(this);
const $item = $this.parents('li');
that.instance.itemRemove($item);
});
}
}
const NAMESPACE$1 = 'selective';
/**
* Plugin constructor
**/
class Selective {
constructor(element, options = {}) {
this.element = element;
this.$element = $$1(element).hide() || $$1('<select></select>');
this.options = $$1.extend(true, {}, DEFAULTS, options);
this.namespace = this.options.namespace;
const $frame = $$1(this.options.tpl.frame.call(this));
//get the select
const _build = () => {
this.$element.html(this.options.tpl.select.call(this));
return this.$element.children('select');
};
this.$select = this.$element.is('select') === true ? this.$element : _build();
this.$element.after($frame);
this.init();
this.opened = false;
}
init() {
this.$selective = this.$element.next(`.${this.namespace}`);
this.$items = this.$selective.find(`.${this.namespace}-items`);
this.$trigger = this.$selective.find(`.${this.namespace}-trigger`);
this.$triggerButton = this.$selective.find(`.${this.namespace}-trigger-button`);
this.$triggerDropdown = this.$selective.find(`.${this.namespace}-trigger-dropdown`);
this.$listWrap = this.$selective.find(`.${this.namespace}-list-wrap`);
this.$list = this.$selective.find(`.${this.namespace}-list`);
this._list = new List(this);
this._options = new Options(this);
this._search = new Search(this);
this._items = new Items(this);
this._items.withDefaults(this.options.selected);
this.update(this.options.local)._list.buildSearch();
this.$triggerButton.on('click', () => {
if (this.opened === false) {
this.show();
} else {
this.hide();
}
});
this._list.click(this);
this._items.click(this);
if (this.options.withSearch === true) {
this._search.bind(this.options.searchType);
}
this._trigger('ready');
}
_trigger(eventType, ...params) {
let data = [this].concat(params);
// event
this.$element.trigger(`${NAMESPACE$1}::${eventType}`, data);
// callback
eventType = eventType.replace(/\b\w+\b/g, (word) => {
return word.substring(0, 1).toUpperCase() + word.substring(1);
});
let onFunction = `on${eventType}`;
if (typeof this.options[onFunction] === 'function') {
this.options[onFunction].apply(this, params);
}
}
_show() {
$$1(document).on('click.selective', e => {
if (this.options.closeOnSelect === true) {
if ($$1(e.target).closest(this.$triggerButton).length === 0 &&
$$1(e.target).closest(this.$search).length === 0) {
this._hide();
}
} else if ($$1(e.target).closest(this.$trigger).length === 0) {
this._hide();
}
});
this.$trigger.addClass(`${this.namespace}-active`);
this.opened = true;
if (this.options.ajax.loadMore === true) {
this._list.loadMore();
}
return this;
}
_hide() {
$$1(document).off('click.selective');
this.$trigger.removeClass(`${this.namespace}-active`);
this.opened = false;
if (this.options.ajax.loadMore === true) {
this._list.loadMoreRemove();
}
return this;
}
show() {
this._trigger("beforeShow");
this._show();
this._trigger("afterShow");
return this;
}
hide() {
this._trigger("beforeHide");
this._hide();
this._trigger("afterHide");
return this;
}
select($li) {
this._list.select($li);
const data = $li.data('selective_index');
if (this.options.buildFromHtml === true) {
this._options.select(data);
this.itemAdd($li, data.text());
} else {
this._options.add(data);
this._options.select(this.getItem('option', this.$select, this.options.tpl.optionValue(data)));
this.itemAdd(data);
}
return this;
}
unselect($li) {
this._list.unselect($li);
return this;
}
setIndex(obj, index) {
obj.data('selective_index', index);
return this;
}
getItem(type, $list, index) {
const $items = $list.children(type);
let position = '';
for (let i = 0; i < $items.length; i++) {
if (this.options.tpl.optionValue($items.eq(i).data('selective_index')) === index) {
position = i;
}
}
return position === '' ? undefined : $items.eq(position);
}
itemAdd(data, content) {
this._trigger("beforeItemAdd");
this._items.add(data, content);
this._trigger("afterItemAdd");
return this;
}
itemRemove($li) {
this._trigger("beforeItemRemove");
this._items.remove($li);
this._trigger("afterItemRemove");
return this;
}
optionAdd(data) {
this._options.add(data);
return this;
}
optionRemove(opt) {
this._options.remove(opt);
return this;
}
update(data) {
this.$list.empty();
this.page = 1;
if (data !== null) {
this._list.build(data);
} else {
this._list.build();
}
return this;
}
destroy() {
this.$selective.remove();
this.$element.show();
$$1(document).off('click.selective');
this._trigger('destroy');
}
static setDefaults(options) {
$$1.extend(true, DEFAULTS, $$1.isPlainObject(options) && options);
}
}
var info = {
version:'0.3.5'
};
const NAMESPACE = 'selective';
const OtherSelective = $$1.fn.selective;
const jQuerySelective = function(options, ...args) {
if (typeof options === 'string') {
const method = options;
if (/^_/.test(method)) {
return false;
} else if ((/^(get)/.test(method))) {
const instance = this.first().data(NAMESPACE);
if (instance && typeof instance[method] === 'function') {
return instance[method](...args);
}<|fim▁hole|> const instance = $$1.data(this, NAMESPACE);
if (instance && typeof instance[method] === 'function') {
instance[method](...args);
}
});
}
}
return this.each(function() {
if (!$$1(this).data(NAMESPACE)) {
$$1(this).data(NAMESPACE, new Selective(this, options));
}
});
};
$$1.fn.selective = jQuerySelective;
$$1.selective = $$1.extend({
setDefaults: Selective.setDefaults,
noConflict: function() {
$$1.fn.selective = OtherSelective;
return jQuerySelective;
}
}, info);<|fim▁end|> | } else {
return this.each(function() { |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Functions for filtering images.
mod median;
pub use self::median::median_filter;
use image::{GrayImage, GenericImage, GenericImageView, ImageBuffer, Luma, Pixel, Primitive};
use integral_image::{column_running_sum, row_running_sum};
use map::{WithChannel, ChannelMap};
use definitions::{Clamp, Image};
use num::Num;
use conv::ValueInto;
use math::cast;
use std::cmp::{min, max};
use std::f32;
/// Convolves an 8bpp grayscale image with a kernel of width (2 * `x_radius` + 1)
/// and height (2 * `y_radius` + 1) whose entries are equal and
/// sum to one. i.e. each output pixel is the unweighted mean of
/// a rectangular region surrounding its corresponding input pixel.
/// We handle locations where the kernel would extend past the image's
/// boundary by treating the image as if its boundary pixels were
/// repeated indefinitely.
// TODO: for small kernels we probably want to do the convolution
// TODO: directly instead of using an integral image.
// TODO: more formats!
pub fn box_filter(image: &GrayImage, x_radius: u32, y_radius: u32) -> Image<Luma<u8>> {
let (width, height) = image.dimensions();
let mut out = ImageBuffer::new(width, height);
if width == 0 || height == 0 {
return out;
}
let kernel_width = 2 * x_radius + 1;
let kernel_height = 2 * y_radius + 1;
let mut row_buffer = vec![0; (width + 2 * x_radius) as usize];
for y in 0..height {
row_running_sum(image, y, &mut row_buffer, x_radius);
let val = row_buffer[(2 * x_radius) as usize] / kernel_width;
unsafe {
out.unsafe_put_pixel(0, y, Luma([val as u8]));
}
for x in 1..width {
// TODO: This way we pay rounding errors for each of the
// TODO: x and y convolutions. Is there a better way?
let u = (x + 2 * x_radius) as usize;
let l = (x - 1) as usize;
let val = (row_buffer[u] - row_buffer[l]) / kernel_width;
unsafe {
out.unsafe_put_pixel(x, y, Luma([val as u8]));
}
}
}
let mut col_buffer = vec![0; (height + 2 * y_radius) as usize];
for x in 0..width {
column_running_sum(&out, x, &mut col_buffer, y_radius);
let val = col_buffer[(2 * y_radius) as usize] / kernel_height;
unsafe {
out.unsafe_put_pixel(x, 0, Luma([val as u8]));
}
for y in 1..height {
let u = (y + 2 * y_radius) as usize;
let l = (y - 1) as usize;
let val = (col_buffer[u] - col_buffer[l]) / kernel_height;
unsafe {
out.unsafe_put_pixel(x, y, Luma([val as u8]));
}
}
}
out
}
/// A 2D kernel, used to filter images via convolution.
pub struct Kernel<'a, K: 'a> {
data: &'a [K],
width: u32,
height: u32,
}
impl<'a, K: Num + Copy + 'a> Kernel<'a, K> {
/// Construct a kernel from a slice and its dimensions. The input slice is
/// in row-major form.
pub fn new(data: &'a [K], width: u32, height: u32) -> Kernel<'a, K> {
assert!(
width * height == data.len() as u32,
format!(
"Invalid kernel len: expecting {}, found {}",
width * height,
data.len()
)
);
Kernel {
data: data,
width: width,
height: height,
}
}
/// Returns 2d correlation of an image. Intermediate calculations are performed
/// at type K, and the results converted to pixel Q via f. Pads by continuity.
pub fn filter<P, F, Q>(&self, image: &Image<P>, mut f: F) -> Image<Q>
where
P: Pixel + 'static,
<P as Pixel>::Subpixel: ValueInto<K>,
Q: Pixel + 'static,
F: FnMut(&mut Q::Subpixel, K) -> (),
{
let (width, height) = image.dimensions();
let mut out = Image::<Q>::new(width, height);
let num_channels = P::channel_count() as usize;
let zero = K::zero();
let mut acc = vec![zero; num_channels];
let (k_width, k_height) = (self.width, self.height);
for y in 0..height {
for x in 0..width {
for k_y in 0..k_height {
let y_p = min(
height + height - 1,
max(height, height + y + k_y - k_height / 2),
) - height;
for k_x in 0..k_width {
let x_p = min(
width + width - 1,
max(width, width + x + k_x - k_width / 2),
) - width;
let (p, k) = unsafe {
(
image.unsafe_get_pixel(x_p, y_p),
*self.data.get_unchecked((k_y * k_width + k_x) as usize),
)
};
accumulate(&mut acc, &p, k);
}
}
let out_channels = out.get_pixel_mut(x, y).channels_mut();
for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) {
f(c, *a);
*a = zero;
}
}
}
out
}
}
#[inline]
fn gaussian(x: f32, r: f32) -> f32 {
((2.0 * f32::consts::PI).sqrt() * r).recip() * (-x.powi(2) / (2.0 * r.powi(2))).exp()
}
/// Construct a one dimensional float-valued kernel for performing a Gaussian blur
/// with standard deviation sigma.
fn gaussian_kernel_f32(sigma: f32) -> Vec<f32> {
let kernel_radius = (2.0 * sigma).ceil() as usize;
let mut kernel_data = vec![0.0; 2 * kernel_radius + 1];
for i in 0..kernel_radius + 1 {
let value = gaussian(i as f32, sigma);
kernel_data[kernel_radius + i] = value;
kernel_data[kernel_radius - i] = value;
}
kernel_data
}
/// Blurs an image using a Gaussian of standard deviation sigma.
/// The kernel used has type f32 and all intermediate calculations are performed
/// at this type.
// TODO: Integer type kernel, approximations via repeated box filter.
pub fn gaussian_blur_f32<P>(image: &Image<P>, sigma: f32) -> Image<P>
where
P: Pixel + 'static,
<P as Pixel>::Subpixel: ValueInto<f32> + Clamp<f32>,
{
let kernel = gaussian_kernel_f32(sigma);
separable_filter_equal(image, &kernel)
}
/// Returns 2d correlation of view with the outer product of the 1d
/// kernels `h_kernel` and `v_kernel`.
pub fn separable_filter<P, K>(image: &Image<P>, h_kernel: &[K], v_kernel: &[K]) -> Image<P>
where
P: Pixel + 'static,
<P as Pixel>::Subpixel: ValueInto<K> + Clamp<K>,
K: Num + Copy,
{
let h = horizontal_filter(image, h_kernel);
vertical_filter(&h, v_kernel)
}
/// Returns 2d correlation of an image with the outer product of the 1d
/// kernel filter with itself.
pub fn separable_filter_equal<P, K>(image: &Image<P>, kernel: &[K]) -> Image<P>
where
P: Pixel + 'static,
<P as Pixel>::Subpixel: ValueInto<K> + Clamp<K>,
K: Num + Copy,
{
separable_filter(image, kernel, kernel)
}
/// Returns 2d correlation of an image with a 3x3 row-major kernel. Intermediate calculations are
/// performed at type K, and the results clamped to subpixel type S. Pads by continuity.
pub fn filter3x3<P, K, S>(image: &Image<P>, kernel: &[K]) -> Image<ChannelMap<P, S>>
where
P::Subpixel: ValueInto<K>,
S: Clamp<K> + Primitive + 'static,
P: WithChannel<S> + 'static,
K: Num + Copy,
{
let kernel = Kernel::new(kernel, 3, 3);
kernel.filter(image, |channel, acc| *channel = S::clamp(acc))
}
/// Returns horizontal correlations between an image and a 1d kernel.
/// Pads by continuity. Intermediate calculations are performed at
/// type K.
pub fn horizontal_filter<P, K>(image: &Image<P>, kernel: &[K]) -> Image<P>
where
P: Pixel + 'static,
<P as Pixel>::Subpixel: ValueInto<K> + Clamp<K>,
K: Num + Copy,
{
// Don't replace this with a call to Kernel::filter without
// checking the benchmark results. At the time of writing this
// specialised implementation is faster.
let (width, height) = image.dimensions();
let mut out = Image::<P>::new(width, height);
let zero = K::zero();
let mut acc = vec![zero; P::channel_count() as usize];
let k_width = kernel.len() as i32;
// Typically the image side will be much larger than the kernel length.
// In that case we can remove a lot of bounds checks for most pixels.
if k_width >= width as i32 {
for y in 0..height {
for x in 0..width {
for (i, k) in kernel.iter().enumerate() {
let x_unchecked = (x as i32) + i as i32 - k_width / 2;
let x_p = max(0, min(x_unchecked, width as i32 - 1)) as u32;
let p = unsafe { image.unsafe_get_pixel(x_p, y) };
accumulate(&mut acc, &p, *k);
}
let out_channels = out.get_pixel_mut(x, y).channels_mut();
for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) {
*c = <P as Pixel>::Subpixel::clamp(*a);
*a = zero;
}
}
}
return out;
}
let half_k = k_width / 2;
for y in 0..height {
// Left margin - need to check lower bound only
for x in 0..half_k {
for (i, k) in kernel.iter().enumerate() {
let x_unchecked = (x as i32) + i as i32 - k_width / 2;
let x_p = max(0, x_unchecked) as u32;
let p = unsafe { image.unsafe_get_pixel(x_p, y) };
accumulate(&mut acc, &p, *k);
}
let out_channels = out.get_pixel_mut(x as u32, y).channels_mut();
for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) {
*c = <P as Pixel>::Subpixel::clamp(*a);
*a = zero;
}
}
// Neither margin - don't need bounds check on either side
for x in half_k..(width as i32 - half_k) {
for (i, k) in kernel.iter().enumerate() {
let x_unchecked = (x as i32) + i as i32 - k_width / 2;
let x_p = x_unchecked as u32;
let p = unsafe { image.unsafe_get_pixel(x_p, y) };
accumulate(&mut acc, &p, *k);
}
let out_channels = out.get_pixel_mut(x as u32, y).channels_mut();
for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) {
*c = <P as Pixel>::Subpixel::clamp(*a);
*a = zero;
}
}
// Right margin - need to check upper bound only
for x in (width as i32 - half_k)..(width as i32) {
for (i, k) in kernel.iter().enumerate() {
let x_unchecked = (x as i32) + i as i32 - k_width / 2;
let x_p = min(x_unchecked, width as i32 - 1) as u32;
let p = unsafe { image.unsafe_get_pixel(x_p, y) };
accumulate(&mut acc, &p, *k);
}
let out_channels = out.get_pixel_mut(x as u32, y).channels_mut();
for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) {
*c = <P as Pixel>::Subpixel::clamp(*a);
*a = zero;
}
}
}
out
}
/// Returns horizontal correlations between an image and a 1d kernel.
/// Pads by continuity.
pub fn vertical_filter<P, K>(image: &Image<P>, kernel: &[K]) -> Image<P>
where
P: Pixel + 'static,
<P as Pixel>::Subpixel: ValueInto<K> + Clamp<K>,
K: Num + Copy,
{
// Don't replace this with a call to Kernel::filter without
// checking the benchmark results. At the time of writing this
// specialised implementation is faster.
let (width, height) = image.dimensions();
let mut out = Image::<P>::new(width, height);
let zero = K::zero();
let mut acc = vec![zero; P::channel_count() as usize];
let k_height = kernel.len() as i32;
// Typically the image side will be much larger than the kernel length.
// In that case we can remove a lot of bounds checks for most pixels.
if k_height >= height as i32 {
for y in 0..height {
for x in 0..width {
for (i, k) in kernel.iter().enumerate() {
let y_unchecked = (y as i32) + i as i32 - k_height / 2;
let y_p = max(0, min(y_unchecked, height as i32 - 1)) as u32;
let p = unsafe { image.unsafe_get_pixel(x, y_p) };
accumulate(&mut acc, &p, *k);
}
let out_channels = out.get_pixel_mut(x, y).channels_mut();
for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) {
*c = <P as Pixel>::Subpixel::clamp(*a);
*a = zero;
}
}
}
return out;
}
let half_k = k_height / 2;
// Top margin - need to check lower bound only
for y in 0..half_k {
for x in 0..width {
for (i, k) in kernel.iter().enumerate() {
let y_unchecked = (y as i32) + i as i32 - k_height / 2;
let y_p = max(0, y_unchecked) as u32;
let p = unsafe { image.unsafe_get_pixel(x, y_p) };
accumulate(&mut acc, &p, *k);
}
let out_channels = out.get_pixel_mut(x, y as u32).channels_mut();
for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) {
*c = <P as Pixel>::Subpixel::clamp(*a);
*a = zero;
}
}
}
// Neither margin - don't need bounds check on either side
for y in half_k..(height as i32 - half_k) {
for x in 0..width {
for (i, k) in kernel.iter().enumerate() {
let y_unchecked = (y as i32) + i as i32 - k_height / 2;
let y_p = y_unchecked as u32;
let p = unsafe { image.unsafe_get_pixel(x, y_p) };
accumulate(&mut acc, &p, *k);
}
let out_channels = out.get_pixel_mut(x, y as u32).channels_mut();
for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) {
*c = <P as Pixel>::Subpixel::clamp(*a);
*a = zero;
}
}
}
// Right margin - need to check upper bound only
for y in (height as i32 - half_k)..(height as i32) {
for x in 0..width {
for (i, k) in kernel.iter().enumerate() {
let y_unchecked = (y as i32) + i as i32 - k_height / 2;
let y_p = min(y_unchecked, height as i32 - 1) as u32;
let p = unsafe { image.unsafe_get_pixel(x, y_p) };
accumulate(&mut acc, &p, *k);
}
let out_channels = out.get_pixel_mut(x, y as u32).channels_mut();
for (a, c) in acc.iter_mut().zip(out_channels.iter_mut()) {
*c = <P as Pixel>::Subpixel::clamp(*a);
*a = zero;
}
}
}
out
}
fn accumulate<P, K>(acc: &mut [K], pixel: &P, weight: K)
where
P: Pixel,
<P as Pixel>::Subpixel: ValueInto<K>,
K: Num + Copy,
{
for i in 0..(P::channel_count() as usize) {
acc[i as usize] = acc[i as usize] + cast(pixel.channels()[i]) * weight;
}
}
#[cfg(test)]
mod test {
use super::*;
use utils::{gray_bench_image, rgb_bench_image};
use image::{GenericImage, GrayImage, ImageBuffer, Luma, Rgb};
use definitions::{Clamp, Image};
use image::imageops::blur;
use test::{Bencher, black_box};
use std::cmp::{min, max};
#[test]
fn test_box_filter_handles_empty_images() {
let _ = box_filter(&GrayImage::new(0, 0), 3, 3);
let _ = box_filter(&GrayImage::new(1, 0), 3, 3);
let _ = box_filter(&GrayImage::new(0, 1), 3, 3);
}
#[test]
fn test_box_filter() {
let image = gray_image!(
1, 2, 3;
4, 5, 6;
7, 8, 9);
// For this image we get the same answer from the two 1d
// convolutions as from doing the 2d convolution in one step
// (but we needn't in general, as in the former case we're
// clipping to an integer value twice).
let expected = gray_image!(
2, 3, 3;
4, 5, 5;
6, 7, 7);
assert_pixels_eq!(box_filter(&image, 1, 1), expected);
}
#[bench]
fn bench_box_filter(b: &mut Bencher) {
let image = gray_bench_image(500, 500);
b.iter(|| {
let filtered = box_filter(&image, 7, 7);<|fim▁hole|> #[test]
fn test_separable_filter() {
let image = gray_image!(
1, 2, 3;
4, 5, 6;
7, 8, 9);
// Lazily copying the box_filter test case
let expected = gray_image!(
2, 3, 3;
4, 5, 5;
6, 7, 7);
let kernel = vec![1f32 / 3f32; 3];
let filtered = separable_filter_equal(&image, &kernel);
assert_pixels_eq!(filtered, expected);
}
#[test]
fn test_separable_filter_integer_kernel() {
let image = gray_image!(
1, 2, 3;
4, 5, 6;
7, 8, 9);
let expected = gray_image!(
21, 27, 33;
39, 45, 51;
57, 63, 69);
let kernel = vec![1i32; 3];
let filtered = separable_filter_equal(&image, &kernel);
assert_pixels_eq!(filtered, expected);
}
#[bench]
fn bench_separable_filter(b: &mut Bencher) {
let image = gray_bench_image(300, 300);
let h_kernel = vec![1f32 / 5f32; 5];
let v_kernel = vec![0.1f32, 0.4f32, 0.3f32, 0.1f32, 0.1f32];
b.iter(|| {
let filtered = separable_filter(&image, &h_kernel, &v_kernel);
black_box(filtered);
});
}
/// Reference implementation of horizontal_filter. Used to validate
/// the (presumably faster) actual implementation.
fn horizontal_filter_reference(image: &GrayImage, kernel: &[f32]) -> GrayImage {
let (width, height) = image.dimensions();
let mut out = GrayImage::new(width, height);
for y in 0..height {
for x in 0..width {
let mut acc = 0f32;
for k in 0..kernel.len() {
let mut x_unchecked = x as i32 + k as i32 - (kernel.len() / 2) as i32;
x_unchecked = max(0, x_unchecked);
x_unchecked = min(x_unchecked, width as i32 - 1);
let x_checked = x_unchecked as u32;
let color = image.get_pixel(x_checked, y)[0];
let weight = kernel[k];
acc += color as f32 * weight;
}
let clamped = u8::clamp(acc);
out.put_pixel(x, y, Luma([clamped]));
}
}
out
}
/// Reference implementation of vertical_filter. Used to validate
/// the (presumably faster) actual implementation.
fn vertical_filter_reference(image: &GrayImage, kernel: &[f32]) -> GrayImage {
let (width, height) = image.dimensions();
let mut out = GrayImage::new(width, height);
for y in 0..height {
for x in 0..width {
let mut acc = 0f32;
for k in 0..kernel.len() {
let mut y_unchecked = y as i32 + k as i32 - (kernel.len() / 2) as i32;
y_unchecked = max(0, y_unchecked);
y_unchecked = min(y_unchecked, height as i32 - 1);
let y_checked = y_unchecked as u32;
let color = image.get_pixel(x, y_checked)[0];
let weight = kernel[k];
acc += color as f32 * weight;
}
let clamped = u8::clamp(acc);
out.put_pixel(x, y, Luma([clamped]));
}
}
out
}
macro_rules! test_against_reference_implementation {
($test_name:ident, $under_test:ident, $reference_impl:ident) => {
#[test]
fn $test_name() {
// I think the interesting edge cases here are determined entirely
// by the relative sizes of the kernel and the image side length, so
// I'm just enumerating over small values instead of generating random
// examples via quickcheck.
for height in 0..5 {
for width in 0..5 {
for kernel_length in 0..15 {
let image = gray_bench_image(width, height);
let kernel: Vec<f32> =
(0..kernel_length).map(|i| i as f32 % 1.35).collect();
let expected = $reference_impl(&image, &kernel);
let actual = $under_test(&image, &kernel);
assert_pixels_eq!(actual, expected);
}
}
}
}
}
}
test_against_reference_implementation!(
test_horizontal_filter_matches_reference_implementation,
horizontal_filter,
horizontal_filter_reference
);
test_against_reference_implementation!(
test_vertical_filter_matches_reference_implementation,
vertical_filter,
vertical_filter_reference
);
#[test]
fn test_horizontal_filter() {
let image = gray_image!(
1, 4, 1;
4, 7, 4;
1, 4, 1);
let expected = gray_image!(
2, 2, 2;
5, 5, 5;
2, 2, 2);
let kernel = vec![1f32 / 3f32; 3];
let filtered = horizontal_filter(&image, &kernel);
assert_pixels_eq!(filtered, expected);
}
#[test]
fn test_horizontal_filter_with_kernel_wider_than_image_does_not_panic() {
let image = gray_image!(
1, 4, 1;
4, 7, 4;
1, 4, 1);
let kernel = vec![1f32 / 10f32; 10];
black_box(horizontal_filter(&image, &kernel));
}
#[bench]
fn bench_horizontal_filter(b: &mut Bencher) {
let image = gray_bench_image(500, 500);
let kernel = vec![1f32 / 5f32; 5];
b.iter(|| {
let filtered = horizontal_filter(&image, &kernel);
black_box(filtered);
});
}
#[test]
fn test_vertical_filter() {
let image = gray_image!(
1, 4, 1;
4, 7, 4;
1, 4, 1);
let expected = gray_image!(
2, 5, 2;
2, 5, 2;
2, 5, 2);
let kernel = vec![1f32 / 3f32; 3];
let filtered = vertical_filter(&image, &kernel);
assert_pixels_eq!(filtered, expected);
}
#[test]
fn test_vertical_filter_with_kernel_taller_than_image_does_not_panic() {
let image = gray_image!(
1, 4, 1;
4, 7, 4;
1, 4, 1);
let kernel = vec![1f32 / 10f32; 10];
black_box(vertical_filter(&image, &kernel));
}
#[bench]
fn bench_vertical_filter(b: &mut Bencher) {
let image = gray_bench_image(500, 500);
let kernel = vec![1f32 / 5f32; 5];
b.iter(|| {
let filtered = vertical_filter(&image, &kernel);
black_box(filtered);
});
}
#[test]
fn test_filter3x3_with_results_outside_input_channel_range() {
let kernel: Vec<i32> = vec![
-1, 0, 1,
-2, 0, 2,
-1, 0, 1];
let image = gray_image!(
3, 2, 1;
6, 5, 4;
9, 8, 7);
let expected = gray_image!(type: i16,
-4, -8, -4;
-4, -8, -4;
-4, -8, -4
);
let filtered = filter3x3(&image, &kernel);
assert_pixels_eq!(filtered, expected);
}
#[bench]
fn bench_filter3x3_i32_filter(b: &mut Bencher) {
let image = gray_bench_image(500, 500);
let kernel: Vec<i32> = vec![
-1, 0, 1,
-2, 0, 2,
-1, 0, 1];
b.iter(|| {
let filtered: ImageBuffer<Luma<i16>, Vec<i16>> =
filter3x3::<_, _, i16>(&image, &kernel);
black_box(filtered);
});
}
/// Baseline implementation of Gaussian blur is that provided by image::imageops.
/// We can also use this to validate correctnes of any implementations we add here.
fn gaussian_baseline_rgb<I>(image: &I, stdev: f32) -> Image<Rgb<u8>>
where
I: GenericImage<Pixel = Rgb<u8>> + 'static,
{
blur(image, stdev)
}
#[bench]
#[ignore] // Gives a baseline performance using code from another library
fn bench_baseline_gaussian_stdev_1(b: &mut Bencher) {
let image = rgb_bench_image(100, 100);
b.iter(|| {
let blurred = gaussian_baseline_rgb(&image, 1f32);
black_box(blurred);
});
}
#[bench]
#[ignore] // Gives a baseline performance using code from another library
fn bench_baseline_gaussian_stdev_3(b: &mut Bencher) {
let image = rgb_bench_image(100, 100);
b.iter(|| {
let blurred = gaussian_baseline_rgb(&image, 3f32);
black_box(blurred);
});
}
#[bench]
#[ignore] // Gives a baseline performance using code from another library
fn bench_baseline_gaussian_stdev_10(b: &mut Bencher) {
let image = rgb_bench_image(100, 100);
b.iter(|| {
let blurred = gaussian_baseline_rgb(&image, 10f32);
black_box(blurred);
});
}
#[bench]
fn bench_gaussian_f32_stdev_1(b: &mut Bencher) {
let image = rgb_bench_image(100, 100);
b.iter(|| {
let blurred = gaussian_blur_f32(&image, 1f32);
black_box(blurred);
});
}
#[bench]
fn bench_gaussian_f32_stdev_3(b: &mut Bencher) {
let image = rgb_bench_image(100, 100);
b.iter(|| {
let blurred = gaussian_blur_f32(&image, 3f32);
black_box(blurred);
});
}
#[bench]
fn bench_gaussian_f32_stdev_10(b: &mut Bencher) {
let image = rgb_bench_image(100, 100);
b.iter(|| {
let blurred = gaussian_blur_f32(&image, 10f32);
black_box(blurred);
});
}
}<|fim▁end|> | black_box(filtered);
});
}
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from simulation.models import SimulationStage, SimulationStageMatch, SimulationStageMatchResult
class SimulationStageAdmin(admin.ModelAdmin):
list_display = ["number", "created_at"]
list_filter = ["created_at"]<|fim▁hole|>class SimulationStageMatchAdmin(admin.ModelAdmin):
list_display = ["stage", "order", "raund",
"cat", "rat", "won", "created_at"]
list_filter = ["stage", "created_at"]
search_fields = ["cat", "rat"]
readonly_fields = ["won", "cat_password", "rat_password", "system_password"]
class SimulationStageMatchResultAdmin(admin.ModelAdmin):
list_display = ["match", "is_caught", "distance", "is_cancelled", "created_at"]
list_filter = ["created_at"]
search_fields = ["match"]
admin.site.register(SimulationStage, SimulationStageAdmin)
admin.site.register(SimulationStageMatch, SimulationStageMatchAdmin)
admin.site.register(SimulationStageMatchResult, SimulationStageMatchResultAdmin)<|fim▁end|> | |
<|file_name|>modalStudent.js<|end_file_name|><|fim▁begin|>$('.registerModalButton').on("click", function () {
var courseId = $(this).data('id'),
courseName = $(this).data('name');
$("#studentModal .registerOkButton").data('courseToRegisterId', courseId);
$("#studentModal #courseNameModal").text(courseName);
});
$(".registerOkButton").on("click", function () {
var id = $("#studentModal .registerOkButton").data('courseToRegisterId'),
token = $('input[name="__RequestVerificationToken"]').val(),
data = {<|fim▁hole|>
$.ajax({
type: "POST",
url: "/Home/AssignToCourse",
data: data,
datatype: "html",
success: function (data) {
$('[data-groupid=' + id + ']')
.clone()
.appendTo('.registerdSection')
.find('button')
.remove();
},
error: function (data) {
$("#errorModal .modal-title").text(data.responseText);
$('#errorModal').modal();
console.log('fail to register');
}
});
});<|fim▁end|> | 'courseId': id,
'__RequestVerificationToken': token
}; |
<|file_name|>eqPattern.py<|end_file_name|><|fim▁begin|>from pattern import Pattern
import itertools
import random
import colorsys
import time<|fim▁hole|>class EqPattern(Pattern):
def __init__(self, meter_color=(255,100,50), background_color=(0,50,255)):
self.meter_r = meter_color[0]
self.meter_g = meter_color[1]
self.meter_b = meter_color[2]
self.bg_r = background_color[0]
self.bg_g = background_color[1]
self.bg_b = background_color[2]
# TODO: delete?
# self.register_param("meter_r", 0, 255, meter_color[0])
# self.register_param("meter_g", 0, 255, meter_color[1])
# self.register_param("meter_b", 0, 255, meter_color[2])
# self.register_param("bg_r", 0, 255, background_color[0])
# self.register_param("bg_g", 0, 255, background_color[1])
# self.register_param("bg_b", 0, 255, background_color[2])
self.register_param("max_hue_shift", 0, 0.5, 0.2)
self.register_param("beat_channel", 0, 6, 2)
self.register_param("max_bpm", 0, 200, 100)
self.register_param("prob_shift", 0, 1, 100)
self.next_shift = time.time()
def meter_color(self):
return (self.meter_r, self.meter_g, self.meter_b)
def background_color(self):
return (self.bg_r, self.bg_g, self.bg_b)
# TODO: put this into utils or something
def hue_shift(self, color, hue_shift):
color_scaled = [x/255.0 for x in color]
hsv = list(colorsys.rgb_to_hsv(color_scaled[0], color_scaled[1], color_scaled[2]))
hsv[0] += hue_shift % 1
return tuple([int(x*255) for x in colorsys.hsv_to_rgb(hsv[0], hsv[1], hsv[2])])
def next_frame(self, octopus, data):
beat_channel = int(round(self.beat_channel))
t = time.time()
if data.beats[beat_channel] and t > self.next_shift:
self.next_shift = t + 60.0/self.max_bpm
shift = self.max_hue_shift*(2*random.random() - 1)
if int(round(random.random())):
self.meter_r, self.meter_g, self.meter_b = self.hue_shift(self.meter_color(), shift)
else:
self.bg_r, self.bg_g, self.bg_b = self.hue_shift(self.background_color(), shift)
meter_color = self.meter_color()
background_color = self.background_color()
eq = itertools.cycle(data.eq)
for tentacle in octopus.tentacles:
level = next(eq)
for led_strip in tentacle.led_strips:
pixel_colors = []
n_meter_pixels = int(len(led_strip.pixels)*float(level))
pixel_colors.extend([meter_color for x in range(n_meter_pixels)])
n_background_pixels = len(led_strip.pixels) - n_meter_pixels
pixel_colors.extend([background_color for x in range(n_background_pixels)])
led_strip.put_pixels(pixel_colors)<|fim▁end|> | |
<|file_name|>request.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {HttpHeaders} from './headers';
import {HttpUrlParams} from './url_params';
/**
* Represents an HTTP request body when serialized for the server.
*
* @experimental
*/
export type HttpSerializedBody = ArrayBuffer | Blob | FormData | string;
/**
* A subset of the allowed values for `XMLHttpRequest.responseType` supported by
* {@link HttpClient}.
*
* @experimental
*/
export type HttpResponseType = 'arraybuffer' | 'blob' | 'json' | 'text';
/**
* A type capturing HTTP methods which don't take request bodies.
*
* @experimental
*/
export type HttpNoBodyMethod = 'DELETE' | 'GET' | 'HEAD' | 'JSONP' | 'OPTIONS';
/**
* A type capturing HTTP methods which do take request bodies.
*
* @experimental
*/
export type HttpBodyMethod = 'POST' | 'PUT' | 'PATCH';
/**
* A type representing all (known) HTTP methods.
*
* @experimental
*/
export type HttpMethod = HttpBodyMethod | HttpNoBodyMethod;
/**
* Construction interface for {@link HttpRequest}s.
*
* All values are optional and will override default values if provided.
*
* @experimental
*/
export interface HttpRequestInit {
headers?: HttpHeaders;
reportProgress?: boolean;
responseType?: HttpResponseType;
withCredentials?: boolean;
}
/**
* Cloning interface for {@link HttpRequestClone}.
*
* All values are optional and will be cloned from the base request if not
* provided.
*
* @experimental
*/
export interface HttpRequestClone<T> extends HttpRequestInit {
body?: T|null;
method?: HttpMethod|string;
url?: string;
setHeaders?: {[name: string]: string | string[]};
}
/**
* Determine whether the given HTTP method may include a body.
*/
function mightHaveBody(method: string): boolean {
switch (method) {
case 'DELETE':
case 'GET':
case 'HEAD':
case 'OPTIONS':
case 'JSONP':
return false;
default:
return true;
}
}
/**
* Safely assert whether the given value is an ArrayBuffer.
*
* In some execution environments ArrayBuffer is not defined.
*/
function isArrayBuffer(value: any): value is ArrayBuffer {
return typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer;
}
/**
* Safely assert whether the given value is a Blob.
*
* In some execution environments Blob is not defined.
*/
function isBlob(value: any): value is Blob {
return typeof Blob !== 'undefined' && value instanceof Blob;
}
/**
* Safely assert whether the given value is a FormData instance.
*
* In some execution environments FormData is not defined.
*/
function isFormData(value: any): value is FormData {
return typeof FormData !== 'undefined' && value instanceof FormData;
}
/**
* An outgoing HTTP request with an optional typed body.
*
* {@link HttpRequest} represents an outgoing request, including URL, method,
* headers, body, and other request configuration options. Instances should be
* assumed to be immutable. To modify a {@link HttpRequest}, the {@link HttpRequest#clone}
* method should be used.
*
* @experimental
*/
export class HttpRequest<T> {
/**
* The request body, or `null` if one isn't set.
*
* Bodies are not enforced to be immutable, as they can include a reference to any
* user-defined data type. However, interceptors should take care to preserve
* idempotence by treating them as such.
*/
readonly body: T|null = null;
/**
* Outgoing headers for this request.
*/
readonly headers: HttpHeaders;
/**
* Whether this request should be made in a way that exposes progress events.
*
* Progress events are expensive (change detection runs on each event) and so
* they should only be requested if the consumer intends to monitor them.
*/
readonly reportProgress: boolean = false;
/**
* Whether this request should be sent with outgoing credentials (cookies).
*/
readonly withCredentials: boolean = false;
/**
* The expected response type of the server.
*
* This is used to parse the response appropriately before returning it to
* the requestee.
*/
readonly responseType: HttpResponseType = 'json';
/**
* The outgoing HTTP request method.
*/
readonly method: string;
constructor(url: string);
constructor(url: string, method: HttpNoBodyMethod, init?: HttpRequestInit);
constructor(url: string, method: HttpBodyMethod, body: T|null, init?: HttpRequestInit);
constructor(url: string, method: HttpMethod|string, body: T|null, init?: HttpRequestInit);
constructor(
public readonly url: string, method?: string, third?: T|HttpRequestInit|null,
fourth?: HttpRequestInit) {
// Assume GET unless otherwise specified, and normalize the request method.
this.method = (method || 'GET').toUpperCase();
// Next, need to figure out which argument holds the HttpRequestInit
// options, if any.
let options: HttpRequestInit|undefined;
// Check whether a body argument is expected. The only valid way to omit
// the body argument is to use a known no-body method like GET.
if (mightHaveBody(this.method) || !!fourth) {
// Body is the third argument, options are the fourth.
this.body = third as T || null;<|fim▁hole|> options = third as HttpRequestInit;
}
// If options have been passed, interpret them.
if (options) {
// Normalize reportProgress and withCredentials.
this.reportProgress = !!options.reportProgress;
this.withCredentials = !!options.withCredentials;
// Override default response type of 'json' if one is provided.
if (!!options.responseType) {
this.responseType = options.responseType;
}
// Override headers if they're provided.
if (!!options.headers) {
this.headers = options.headers;
}
}
// If no headers have been passed in, construct a new HttpHeaders instance.
if (!this.headers) {
this.headers = new HttpHeaders();
}
// In any case, seal the headers so no changes are allowed.
this.headers.seal();
}
/**
* Transform the free-form body into a serialized format suitable for
* transmission to the server.
*/
serializeBody(): HttpSerializedBody|null {
// If no body is present, no need to serialize it.
if (this.body === null) {
return null;
}
// Check whether the body is already in a serialized form. If so,
// it can just be returned directly.
if (isArrayBuffer(this.body) || isBlob(this.body) || isFormData(this.body) ||
typeof this.body === 'string') {
return this.body;
}
// Check whether the body is an object or array, and serialize with JSON if so.
if (typeof this.body === 'object' || typeof this.body === 'boolean' ||
Array.isArray(this.body)) {
return JSON.stringify(this.body);
}
// Fall back on toString() for everything else.
return (this.body as any).toString();
}
/**
* Examine the body and attempt to infer an appropriate MIME type
* for it.
*
* If no such type can be inferred, this method will return `null`.
*/
detectContentTypeHeader(): string|null {
// An empty body has no content type.
if (this.body === null) {
return null;
}
// FormData instances are URL encoded on the wire.
if (isFormData(this.body)) {
return 'application/x-www-form-urlencoded;charset=UTF-8';
}
// Blobs usually have their own content type. If it doesn't, then
// no type can be inferred.
if (isBlob(this.body)) {
return this.body.type || null;
}
// Array buffers have unknown contents and thus no type can be inferred.
if (isArrayBuffer(this.body)) {
return null;
}
// Technically, strings could be a form of JSON data, but it's safe enough
// to assume they're plain strings.
if (typeof this.body === 'string') {
return 'text/plain';
}
// Arrays, objects, and numbers will be encoded as JSON.
if (typeof this.body === 'object' || typeof this.body === 'number' ||
Array.isArray(this.body)) {
return 'application/json';
}
// No type could be inferred.
return null;
}
clone(): HttpRequest<T>;
clone(update: HttpRequestInit): HttpRequest<T>;
clone<V>(update: HttpRequestClone<V>): HttpRequest<V>;
clone(update: HttpRequestClone<any> = {}): HttpRequest<any> {
// For method, url, and responseType, take the current value unless
// it is overridden in the update hash.
const method = update.method || this.method;
const url = update.url || this.url;
const responseType = update.responseType || this.responseType;
// The body is somewhat special - a `null` value in update.body means
// whatever current body is present is being overridden with an empty
// body, whereas an `undefined` value in update.body implies no
// override.
const body = (update.body !== undefined) ? update.body : this.body;
// Carefully handle the boolean options to differentiate between
// `false` and `undefined` in the update args.
const withCredentials =
(update.withCredentials !== undefined) ? update.withCredentials : this.withCredentials;
const reportProgress =
(update.reportProgress !== undefined) ? update.reportProgress : this.reportProgress;
// Headers may need to be cloned later if they're sealed, but being
// appended to.
let headers = update.headers || this.headers;
// Check whether the caller has asked to add headers.
if (update.setHeaders !== undefined) {
// Adding extra headers. If the current headers are sealed, they need to
// be cloned to unseal them first.
if (headers.sealed) {
headers = headers.clone();
}
// Set every requested header.
Object.keys(update.setHeaders)
.forEach(header => headers.set(header, update.setHeaders ![header]));
}
// Finally, construct the new HttpRequest using the pieces from above.
return new HttpRequest(
url, method, body, {
headers, reportProgress, responseType, withCredentials,
});
}
}<|fim▁end|> | options = fourth;
} else {
// No body required, options are the third argument. The body stays null. |
<|file_name|>definition.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
from dace.processdefinition.processdef import ProcessDefinition
from dace.processdefinition.activitydef import ActivityDefinition
from dace.processdefinition.gatewaydef import (
ExclusiveGatewayDefinition,
ParallelGatewayDefinition)
from dace.processdefinition.transitiondef import TransitionDefinition
from dace.processdefinition.eventdef import (
StartEventDefinition,
EndEventDefinition)
from dace.objectofcollaboration.services.processdef_container import (
process_definition)<|fim▁hole|>from pontus.core import VisualisableElement
from .behaviors import (
Addapplications,
AddFacebookApplication,
AddTwitterApplication,
AddGoogleApplication,
SeeApplication,
EditApplication,
RemoveApplication
)
from lac import _
@process_definition(name='socialapplicationsprocess',
id='socialapplicationsprocess')
class SocialApplicationsProcess(ProcessDefinition, VisualisableElement):
isUnique = True
def __init__(self, **kwargs):
super(SocialApplicationsProcess, self).__init__(**kwargs)
self.title = _('Social applications process')
self.description = _('Social applications process')
def _init_definition(self):
self.defineNodes(
start = StartEventDefinition(),
pg = ParallelGatewayDefinition(),
addapplication = ActivityDefinition(contexts=[Addapplications, AddFacebookApplication,
AddTwitterApplication, AddGoogleApplication],
description=_("Add a social application"),
title=_("Add a social application"),
groups=[]),
seeapplication = ActivityDefinition(contexts=[SeeApplication],
description=_("See the application"),
title=_("See the application"),
groups=[]),
editapplication = ActivityDefinition(contexts=[EditApplication],
description=_("Edit the application"),
title=_("Edit"),
groups=[]),
removeapplication = ActivityDefinition(contexts=[RemoveApplication],
description=_("Remove the application"),
title=_("Remove"),
groups=[]),
eg = ExclusiveGatewayDefinition(),
end = EndEventDefinition(),
)
self.defineTransitions(
TransitionDefinition('start', 'pg'),
TransitionDefinition('pg', 'addapplication'),
TransitionDefinition('addapplication', 'eg'),
TransitionDefinition('pg', 'seeapplication'),
TransitionDefinition('seeapplication', 'eg'),
TransitionDefinition('pg', 'editapplication'),
TransitionDefinition('editapplication', 'eg'),
TransitionDefinition('pg', 'removeapplication'),
TransitionDefinition('removeapplication', 'eg'),
TransitionDefinition('eg', 'end'),
)<|fim▁end|> | |
<|file_name|>monomorphized-callees-with-ty-params-3314.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>
trait Serializer {
}
trait Serializable {
fn serialize<S:Serializer>(&self, s: S);
}
impl Serializable for int {
fn serialize<S:Serializer>(&self, _s: S) { }
}
struct F<A> { a: A }
impl<A:Serializable> Serializable for F<A> {
fn serialize<S:Serializer>(&self, s: S) {
self.a.serialize(s);
}
}
impl Serializer for int {
}
pub fn main() {
let foo = F { a: 1 };
foo.serialize(1);
let bar = F { a: F {a: 1 } };
bar.serialize(2);
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .module1 import *
from .module4 import *<|fim▁hole|>import sys<|fim▁end|> | |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|># MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.conf.urls.defaults import patterns, url
js_info_dict = {
'packages': ('geonode.maps',),
}
urlpatterns = patterns('geonode.maps.views',
url(r'^$', 'map_list', name='maps_browse'),
url(r'^tag/(?P<slug>[-\w]+?)/$', 'maps_tag', name='maps_browse_tag'),
url(r'^new$', 'new_map', name="new_map"),
url(r'^new/data$', 'new_map_json', name='new_map_json'),
url(r'^(?P<mapid>\d+)$', 'map_detail', name='map_detail'),
url(r'^(?P<mapid>\d+)/view$', 'map_view', name='map_view'),
url(r'^(?P<mapid>\d+)/data$', 'map_json', name='map_json'),
url(r'^(?P<mapid>\d+)/download$', 'map_download', name='map_download'),
url(r'^(?P<mapid>\d+)/wmc$', 'map_wmc', name='map_wmc'),
url(r'^(?P<mapid>\d+)/remove$', 'map_remove', name='map_remove'),
url(r'^(?P<mapid>\d+)/metadata$', 'map_metadata', name='map_metadata'),
url(r'^(?P<mapid>\d+)/embed$', 'map_embed', name='map_embed'),
url(r'^(?P<mapid>\d+)/permissions$', 'map_permissions', name='map_permissions'),
url(r'^(?P<mapid>\d+)/thumbnail$', 'map_thumbnail', name='map_thumbnail'),
url(r'^check/$', 'map_download_check', name='map_download_check'),
url(r'^embed/$', 'map_embed', name='map_embed'),
url(r'^(?P<layername>[^/]*)/attributes', 'maplayer_attributes', name='maplayer_attributes'),
#url(r'^change-poc/(?P<ids>\w+)$', 'change_poc', name='maps_change_poc'),
)<|fim▁end|> | |
<|file_name|>config.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Configure batch3dfier with the input data."""
import os.path
from subprocess import call
from shapely.geometry import shape
from shapely import geos
from psycopg2 import sql
import fiona
def call_3dfier(db, tile, schema_tiles,
pc_file_name, pc_tile_case, pc_dir,
table_index_pc, fields_index_pc,
table_index_footprint, fields_index_footprint, uniqueid,
extent_ewkb, clip_prefix, prefix_tile_footprint,
yml_dir, tile_out, output_format, output_dir,
path_3dfier, thread):
"""Call 3dfier with the YAML config created by yamlr().
Note
----
For the rest of the parameters see batch3dfier_config.yml.
Parameters
----------
db : db Class instance
tile : str
Name of of the 2D tile.
schema_tiles : str
Schema of the footprint tiles.
pc_file_name : str
Naming convention for the pointcloud files. See 'dataset_name' in batch3dfier_config.yml.
pc_tile_case : str
How the string matching is done for pc_file_name. See 'tile_case' in batch3dfier_config.yml.
pc_dir : str
Directory of the pointcloud files. See 'dataset_dir' in batch3dfier_config.yml.
thread : str
Name/ID of the active thread.
extent_ewkb : str
EWKB representation of 'extent' in batch3dfier_config.yml.
clip_prefix : str
Prefix for naming the clipped/united views. This value shouldn't be a substring of the pointcloud file names.
prefix_tile_footprint : str or None
Prefix prepended to the footprint tile view names. If None, the views are named as
the values in fields_index_fooptrint['unit_name'].
Returns
-------
list
The tiles that are skipped because no corresponding pointcloud file
was found in 'dataset_dir' (YAML)
"""
pc_tiles = find_pc_tiles(db, table_index_pc, fields_index_pc,
table_index_footprint, fields_index_footprint,
extent_ewkb, tile_footprint=tile,
prefix_tile_footprint=prefix_tile_footprint)
pc_path = find_pc_files(pc_tiles, pc_dir, pc_file_name, pc_tile_case)
# prepare output file name
if not tile_out:
tile_out = tile.replace(clip_prefix, '', 1)
# Call 3dfier ------------------------------------------------------------
if pc_path:
# Needs a YAML per thread so one doesn't overwrite it while the other
# uses it
yml_name = thread + "_config.yml"
yml_path = os.path.join(yml_dir, yml_name)
config = yamlr(dbname=db.dbname, host=db.host, user=db.user,
pw=db.password, schema_tiles=schema_tiles,
bag_tile=tile, pc_path=pc_path,
output_format=output_format, uniqueid=uniqueid)
# Write temporary config file
try:
with open(yml_path, "w") as text_file:
text_file.write(config)
except BaseException:
print("Error: cannot write _config.yml")
# Prep output file name
if "obj" in output_format.lower():
o = tile_out + ".obj"
output_path = os.path.join(output_dir, o)
elif "csv" in output_format.lower():
o = tile_out + ".csv"
output_path = os.path.join(output_dir, o)
else:
output_path = os.path.join(output_dir, tile_out)
# Run 3dfier
command = (path_3dfier + " {yml} -o {out}").format(
yml=yml_path, out=output_path)
try:
call(command, shell=True)
except BaseException:
print("\nCannot run 3dfier on tile " + tile)
tile_skipped = tile
else:
print(
"\nPointcloud file(s) " +
str(pc_tiles) +
" not available. Skipping tile.\n")
tile_skipped = tile
return({'tile_skipped': tile_skipped,
'out_path': None})
return({'tile_skipped': None,
'out_path': output_path})
def yamlr(dbname, host, user, pw, schema_tiles,
bag_tile, pc_path, output_format, uniqueid):
"""Parse the YAML config file for 3dfier.
Parameters
----------
See batch3dfier_config.yml.
Returns
-------
string
the YAML config file for 3dfier
"""
pc_dataset = ""
if len(pc_path) > 1:
for p in pc_path:
pc_dataset += "- " + p + "\n" + " "
else:
pc_dataset += "- " + pc_path[0]
# !!! Do not correct the indentation of the config template, otherwise it
# results in 'YAML::TypedBadConversion<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >'
# because every line is indented as here
config = """
input_polygons:
- datasets:
- "PG:dbname={dbname} host={host} user={user} password={pw} schemas={schema_tiles} tables={bag_tile}"
uniqueid: {uniqueid}
lifting: Building
lifting_options:
Building:
height_roof: percentile-90
height_floor: percentile-10
lod: 1
input_elevation:
- datasets:
{pc_path}
omit_LAS_classes:
thinning: 0
options:
building_radius_vertex_elevation: 2.0
radius_vertex_elevation: 1.0
threshold_jump_edges: 0.5
output:
format: {output_format}
building_floor: true
vertical_exaggeration: 0
""".format(dbname=dbname,
host=host,
user=user,
pw=pw,
schema_tiles=schema_tiles,
bag_tile=bag_tile,
uniqueid=uniqueid,<|fim▁hole|>
def find_pc_files(pc_tiles, pc_dir, pc_file_name, pc_tile_case):
"""Find pointcloud files in the file system when given a list of pointcloud tile names
"""
# Prepare AHN file names -------------------------------------------------
if pc_tile_case == "upper":
tiles = [pc_file_name.format(tile=t.upper()) for t in pc_tiles]
elif pc_tile_case == "lower":
tiles = [pc_file_name.format(tile=t.lower()) for t in pc_tiles]
elif pc_tile_case == "mixed":
tiles = [pc_file_name.format(tile=t) for t in pc_tiles]
else:
raise "Please provide one of the allowed values for pc_tile_case."
# use the tile list in tiles to parse the pointcloud file names
pc_path = [os.path.join(pc_dir, pc_tile) for pc_tile in tiles]
if all([os.path.isfile(p) for p in pc_path]):
return(pc_path)
else:
return(None)
def find_pc_tiles(db, table_index_pc, fields_index_pc,
table_index_footprint=None, fields_index_footprint=None,
extent_ewkb=None, tile_footprint=None,
prefix_tile_footprint=None):
"""Find pointcloud tiles in tile index that intersect the extent or the footprint tile.
Parameters
----------
prefix_tile_footprint : str or None
Prefix prepended to the footprint tile view names. If None, the views are named as
the values in fields_index_fooptrint['unit_name'].
"""
if extent_ewkb:
tiles = get_2Dtiles(db, table_index_pc, fields_index_pc, extent_ewkb)
else:
schema_pc_q = sql.Identifier(table_index_pc['schema'])
table_pc_q = sql.Identifier(table_index_pc['table'])
field_pc_geom_q = sql.Identifier(fields_index_pc['geometry'])
field_pc_unit_q = sql.Identifier(fields_index_pc['unit_name'])
schema_ftpr_q = sql.Identifier(table_index_footprint['schema'])
table_ftpr_q = sql.Identifier(table_index_footprint['table'])
field_ftpr_geom_q = sql.Identifier(fields_index_footprint['geometry'])
field_ftpr_unit_q = sql.Identifier(fields_index_footprint['unit_name'])
if prefix_tile_footprint:
tile_footprint = tile_footprint.replace(
prefix_tile_footprint, '', 1)
tile_q = sql.Literal(tile_footprint)
query = sql.SQL("""
SELECT
{table_pc}.{field_pc_unit}
FROM
{schema_pc}.{table_pc},
{schema_ftpr}.{table_ftpr}
WHERE
{table_ftpr}.{field_ftpr_unit} = {tile}
AND st_intersects(
{table_pc}.{field_pc_geom},
{table_ftpr}.{field_ftpr_geom}
);
""").format(table_pc=table_pc_q,
field_pc_unit=field_pc_unit_q,
schema_pc=schema_pc_q,
schema_ftpr=schema_ftpr_q,
table_ftpr=table_ftpr_q,
field_ftpr_unit=field_ftpr_unit_q,
tile=tile_q,
field_pc_geom=field_pc_geom_q,
field_ftpr_geom=field_ftpr_geom_q)
resultset = db.getQuery(query)
tiles = [tile[0] for tile in resultset]
return(tiles)
def extent_to_ewkb(db, table_index, file):
"""Reads a polygon from a file and returns its EWKB.
I didn't find a simple way to safely get SRIDs from the input geometry
with Shapely, therefore it is obtained from the database and the CRS of the
polygon is assumed to be the same as of the tile indexes.
Parameters
----------
db : db Class instance
table_index : dict
{'schema' : str, 'table' : str} of the table of tile index.
file : str
Path to the polygon for clipping the input.
Must be in the same CRS as the table_index.
Returns
-------
[Shapely polygon, EWKB str]
"""
schema = sql.Identifier(table_index['schema'])
table = sql.Identifier(table_index['table'])
query = sql.SQL("""SELECT st_srid(geom) AS srid
FROM {schema}.{table}
LIMIT 1;""").format(schema=schema, table=table)
srid = db.getQuery(query)[0][0]
assert srid is not None
# Get clip polygon and set SRID
with fiona.open(file, 'r') as src:
poly = shape(src[0]['geometry'])
# Change a the default mode to add this, if SRID is set
geos.WKBWriter.defaults['include_srid'] = True
# set SRID for polygon
geos.lgeos.GEOSSetSRID(poly._geom, srid)
ewkb = poly.wkb_hex
return([poly, ewkb])
def get_2Dtiles(db, table_index, fields_index, ewkb):
"""Returns a list of tiles that overlap the output extent.
Parameters
----------
db : db Class instance
table_index : dict
{'schema' : str, 'table' : str} of the table of tile index.
fields_index : dict
{'primary_key' : str, 'geometry' : str, 'unit_name' : str}
primary_key: Name of the primary_key field in table_index.
geometry: Name of the geometry field in table_index.
unit: Name of the field in table_index that contains the index unit names.
ewkb : str
EWKB representation of a polygon.
Returns
-------
[tile IDs]
Tiles that are intersected by the polygon that is provided in 'extent' (YAML).
"""
schema = sql.Identifier(table_index['schema'])
table = sql.Identifier(table_index['table'])
field_idx_geom_q = sql.Identifier(fields_index['geometry'])
field_idx_unit_q = sql.Identifier(fields_index['unit_name'])
ewkb_q = sql.Literal(ewkb)
# TODO: user input for a.unit
query = sql.SQL("""
SELECT {table}.{field_idx_unit}
FROM {schema}.{table}
WHERE st_intersects({table}.{field_idx_geom}, {ewkb}::geometry);
""").format(schema=schema,
table=table,
field_idx_unit=field_idx_unit_q,
field_idx_geom=field_idx_geom_q,
ewkb=ewkb_q)
resultset = db.getQuery(query)
tiles = [tile[0] for tile in resultset]
print("Nr. of tiles in clip extent: " + str(len(tiles)))
return(tiles)
def get_2Dtile_area(db, table_index):
"""Get the area of a 2D tile.
Note
----
Assumes that all tiles have equal area. Area is in units of the tile CRS.
Parameters
----------
db : db Class instance
table_index : list of str
{'schema' : str, 'table' : str} of the table of tile index.
Returns
-------
float
"""
schema = sql.Identifier(table_index['schema'])
table = sql.Identifier(table_index['table'])
query = sql.SQL("""
SELECT public.st_area(geom) AS area
FROM {schema}.{table}
LIMIT 1;
""").format(schema=schema, table=table)
area = db.getQuery(query)[0][0]
return(area)
def get_2Dtile_views(db, schema_tiles, tiles):
"""Get View names of the 2D tiles. It tries to find views in schema_tiles
that contain the respective tile ID in their name.
Parameters
----------
db : db Class instance
schema_tiles: str
Name of the schema where the 2D tile views are stored.
tiles : list
Tile IDs
Returns
-------
list
Name of the view that contain the tile ID as substring.
"""
# Get View names for the tiles
t = ["%" + str(tile) + "%" for tile in tiles]
t = sql.Literal(t)
schema_tiles = sql.Literal(schema_tiles)
query = sql.SQL("""SELECT table_name
FROM information_schema.views
WHERE table_schema = {}
AND table_name LIKE any({});
""").format(schema_tiles, t)
resultset = db.getQuery(query)
tile_views = [tile[0] for tile in resultset]
return(tile_views)
def clip_2Dtiles(db, user_schema, schema_tiles, tiles, poly, clip_prefix,
fields_view):
"""Creates views for the clipped tiles.
Parameters
----------
db : db Class instance
user_schema: str
schema_tiles : str
tiles : list
poly : Shapely polygon
clip_prefix : str
Returns
-------
list
Name of the views of the clipped tiles.
"""
user_schema = sql.Identifier(user_schema)
schema_tiles = sql.Identifier(schema_tiles)
tiles_clipped = []
fields_all = fields_view['all']
field_geom_q = sql.Identifier(fields_view['geometry'])
for tile in tiles:
t = clip_prefix + tile
tiles_clipped.append(t)
view = sql.Identifier(t)
tile_view = sql.Identifier(tile)
fields_q = parse_sql_select_fields(tile, fields_all)
wkb = sql.Literal(poly.wkb_hex)
query = sql.SQL("""
CREATE OR REPLACE VIEW {user_schema}.{view} AS
SELECT
{fields}
FROM
{schema_tiles}.{tile_view}
WHERE
st_within({tile_view}.{geom}, {wkb}::geometry)"""
).format(user_schema=user_schema,
schema_tiles=schema_tiles,
view=view,
fields=fields_q,
tile_view=tile_view,
geom=field_geom_q,
wkb=wkb)
db.sendQuery(query)
try:
db.conn.commit()
print(
str(
len(tiles_clipped)) +
" views with prefix '{}' are created in schema {}.".format(
clip_prefix,
user_schema))
except BaseException:
print("Cannot create view {user_schema}.{clip_prefix}{tile}".format(
schema_tiles=schema_tiles, clip_prefix=clip_prefix))
db.conn.rollback()
return(tiles_clipped)
def union_2Dtiles(db, user_schema, tiles_clipped, clip_prefix, fields_view):
"""Union the clipped tiles into a single view.
Parameters
----------
db : db Class instance
user_schema : str
tiles_clipped : list
clip_prefix : str
Returns
-------
str
Name of the united view.
"""
# Check if there are enough tiles to unite
assert len(tiles_clipped) > 1, "Need at least 2 tiles for union"
user_schema = sql.Identifier(user_schema)
u = "{clip_prefix}union".format(clip_prefix=clip_prefix)
union_view = sql.Identifier(u)
sql_query = sql.SQL("CREATE OR REPLACE VIEW {user_schema}.{view} AS ").format(
user_schema=user_schema, view=union_view)
fields_all = fields_view['all']
for tile in tiles_clipped[:-1]:
view = sql.Identifier(tile)
fields_q = parse_sql_select_fields(tile, fields_all)
sql_subquery = sql.SQL("""SELECT {fields}
FROM {user_schema}.{view}
UNION ALL """).format(fields=fields_q,
user_schema=user_schema,
view=view)
sql_query = sql_query + sql_subquery
# The last statement
tile = tiles_clipped[-1]
view = sql.Identifier(tile)
fields_q = parse_sql_select_fields(tile, fields_all)
sql_subquery = sql.SQL("""SELECT {fields}
FROM {user_schema}.{view};
""").format(fields=fields_q,
user_schema=user_schema,
view=view)
sql_query = sql_query + sql_subquery
db.sendQuery(sql_query)
try:
db.conn.commit()
print("View {} created in schema {}.".format(u, user_schema))
except BaseException:
print("Cannot create view {}.{}".format(user_schema, u))
db.conn.rollback()
return(False)
return(u)
def get_view_fields(db, user_schema, tile_views):
"""Get the fields in a 2D tile view
Parameters
----------
tile_views : list of str
Returns
-------
{'all' : list, 'geometry' : str}
"""
if len(tile_views) > 0:
schema_q = sql.Literal(user_schema)
view_q = sql.Literal(tile_views[0])
resultset = db.getQuery(sql.SQL("""
SELECT
column_name
FROM
information_schema.columns
WHERE
table_schema = {schema}
AND table_name = {view};
""").format(schema=schema_q,
view=view_q))
f = [field[0] for field in resultset]
geom_res = db.getQuery(sql.SQL("""
SELECT
f_geometry_column
FROM
public.geometry_columns
WHERE
f_table_schema = {schema}
AND f_table_name = {view};
""").format(schema=schema_q,
view=view_q))
f_geom = geom_res[0][0]
fields = {}
fields['all'] = f
fields['geometry'] = f_geom
return(fields)
else:
return(None)
def parse_sql_select_fields(table, fields):
"""Parses a list of field names into "table"."field" to insert into a SELECT ... FROM table
Parameters
----------
fields : list of str
Returns
-------
psycopg2.sql.Composable
"""
s = []
for f in fields:
s.append(sql.SQL('.').join([sql.Identifier(table), sql.Identifier(f)]))
sql_fields = sql.SQL(', ').join(s)
return(sql_fields)
def drop_2Dtiles(db, user_schema, views_to_drop):
"""Drops Views in a given schema.
Note
----
Used for dropping the views created by clip_2Dtiles() and union_2Dtiles().
Parameters
----------
db : db Class instance
user_schema : str
views_to_drop : list
Returns
-------
bool
"""
user_schema = sql.Identifier(user_schema)
for view in views_to_drop:
view = sql.Identifier(view)
query = sql.SQL("DROP VIEW IF EXISTS {user_schema}.{view} CASCADE;").format(
user_schema=user_schema, view=view)
db.sendQuery(query)
try:
db.conn.commit()
print("Dropped {} in schema {}.".format(views_to_drop, user_schema))
# sql.Identifier("tile_index").as_string(dbs.conn)
return(True)
except BaseException:
print("Cannot drop views ", views_to_drop)
db.conn.rollback()
return(False)<|fim▁end|> | pc_path=pc_dataset,
output_format=output_format)
return(config)
|
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|>/*
This file is a part of the Depecher (Telegram client)
Copyright (C) 2017 Alexandr Akulich <akulichalexander@gmail.com>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
#include <QGuiApplication>
#include <QQuickView>
#include <sailfishapp.h>
int main(int argc, char *argv[])
{
QScopedPointer<QGuiApplication> application(SailfishApp::application(argc, argv));
application->setApplicationName(QStringLiteral("depecher"));
application->setApplicationDisplayName(QStringLiteral("Depecher"));
QScopedPointer<QQuickView> view(SailfishApp::createView());
view->setSource(SailfishApp::pathTo(QStringLiteral("qml/main.qml")));
view->setTitle(application->applicationDisplayName());
view->setResizeMode(QQuickView::SizeRootObjectToView);
view->show();
return application->exec();
}<|fim▁end|> | |
<|file_name|>eachValue.next.js<|end_file_name|><|fim▁begin|>/**
*
* @function
* @param {Array|arraylike} value
* @param {Function} cmd<|fim▁hole|> * @returns {?}
*/
export default function eachValue(value, cmd, context, keepReverse) {
if (value === undefined || value === null) return undefined;
const size = (0 | value.length) - 1;
for (let index = size; index > -1; index -= 1) {
const i = keepReverse ? index : size - index;
const item = value[i];
const resolve = cmd.call(context || item, item, i, value, i);
if (resolve === undefined === false) {
return resolve;
}
}
return undefined;
}<|fim▁end|> | * @param {any} context |
<|file_name|>CollectionEditor.js<|end_file_name|><|fim▁begin|>// Copyright 2016 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Primary controller for the collection editor page.
*/
// TODO(bhenning): These constants should be provided by the backend.
oppia.constant(
'COLLECTION_DATA_URL_TEMPLATE', '/collection_handler/data/<collection_id>');
oppia.constant(
'WRITABLE_COLLECTION_DATA_URL_TEMPLATE',
'/collection_editor_handler/data/<collection_id>');
oppia.constant(
'COLLECTION_RIGHTS_URL_TEMPLATE',
'/collection_editor_handler/rights/<collection_id>');
oppia.controller('CollectionEditor', ['$scope',<|fim▁hole|> 'CollectionObjectFactory', 'SkillListObjectFactory',
'CollectionUpdateService', 'UndoRedoService', 'alertsService', function(
$scope, WritableCollectionBackendApiService,
CollectionRightsBackendApiService, CollectionObjectFactory,
SkillListObjectFactory, CollectionUpdateService, UndoRedoService,
alertsService) {
$scope.collection = null;
$scope.collectionId = GLOBALS.collectionId;
$scope.collectionSkillList = SkillListObjectFactory.create([]);
$scope.isPublic = GLOBALS.isPublic;
// Load the collection to be edited.
WritableCollectionBackendApiService.fetchWritableCollection(
$scope.collectionId).then(
function(collectionBackendObject) {
$scope.collection = CollectionObjectFactory.create(
collectionBackendObject);
$scope.collectionSkillList.setSkills(collectionBackendObject.skills);
}, function(error) {
alertsService.addWarning(
error || 'There was an error loading the collection.');
});
$scope.getChangeListCount = function() {
return UndoRedoService.getChangeCount();
};
// To be used after mutating the prerequisite and/or acquired skill lists.
$scope.updateSkillList = function() {
$scope.collectionSkillList.clearSkills();
$scope.collectionSkillList.addSkillsFromSkillList(
$scope.collection.getSkillList());
$scope.collectionSkillList.sortSkills();
};
// An explicit save is needed to push all changes to the backend at once
// because some likely working states of the collection will cause
// validation errors when trying to incrementally save them.
$scope.saveCollection = function(commitMessage) {
// Don't attempt to save the collection if there are no changes pending.
if (!UndoRedoService.hasChanges()) {
return;
}
WritableCollectionBackendApiService.updateCollection(
$scope.collection.getId(), $scope.collection.getVersion(),
commitMessage, UndoRedoService.getCommittableChangeList()).then(
function(collectionBackendObject) {
$scope.collection = CollectionObjectFactory.create(
collectionBackendObject);
$scope.collectionSkillList.setSkills(collectionBackendObject.skills);
UndoRedoService.clearChanges();
}, function(error) {
alertsService.addWarning(
error || 'There was an error updating the collection.');
});
};
$scope.publishCollection = function() {
// TODO(bhenning): Publishing should not be doable when the exploration
// may have errors/warnings. Publish should only show up if the collection
// is private. This also needs a confirmation of destructive action since
// it is not reversible.
CollectionRightsBackendApiService.setCollectionPublic(
$scope.collectionId, $scope.collection.getVersion()).then(
function() {
// TODO(bhenning): There should be a scope-level rights object used,
// instead. The rights object should be loaded with the collection.
$scope.isPublic = true;
}, function() {
alertsService.addWarning(
'There was an error when publishing the collection.');
});
};
}]);<|fim▁end|> | 'WritableCollectionBackendApiService', 'CollectionRightsBackendApiService', |
<|file_name|>nxOMSAgentNPMConfig.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# ===================================
# Copyright (c) Microsoft Corporation. All rights reserved.
# See license.txt for license information.
# ===================================
import socket
import os
import sys
import imp
import md5
import sha
import codecs
import base64
import platform
import shutil
protocol = imp.load_source('protocol', '../protocol.py')
nxDSCLog = imp.load_source('nxDSCLog', '../nxDSCLog.py')
LG = nxDSCLog.DSCLog
# Paths
CONFIG_PATH = '/etc/opt/microsoft/omsagent/conf/'
SERVER_ADDRESS = '/var/opt/microsoft/omsagent/npm_state/npmdagent.sock'
DEST_FILE_NAME = 'npmd_agent_config.xml'
PLUGIN_PATH = '/opt/microsoft/omsagent/plugin/'
PLUGIN_CONF_PATH = '/etc/opt/microsoft/omsagent/conf/omsagent.d/'
RESOURCE_MODULE_PATH = '/opt/microsoft/omsconfig/modules/nxOMSAgentNPMConfig/DSCResources/MSFT_nxOMSAgentNPMConfigResource/NPM/'
DSC_RESOURCE_VERSION_PATH = '/opt/microsoft/omsconfig/modules/nxOMSAgentNPMConfig/VERSION'
AGENT_RESOURCE_VERSION_PATH = '/var/opt/microsoft/omsagent/npm_state/npm_version'
DSC_X64_AGENT_PATH = 'Agent/64/'
DSC_X86_AGENT_PATH = 'Agent/32/'
DSC_PLUGIN_PATH = 'Plugin/plugin/'
DSC_PLUGIN_CONF_PATH = 'Plugin/conf/'
AGENT_BINARY_PATH = '/opt/microsoft/omsagent/plugin/'
AGENT_SCRIPT_PATH = '/opt/microsoft/omsconfig/Scripts/NPMAgentBinaryCap.sh'
# Constants
X64 = '64bit'
AGENT_BINARY_NAME = 'npmd_agent'
def enum(**enums):
return type('Enum', (), enums)
Commands = enum(LogNPM = 'ErrorLog', StartNPM = 'StartNPM', StopNPM = 'StopNPM', Config = 'Config', Purge = 'Purge')
LogType = enum(Error = 'ERROR', Info = 'INFO')
class INPMDiagnosticLog:
def log(self):
pass
class NPMDiagnosticLogUtil(INPMDiagnosticLog):
def log(self, logType, logString):
# Create a UDS socket
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
try:
try:
# Connect the socket to the port where the server is listening
sock.connect(SERVER_ADDRESS)
# Send data
message = Commands.LogNPM + ':' + '[' + logType + ']' + logString
sock.sendall(message)
except Exception, msg:
LG().Log(LogType.Error, str(msg))
finally:
sock.close()
LOG_ACTION = NPMDiagnosticLogUtil()
class IOMSAgent:
def restart_oms_agent(self):
pass
class OMSAgentUtil(IOMSAgent):
def restart_oms_agent(self):
if os.system('sudo /opt/microsoft/omsagent/bin/service_control restart') == 0:
return True
else:
LOG_ACTION.log(LogType.Error, 'Error restarting omsagent.')
return False
class INPMAgent:
def binary_setcap(self):
pass
class NPMAgentUtil(IOMSAgent):
def binary_setcap(self, binaryPath):
if os.path.exists(AGENT_SCRIPT_PATH) and os.system('sudo %s %s' %(AGENT_SCRIPT_PATH, binaryPath)) == 0:
return True
else:
LOG_ACTION.log(LogType.Error, 'Error setting capabilities to npmd agent binary.')
return False
global show_mof
show_mof = False
OMS_ACTION = OMSAgentUtil()
NPM_ACTION = NPMAgentUtil()
# [key] string ConfigType;
# [write] string ConfigID;
# [write] string Contents;
# [write,ValueMap{"Present", "Absent"},Values{"Present", "Absent"}] string Ensure;
# [write] string ContentChecksum;
def init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
if ConfigType is not None and ConfigType != '':
ConfigType = ConfigType.encode('ascii', 'ignore')
else:
ConfigType = 'UpdatedAgentConfig'
if ConfigID is not None:
ConfigID = ConfigID.encode('ascii', 'ignore')
else:
ConfigID = ''
if Contents is not None:
Contents = base64.b64decode(Contents)#Contents.encode('ascii', 'ignore')
else:
Contents = ''
if Ensure is not None and Ensure != '':
Ensure = Ensure.encode('ascii', 'ignore')
else:
Ensure = 'Present'
if ContentChecksum is not None:
ContentChecksum = ContentChecksum.encode('ascii', 'ignore')
else:
ContentChecksum = ''
return ConfigType, ConfigID, Contents, Ensure, ContentChecksum
def Set_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
recvdContentChecksum = md5.md5(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
LOG_ACTION.log(LogType.Info, 'Content received did not match checksum with md5, trying with sha1')
# validate with sha1
recvdContentChecksum = sha.sha(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
# data is corrupt do not proceed further
LOG_ACTION.log(LogType.Error, 'Content received did not match checksum with sha1, exiting Set')
return [-1]
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = Set(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return retval
def Test_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
recvdContentChecksum = md5.md5(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
LOG_ACTION.log(LogType.Info, 'Content received did not match checksum with md5, trying with sha1')
# validate with sha1
recvdContentChecksum = sha.sha(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
# data is corrupt do not proceed further
LOG_ACTION.log(LogType.Error, 'Content received did not match checksum with sha1, exiting Set')
return [0]
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)<|fim▁hole|>def Get_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
arg_names = list(locals().keys())
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
retval = Get(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
ConfigType = protocol.MI_String(ConfigType)
ConfigID = protocol.MI_String(ConfigID)
Ensure = protocol.MI_String(Ensure)
Contents = protocol.MI_String(Contents)
ContentChecksum = protocol.MI_String(ContentChecksum)
retd = {}
ld = locals()
for k in arg_names:
retd[k] = ld[k]
return retval, retd
############################################################
# Begin user defined DSC functions
############################################################
def SetShowMof(a):
global show_mof
show_mof = a
def ShowMof(op, ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
if not show_mof:
return
mof = ''
mof += op + ' nxOMSAgentNPMConfig MyNPMConfig \n'
mof += '{\n'
mof += ' ConfigType = "' + ConfigType + '"\n'
mof += ' ConfigID = "' + ConfigID + '"\n'
mof += ' Contents = "' + Contents + '"\n'
mof += ' Ensure = "' + Ensure + '"\n'
mof += ' ContentChecksum = "' + ContentChecksum + '"\n'
mof += '}\n'
f = open('./test_mofs.log', 'a')
Print(mof, file=f)
LG().Log(LogType.Info, mof)
f.close()
def Set(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
ShowMof('SET', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
if ConfigType != 'UpdatedAgentConfig':
LOG_ACTION.log(LogType.Error, 'Config type did not match, exiting set')
return [-1]
if Ensure == 'Absent':
if os.path.exists(AGENT_RESOURCE_VERSION_PATH):
LG().Log(LogType.Info, 'Ensure is absent, but resource is present, purging')
success = PurgeSolution()
if not success:
retval = -1
return [retval]
if TestConfigUpdate(Contents) != 0:
retval = SetConfigUpdate(Contents)
version = TestResourceVersion()
if version != 0:
retval = SetFilesUpdate(version)
return [retval]
def Test(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
ShowMof('TEST', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
if not os.path.exists(AGENT_SCRIPT_PATH):
LG().Log(LogType.Error, 'npmd set cap script does not exist, exiting test')
return [retval]
if ConfigType != 'UpdatedAgentConfig':
LOG_ACTION.log(LogType.Error, 'Config type did not match, exiting test')
return [retval]
if Ensure == 'Absent':
if os.path.exists(AGENT_RESOURCE_VERSION_PATH):
LG().Log(LogType.Info, 'Ensure is absent, resource is present on the agent, set will purge')
retval = -1
return [retval]
if TestResourceVersion() != 0 or TestConfigUpdate(Contents) != 0:
retval = -1
return [retval]
def Get(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
retval = 0
ShowMof('GET', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return [retval]
def Print(s, file=sys.stdout):
file.write(s + '\n')
# Compare resource version in DSC and agent machine
# Returns
# 0 if version is same
# dsc version number if there is a mismatch or agent config not present
def TestResourceVersion():
retval = 0
dscVersion = ReadFile(DSC_RESOURCE_VERSION_PATH)
if not os.path.exists(AGENT_RESOURCE_VERSION_PATH):
#npmd agent is not present, copy binaries
retval = dscVersion
else:
agentVersion = ReadFile(AGENT_RESOURCE_VERSION_PATH)
if agentVersion != dscVersion:
#version mismatch, copy binaries
retval = dscVersion
return retval
def TestConfigUpdate(Contents):
retval = 0
destFileFullPath = CONFIG_PATH.__add__(DEST_FILE_NAME)
if not os.path.exists(CONFIG_PATH):
LOG_ACTION.log(LogType.Error, 'CONFIG_PATH does not exist')
retval = 0
elif not os.path.exists(destFileFullPath):
# Configuration does not exist, fail
retval = -1
else:
origConfigData = ReadFile(destFileFullPath)
#compare
if origConfigData is None or origConfigData != Contents:
retval = -1
return retval
def SetConfigUpdate(Contents):
destFileFullPath = CONFIG_PATH.__add__(DEST_FILE_NAME)
# Update config after checking if directory exists
if not os.path.exists(CONFIG_PATH):
LOG_ACTION.log(LogType.Error, 'CONFIG_PATH does not exist')
retval = -1
else:
retval = WriteFile(destFileFullPath, Contents)
if retval == 0 and os.path.exists(AGENT_RESOURCE_VERSION_PATH): #notify server only if plugin is present
LG().Log(LogType.Info, 'Updated the file, going to notify server')
NotifyServer(Commands.Config)
return retval
def SetFilesUpdate(newVersion):
retval = UpdateAgentBinary(newVersion)
retval &= UpdatePluginFiles()
if retval:
return 0
return -1
def UpdateAgentBinary(newVersion):
retval = True
arch = platform.architecture()
src = ''
if arch is not None and arch[0] == X64:
src = RESOURCE_MODULE_PATH.__add__(DSC_X64_AGENT_PATH)
retval &= DeleteAllFiles(src, AGENT_BINARY_PATH)
retval &= CopyAllFiles(src, AGENT_BINARY_PATH)
else:
src = RESOURCE_MODULE_PATH.__add__(DSC_X86_AGENT_PATH)
retval &= DeleteAllFiles(src, AGENT_BINARY_PATH)
retval &= CopyAllFiles(src, AGENT_BINARY_PATH)
LOG_ACTION.log(LogType.Error, 'npmd agent binary do not support 32-bit.')
#Update version number after deleting and copying new agent files
if retval == True:
WriteFile(AGENT_RESOURCE_VERSION_PATH, newVersion)
# set capabilities to binary
src_files = os.listdir(src)
for file_name in src_files:
if AGENT_BINARY_NAME in file_name:
full_file_name = os.path.join(AGENT_BINARY_PATH, file_name)
break
NPM_ACTION.binary_setcap(full_file_name)
# Notify ruby plugin
#retval &= NotifyServer(Commands.RestartNPM)
return retval
def UpdatePluginFiles():
retval = True
#replace files
retval &= DeleteAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_PATH), PLUGIN_PATH)
retval &= DeleteAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_CONF_PATH), PLUGIN_CONF_PATH)
retval &= CopyAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_PATH), PLUGIN_PATH)
retval &= CopyAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_CONF_PATH), PLUGIN_CONF_PATH)
# restart oms agent
retval &= OMS_ACTION.restart_oms_agent()
return retval
def CopyAllFiles(src, dest):
try:
src_files = os.listdir(src)
for file_name in src_files:
full_file_name = os.path.join(src, file_name)
if (os.path.isfile(full_file_name)):
shutil.copy(full_file_name, dest)
except:
LOG_ACTION.log(LogType.Error, 'copy_all_files failed for src: ' + src + ' dest: ' + dest)
return False
return True
# Deletes all files present in both directories
def DeleteAllFiles(src, dest):
try:
src_files = os.listdir(src)
for file_name in src_files:
full_file_name = os.path.join(dest, file_name)
if (os.path.isfile(full_file_name)):
os.remove(full_file_name)
except:
LOG_ACTION.log(LogType.Error, 'delete_all_files failed for src: ' + src + ' dest: ' + dest)
return False
return True
def PurgeSolution():
# remove plugin config file so that plugin does not start again
retval = DeleteAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_CONF_PATH), PLUGIN_CONF_PATH)
# remove resource version file
try:
os.remove(AGENT_RESOURCE_VERSION_PATH)
except:
LOG_ACTION.log(LogType.Error, 'failed to remove version file')
retval = False
# notify ruby plugin to purge agent
NotifyServer(Commands.Purge)
return retval
def NotifyServer(command):
# Create a UDS socket
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
LG().Log(LogType.Info, 'connecting to ' + SERVER_ADDRESS)
try:
try:
# Connect the socket to the port where the server is listening
sock.connect(SERVER_ADDRESS)
# Send data
message = command
LG().Log(LogType.Info, 'sending ' + message)
sock.sendall(message)
except Exception, msg:
LG().Log(LogType.Error, str(msg))
# restart omsagent if command was config update and sock conn failed
if (command == Commands.Config):
OMS_ACTION.restart_oms_agent()
finally:
LG().Log(LogType.Info, 'closing socket')
sock.close()
def WriteFile(path, contents):
retval = 0
try:
dFile = open(path, 'w+')
dFile.write(contents)
dFile.close()
except IOError, error:
LOG_ACTION.log(LogType.Error, "Exception opening file " + path + " Error Code: " + str(error.errno) + " Error: " + error.message + error.strerror)
retval = -1
return retval
def ReadFile(path):
content = None
try:
dFile = codecs.open (path, encoding = 'utf8', mode = "r")
content = dFile.read()
dFile.close()
except IOError, error:
LOG_ACTION.log(LogType.Error, "Exception opening file " + path + " Error Code: " + str(error.errno) + " Error: " + error.message + error.strerror)
return content<|fim▁end|> | retval = Test(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return retval
|
<|file_name|>wiki2asciidoc.py<|end_file_name|><|fim▁begin|>from optparse import OptionParser
from uuid import uuid4
import re
def main():
parser = OptionParser()
#parser.add_option("--skip-rows", type="int", dest="skip_rows", default="0", help="Number of lines/rows to skip when using --lines or --csv. Default: %default")
(options, arguments) = parser.parse_args()
listing_markup = "------------------------------------------------------------------\n"
for path in arguments:
tag_listing = False
block_listing = False
graphviz_listing = False
for markup in open(path, "r"):
if re.search("<pre>", markup):
tag_listing = True
markup = listing_markup
elif re.search("</pre>", markup):
tag_listing = False
markup = listing_markup
elif re.search("<graphviz>", markup):
graphviz_listing = True
print "[\"graphviz\", \"%s.png\"]" % str(uuid4())
markup = listing_markup
elif re.search("</graphviz>", markup):
graphviz_listing = False
markup = listing_markup
elif markup.startswith(" ") and not block_listing and not tag_listing:
print listing_markup
block_listing = True<|fim▁hole|> markup = markup[1:]
elif not markup.startswith(" ") and block_listing:
print listing_markup
block_listing = False
if not tag_listing and not block_listing and not graphviz_listing:
# Convert varying numbers of single-quotes to Asciidoc "emphasis"
markup = re.sub("('{2,5})([^']+('[^']+)*)\\1", "'\\2'", markup)
# Markup double-quotes
markup = re.sub("\"([^\"]*)\"", "``\\1''", markup)
# Eliminate article categories
markup = re.sub("\\[\\[Category:[^\]]*\\]\\]", "", markup)
# Convert wiki links to Asciidoc links
markup = re.sub("\\[\\[([^\]]*)\\]\\]", "<<\\1,\\1>>", markup)
print markup,
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>io.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
meza.io
~~~~~~~
Provides methods for reading/writing/processing tabular formatted files
Examples:
basic usage::
>>> from meza.io import read_csv
>>>
>>> path = p.join(DATA_DIR, 'test.csv')
>>> csv_records = read_csv(path)
>>> csv_header = next(csv_records).keys()
>>> next(csv_records)['Some Value'] == '100'
True
"""
import itertools as it
import sys
import hashlib
import sqlite3
import json
import os
from os import path as p
from datetime import time
from mmap import mmap
from collections import deque
from subprocess import check_output, check_call, Popen, PIPE, CalledProcessError
from http import client
from csv import Error as csvError
from functools import partial
from codecs import iterdecode, iterencode, StreamReader
from itertools import zip_longest
import yaml
import xlrd
import pygogo as gogo
from bs4 import BeautifulSoup, FeatureNotFound
from ijson import items
from chardet.universaldetector import UniversalDetector
from xlrd import (
XL_CELL_DATE, XL_CELL_EMPTY, XL_CELL_NUMBER, XL_CELL_BOOLEAN,
XL_CELL_ERROR)
from xlrd.xldate import xldate_as_datetime as xl2dt
from io import StringIO, TextIOBase, BytesIO, open
from . import (
fntools as ft, process as pr, unicsv as csv, dbf, ENCODING, BOM, DATA_DIR)
# pylint: disable=C0103
logger = gogo.Gogo(__name__, monolog=True, verbose=True).logger
# pylint: disable=C0103
encode = lambda iterable: (s.encode(ENCODING) for s in iterable)
chain = lambda iterable: it.chain.from_iterable(iterable or [])
NEWLINES = {b'\n', b'\r', b'\r\n', '\n', '\r', '\r\n'}
def groupby_line(iterable):
return it.groupby(iterable, lambda s: s not in NEWLINES)
class IterStringIO(TextIOBase):
"""A lazy StringIO that reads a generator of strings.
https://stackoverflow.com/a/32020108/408556
https://stackoverflow.com/a/20260030/408556
"""
# pylint: disable=super-init-not-called
def __init__(self, iterable=None, bufsize=4096, decode=False, **kwargs):
""" IterStringIO constructor
Args:
iterable (Seq[str]): Iterable of strings or bytes
bufsize (Int): Buffer size for seeking
decode (bool): Decode the text into a string (default: False)
Examples:
>>> StringIO(iter('Hello World')).read(5) # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError:...
>>> IterStringIO(iter('Hello World')).read(5)
b'Hello'
>>> i = IterStringIO(iter('one\\ntwo\\n'))
>>> list(next(i.lines)) == [b'o', b'n', b'e']
True
>>> decoded = IterStringIO(iter('Hello World'), decode=True)
>>> decoded.read(5) == 'Hello'
True
"""
iterable = iterable if iterable else []
chained = chain(iterable)
self.iter = encode(chained)
self.decode = decode
self.bufsize = bufsize
self.last = deque(bytearray(), self.bufsize)
self.pos = 0
def __next__(self):
return self._read(next(self.lines))
def __iter__(self):
return self
@property
def lines(self):
"""Read all the lines of content"""
# TODO: what about a csv with embedded newlines?
groups = groupby_line(self.iter)
return (g for k, g in groups if k)
def _read(self, iterable, num=None, newline=True):
"""Helper method used to read content"""
content = it.islice(iterable, num) if num else iterable
byte = ft.byte(content)
self.last.extend(byte)
self.pos += num or len(byte)
if newline:
self.last.append('\n')
return byte.decode(ENCODING) if self.decode else bytes(byte)
def write(self, iterable):
"""Write the content"""
chained = chain(iterable)
self.iter = it.chain(self.iter, encode(chained))
def read(self, num=None):
"""Read the content"""
return self._read(self.iter, num, False)
def readline(self, num=None):
"""Read a line of content"""
return self._read(next(self.lines), num)
def readlines(self):
"""Read all the lines of content"""
return map(self._read, self.lines)
def seek(self, num):
"""Go to a specific position within a file"""
next_pos = self.pos + 1
beg_buf = max([0, self.pos - self.bufsize])
if num <= beg_buf:
self.iter = it.chain(self.last, self.iter)
self.last = deque([], self.bufsize)
elif self.pos > num > beg_buf:
extend = [self.last.pop() for _ in range(self.pos - num)]
self.iter = it.chain(reversed(extend), self.iter)
elif num == self.pos:
pass
elif num == next_pos:
self.last.append(next(self.iter))
elif num > next_pos:
pos = num - self.pos
[self.last.append(x) for x in it.islice(self.iter, 0, pos)]
self.pos = beg_buf if num < beg_buf else num
def tell(self):
"""Get the current position within a file"""
return self.pos
class Reencoder(StreamReader):
"""Recodes a file like object from one encoding to another.
"""
def __init__(self, f, fromenc=ENCODING, toenc=ENCODING, **kwargs):
""" Reencoder constructor
Args:
f (obj): File-like object
fromenc (str): The input encoding.
toenc (str): The output encoding.
Kwargs:
remove_BOM (bool): Remove Byte Order Marker (default: True)
decode (bool): Decode the text into a string (default: False)
Examples:
>>> encoding = 'utf-16-be'
>>> eff = p.join(DATA_DIR, 'utf16_big.csv')
>>>
>>> with open(eff, 'rb') as f:
... reenc = Reencoder(f, encoding)
... first = reenc.readline(keepends=False)
... first.decode('utf-8') == '\ufeffa,b,c'
... reenc.readlines()[1].decode('utf-8') == '4,5,ʤ'
True
True
>>> with open(eff, 'rb') as f:
... reenc = Reencoder(f, encoding, decode=True)
... reenc.readline(keepends=False) == '\ufeffa,b,c'
True
>>> with open(eff, 'rU', encoding=encoding) as f:
... reenc = Reencoder(f, remove_BOM=True)
... reenc.readline(keepends=False) == b'a,b,c'
... reenc.readline() == b'1,2,3\\n'
... reenc.readline().decode('utf-8') == '4,5,ʤ'
True
True
True
"""
self.fileno = f.fileno
first_line = next(f)
bytes_mode = isinstance(first_line, bytes)
decode = kwargs.get('decode')
rencode = not decode
if kwargs.get('remove_BOM'):
strip = BOM.encode(fromenc) if bytes_mode else BOM
first_line = first_line.lstrip(strip)
chained = it.chain([first_line], f)
if bytes_mode:
decoded = iterdecode(chained, fromenc)
self.binary = rencode
proper_newline = first_line.endswith(os.linesep.encode(fromenc))
else:
decoded = chained
self.binary = bytes_mode or rencode
proper_newline = first_line.endswith(os.linesep)
stream = iterencode(decoded, toenc) if rencode else decoded
if proper_newline:
self.stream = stream
else:
# TODO: make sure the read methods are consistent with
# proper_newline, e.g., `keepends`.
#
# TODO: since the newline isn't recognized, `stream` is contains
# just one (very long) line. we pass in this line to iterate
# over the chars
groups = groupby_line(next(stream))
if self.binary:
self.stream = (b''.join(g) for k, g in groups if k)
else:
self.stream = (''.join(g) for k, g in groups if k)
def __next__(self):
return next(self.stream)
def __iter__(self):
return self
def read(self, n=None):
stream = it.islice(self.stream, n) if n else self.stream
return b''.join(stream) if self.binary else ''.join(stream)
def readline(self, n=None, keepends=True):
line = next(self.stream)
return line if keepends else line.rstrip()
def readlines(self, sizehint=None):
return list(self.stream)
def tell(self):
pass
def reset(self):
pass
class BytesError(ValueError):
pass
def patch_http_response_read(func):
"""Patches httplib to read poorly encoded chunked data.
https://stackoverflow.com/a/14206036/408556
"""
def inner(*args):
"""inner"""
try:
return func(*args)
except client.IncompleteRead as err:
return err.partial
return inner
client.HTTPResponse.read = patch_http_response_read(client.HTTPResponse.read)<|fim▁hole|>
def _remove_bom_from_dict(row, bom):
"""Remove a byte order marker (BOM) from a dict"""
for k, v in row.items():
try:
if all([k, v, bom in k, bom in v]):
yield (k.lstrip(bom), v.lstrip(bom))
elif v and bom in v:
yield (k, v.lstrip(bom))
elif k and bom in k:
yield (k.lstrip(bom), v)
else:
yield (k, v)
except TypeError:
yield (k, v)
def _remove_bom_from_list(row, bom):
"""Remove a byte order marker (BOM) from a list"""
for pos, col in enumerate(row):
try:
if not pos and bom in col:
yield col.lstrip(bom)
else:
yield col
except TypeError:
yield col
def _remove_bom_from_scalar(row, bom):
"""Remove a byte order marker (BOM) from a scalar"""
try:
return row.lstrip(bom)
except AttributeError:
return row
def is_listlike(item):
"""Determine if a scalar is listlike"""
if hasattr(item, 'keys'):
listlike = False
else:
listlike = {'append', 'next', '__reversed__'}.intersection(dir(item))
return listlike
def remove_bom(row, bom):
"""Remove a byte order marker (BOM)"""
if is_listlike(row):
bomless = list(_remove_bom_from_list(row, bom))
else:
try:
# pylint: disable=R0204
bomless = dict(_remove_bom_from_dict(row, bom))
except AttributeError:
bomless = _remove_bom_from_scalar(row, bom)
return bomless
def get_encoding(filepath):
"""
Examples:
>>> get_encoding(p.join(DATA_DIR, 'utf16_big.csv')) == 'UTF-16'
True
"""
with open(filepath, 'rb') as f:
encoding = detect_encoding(f)['encoding']
return encoding
def get_file_encoding(f, encoding=None, bytes_error=False):
"""Detects a file's encoding"""
if not encoding and hasattr(f, 'encoding'):
encoding = f.encoding
if not bytes_error:
# Set the encoding to None so that we can detect the correct one.
extra = (' ({})'.format(encoding)) if encoding else ''
logger.warning('%s was opened with the wrong encoding%s', f, extra)
encoding = None
if not encoding:
try:
f.seek(0)
except AttributeError:
pass
else:
try:
# See if we have bytes to avoid reopening the file
encoding = detect_encoding(f)['encoding']
except UnicodeDecodeError:
msg = 'Incorrectly encoded file, reopening with bytes to detect'
msg += ' encoding'
logger.warning(msg)
f.close()
encoding = get_encoding(f.name)
finally:
if hasattr(f, 'name'): # otherwise we can't reopen it
f.close()
if encoding:
logger.debug('detected encoding: %s', encoding)
return encoding
def sanitize_file_encoding(encoding):
if encoding == 'Windows-1252' and os.name == 'posix':
# based on my testing, when excel for mac saves a csv file as
# 'Windows-1252', you have to open with 'mac-roman' in order
# to properly read it
new_encoding = 'mac-roman'
msg = 'Detected a `Windows-1252` encoded file on a %s machine.'
msg += ' Setting encoding to `%s` instead.'
logger.warning(msg, sys.platform, new_encoding)
else:
new_encoding = encoding
return new_encoding
def is_binary(f):
try:
result = 'b' in f.mode
except AttributeError:
result = isinstance(f, BytesIO)
return result
def reopen(f, encoding):
sanitized_encoding = sanitize_file_encoding(encoding)
logger.debug('Reopening %s with encoding: %s', f, sanitized_encoding)
try:
decoded_f = open(f.name, encoding=sanitized_encoding)
except AttributeError:
f.seek(0)
decoded_f = iterdecode(f, sanitized_encoding)
return decoded_f
def _read_any(f, reader, args, pos=0, recursed=False, **kwargs):
"""Helper func to read a file or filepath"""
try:
if is_binary(f) and reader.__name__ != 'writer':
# only allow binary mode for writing files, not reading
message = "%s was opened in bytes mode but isn't being written to"
raise BytesError(message % f)
for num, line in enumerate(reader(f, *args, **kwargs)):
if num >= pos:
yield line
pos += 1
except (UnicodeDecodeError, csvError, BytesError) as err:
logger.warning(err)
encoding = kwargs.pop('encoding', None)
bytes_error = type(err).__name__ == 'BytesError'
if not recursed:
ekwargs = {'encoding': encoding, 'bytes_error': bytes_error}
encoding = get_file_encoding(f, **ekwargs)
if recursed or not encoding:
logger.error('Unable to detect proper file encoding')
return
decoded_f = reopen(f, encoding)
try:
rkwargs = pr.merge([kwargs, {'pos': pos, 'recursed': True}])
for line in _read_any(decoded_f, reader, args, **rkwargs):
yield line
finally:
decoded_f.close()
def read_any(filepath, reader, mode='r', *args, **kwargs):
"""Reads a file or filepath
Args:
filepath (str): The file path or file like object.
reader (func): The processing function.
mode (Optional[str]): The file open mode (default: 'r').
kwargs (dict): Keyword arguments that are passed to the reader.
Kwargs:
encoding (str): File encoding.
See also:
`meza.io.read_csv`
`meza.io.read_fixed_fmt`
`meza.io.read_json`
`meza.io.read_geojson`
`meza.io.write`
`meza.io.hash_file`
Yields:
scalar: Result of applying the reader func to the file.
Examples:
>>> filepath = p.join(DATA_DIR, 'test.csv')
>>> reader = lambda f, **kw: (l.strip().split(',') for l in f)
>>> result = read_any(filepath, reader, 'r')
>>> next(result) == [
... 'Some Date', 'Sparse Data', 'Some Value', 'Unicode Test', '']
True
"""
if hasattr(filepath, 'read'):
if is_binary(filepath):
kwargs.setdefault('encoding', ENCODING)
else:
kwargs.pop('encoding', None)
for line in _read_any(filepath, reader, args, **kwargs):
yield remove_bom(line, BOM)
else:
encoding = None if 'b' in mode else kwargs.pop('encoding', ENCODING)
with open(filepath, mode, encoding=encoding) as f:
for line in _read_any(f, reader, args, **kwargs):
yield remove_bom(line, BOM)
def _read_csv(f, header=None, has_header=True, **kwargs):
"""Helps read a csv file.
Args:
f (obj): The csv file like object.
header (Seq[str]): Sequence of column names.
has_header (bool): Whether or not file has a header.
Kwargs:
first_col (int): The first column (default: 0).
Yields:
dict: A csv record.
See also:
`meza.io.read_csv`
Examples:
>>> filepath = p.join(DATA_DIR, 'test.csv')
>>> with open(filepath, 'r', encoding='utf-8') as f:
... sorted(next(_read_csv(f)).items()) == [
... ('Some Date', '05/04/82'),
... ('Some Value', '234'),
... ('Sparse Data', 'Iñtërnâtiônàližætiøn'),
... ('Unicode Test', 'Ādam')]
True
"""
first_col = kwargs.pop('first_col', 0)
if header and has_header:
next(f)
elif not (header or has_header):
raise ValueError('Either `header` or `has_header` must be specified.')
header = (list(it.repeat('', first_col)) + header) if first_col else header
reader = csv.DictReader(f, header, **kwargs)
# Remove empty keys
records = (dict(x for x in r.items() if x[0]) for r in reader)
# Remove empty rows
for row in records:
if any(v.strip() for v in row.values() if v):
yield row
def read_mdb(filepath, table=None, **kwargs):
    """Reads an MS Access file (requires the external `mdbtools` binaries).

    Args:
        filepath (str): The mdb file path.
        table (str): The table to load (default: None, the first found table).
        kwargs (dict): Keyword arguments that are passed to the csv reader.

    Kwargs:
        sanitize (bool): Underscorify and lowercase field names
            (default: False).
        dedupe (bool): Deduplicate field names (default: False).
        ignorecase (bool): Treat file name as case insensitive (default: true).

    Yields:
        dict: A row of data whose keys are the field names.

    Raises:
        TypeError: If unable to read the db file.

    Examples:
        >>> filepath = p.join(DATA_DIR, 'test.mdb')
        >>> records = read_mdb(filepath, sanitize=True)
        >>> expected = {
        ...     'surname': 'Aaron',
        ...     'forenames': 'William',
        ...     'freedom': '07/03/60 00:00:00',
        ...     'notes': 'Order of Court',
        ...     'surname_master_or_father': '',
        ...     'how_admitted': 'Redn.',
        ...     'id_no': '1',
        ...     'forenames_master_or_father': '',
        ...     'remarks': '',
        ...     'livery': '',
        ...     'date_of_order_of_court': '06/05/60 00:00:00',
        ...     'source_ref': 'MF 324'}
        >>> first_row = next(records)
        >>> (expected == first_row) if first_row else True
        True
    """
    args = ['mdb-tables', '-1', filepath]

    # Check if 'mdb-tools' is installed on system; degrade gracefully (yield
    # a single None) rather than crash when it isn't.
    try:
        check_output(args)
    except OSError:
        logger.error(
            'You must install [mdbtools]'
            '(http://sourceforge.net/projects/mdbtools/) in order to use '
            'this function')
        yield
        return
    except CalledProcessError:
        raise TypeError('{} is not readable by mdbtools'.format(filepath))

    sanitize = kwargs.pop('sanitize', None)
    dedupe = kwargs.pop('dedupe', False)
    # NOTE(review): check_output returns bytes, so a defaulted `table` is a
    # bytes name here; Popen accepts mixed str/bytes args on POSIX -- confirm
    # behavior on Windows.
    table = table or check_output(args).splitlines()[0]
    pkwargs = {'stdout': PIPE, 'bufsize': 1, 'universal_newlines': True}

    # https://stackoverflow.com/a/2813530/408556
    # https://stackoverflow.com/a/17698359/408556
    with Popen(['mdb-export', filepath, table], **pkwargs).stdout as pipe:
        first_line = StringIO(str(pipe.readline()))
        names = next(csv.reader(first_line, **kwargs))
        uscored = ft.underscorify(names) if sanitize else names
        header = list(ft.dedupe(uscored) if dedupe else uscored)

        # `universal_newlines=True` puts the pipe in text mode, so readline()
        # returns '' (str) at EOF. The previous b'' sentinel could never match
        # a str, leaving the loop spinning on empty strings until csv.reader
        # raised StopIteration inside this generator (a RuntimeError on
        # Python 3.7+). Use the str sentinel so iteration stops cleanly.
        for line in iter(pipe.readline, ''):
            next_line = StringIO(str(line))
            values = next(csv.reader(next_line, **kwargs))
            yield dict(zip(header, values))
def read_dbf(filepath, **kwargs):
    """Reads a dBase, Visual FoxPro, or FoxBase+ file (thin wrapper around
    the `dbf` reader).

    Args:
        filepath (str): The dbf file path or file like object.
        kwargs (dict): Keyword arguments that are passed to the DBF reader.

    Kwargs:
        load (bool): Load all records into memory (default: false).
        encoding (str): Character encoding (default: None, parsed from
            the `language_driver`).
        sanitize (bool): Underscorify and lowercase field names
            (default: False).
        ignorecase (bool): Treat file name as case insensitive (default: true).
        ignore_missing_memofile (bool): Suppress `MissingMemoFile` exceptions
            (default: False).

    Yields:
        OrderedDict: A row of data whose keys are the field names.

    Raises:
        MissingMemoFile: If unable to find the memo file.
        DBFNotFound: If unable to find the db file.

    Examples:
        >>> filepath = p.join(DATA_DIR, 'test.dbf')
        >>> records = read_dbf(filepath, sanitize=True)
        >>> next(records) == {
        ...      'awater10': 12416573076,
        ...      'aland10': 71546663636,
        ...      'intptlat10': '+47.2400052',
        ...      'lsad10': 'C2',
        ...      'cd111fp': '08',
        ...      'namelsad10': 'Congressional District 8',
        ...      'funcstat10': 'N',
        ...      'statefp10': '27',
        ...      'cdsessn': '111',
        ...      'mtfcc10': 'G5200',
        ...      'geoid10': '2708',
        ...      'intptlon10': '-092.9323194'}
        True
    """
    # The underlying reader calls the flag `lowernames`; translate our
    # `sanitize` keyword onto it and forward everything else untouched.
    kwargs['lowernames'] = kwargs.pop('sanitize', None)
    return iter(dbf.DBF2(filepath, **kwargs))
def read_sqlite(filepath, table=None):
    """Reads a sqlite file.

    Args:
        filepath (str): The sqlite file path
        table (str): The table to load (default: None, the first found table).
            Unknown table names also fall back to the first table.

    Yields:
        dict: A row of data whose keys are the field names.

    Raises:
        NotFound: If unable to find the resource.
        TypeError: If the database contains no tables.

    See also:
        `meza.io.read_any`

    Examples:
        >>> filepath = p.join(DATA_DIR, 'test.sqlite')
        >>> records = read_sqlite(filepath)
        >>> next(records) == {
        ...     'sparse_data': 'Iñtërnâtiônàližætiøn',
        ...     'some_date': '05/04/82',
        ...     'some_value': 234,
        ...     'unicode_test': 'Ādam'}
        True
    """
    con = sqlite3.connect(filepath)
    con.row_factory = sqlite3.Row
    cursor = con.cursor()
    cursor.execute("SELECT name FROM sqlite_master WHERE type = 'table'")

    # fetchall() yields sqlite3.Row objects, so extract the name column
    # before doing membership tests. (The previous code compared the str
    # `table` against Row objects -- never equal -- and then called
    # fetchone() on an already-drained cursor, crashing whenever a table
    # name was passed in.)
    tables = [row[0] for row in cursor.fetchall()]

    if not tables:
        # Preserves the TypeError the old code effectively raised here.
        raise TypeError('{} has no tables'.format(filepath))

    if table not in tables:
        table = tables[0]

    # `table` is validated against sqlite_master above; quote it anyway so
    # unusual identifiers can't break (or inject into) the statement.
    cursor.execute('SELECT * FROM "{}"'.format(table.replace('"', '""')))
    return map(dict, cursor)
def read_csv(filepath, mode='r', **kwargs):
    """Reads a csv file.

    Args:
        filepath (str): The csv file path or file like object.
        mode (Optional[str]): The file open mode (default: 'r').
        kwargs (dict): Keyword arguments that are passed to the csv reader.

    Kwargs:
        delimiter (str): Field delimiter (default: ',').
        quotechar (str): Quote character (default: '"').
        encoding (str): File encoding.
        has_header (bool): Has header row (default: True).
        custom_header (List[str]): Custom header names (default: None).
        first_row (int): First row (zero based, default: 0).
        first_col (int): First column (zero based, default: 0).
        sanitize (bool): Underscorify and lowercase field names
            (default: False).
        dedupe (bool): Deduplicate field names (default: False).

    Yields:
        dict: A row of data whose keys are the field names.

    Raises:
        NotFound: If unable to find the resource.

    See also:
        `meza.io.read_any`
        `meza.io._read_csv`

    Examples:
        >>> filepath = p.join(DATA_DIR, 'test.csv')
        >>> records = read_csv(filepath, sanitize=True)
        >>> next(records) == {
        ...     'sparse_data': 'Iñtërnâtiônàližætiøn',
        ...     'some_date': '05/04/82',
        ...     'some_value': '234',
        ...     'unicode_test': 'Ādam'}
        True
    """
    def reader(f, **kwargs):
        """File reader: resolves the header, then delegates to _read_csv."""
        first_row = kwargs.pop('first_row', 0)
        first_col = kwargs.pop('first_col', 0)
        sanitize = kwargs.pop('sanitize', False)
        dedupe = kwargs.pop('dedupe', False)
        has_header = kwargs.pop('has_header', True)
        custom_header = kwargs.pop('custom_header', None)

        # position file pointer at the first row
        list(it.islice(f, first_row))
        first_line = StringIO(str(next(f)))
        names = next(csv.reader(first_line, **kwargs))

        if has_header or custom_header:
            names = custom_header if custom_header else names
            stripped = (name for name in names if name.strip())
            uscored = ft.underscorify(stripped) if sanitize else stripped
            header = list(ft.dedupe(uscored) if dedupe else uscored)

        if not has_header:
            # the first line was only peeked at to count the columns, so
            # reposition the file pointer back at the first data row
            try:
                f.seek(0)
            except AttributeError:
                # the two literals are concatenated below, so the first one
                # needs its trailing space (the message previously rendered
                # as "...a specified orcustom header.")
                msg = 'Non seekable files must have either a specified or '
                msg += 'custom header.'
                logger.error(msg)
                raise

            list(it.islice(f, first_row))

        if not (has_header or custom_header):
            header = ['column_%i' % (n + 1) for n in range(len(names))]

        # has_header is passed as False because the header row (if any) has
        # already been consumed above
        return _read_csv(f, header, False, first_col=first_col, **kwargs)

    return read_any(filepath, reader, mode, **kwargs)
def read_tsv(filepath, mode='r', **kwargs):
    """Reads a tsv (tab separated) file by delegating to `read_csv` with the
    'excel-tab' dialect.

    Args:
        filepath (str): The tsv file path or file like object.
        mode (Optional[str]): The file open mode (default: 'r').
        kwargs (dict): Keyword arguments that are passed to the csv reader.

    Kwargs:
        quotechar (str): Quote character (default: '"').
        encoding (str): File encoding.
        has_header (bool): Has header row (default: True).
        first_row (int): First row (zero based, default: 0).
        first_col (int): First column (zero based, default: 0).
        sanitize (bool): Underscorify and lowercase field names
            (default: False).
        dedupe (bool): Deduplicate field names (default: False).

    Yields:
        dict: A row of data whose keys are the field names.

    Raises:
        NotFound: If unable to find the resource.

    See also:
        `meza.io.read_any`
        `meza.io.read_csv`

    Examples:
        >>> filepath = p.join(DATA_DIR, 'test.tsv')
        >>> records = read_tsv(filepath, sanitize=True)
        >>> next(records) == {
        ...     'sparse_data': 'Iñtërnâtiônàližætiøn',
        ...     'some_date': '05/04/82',
        ...     'some_value': '234',
        ...     'unicode_test': 'Ādam'}
        True
    """
    # The 'excel-tab' dialect sets the tab delimiter, so no explicit
    # `delimiter` kwarg is needed (or accepted) here.
    return read_csv(filepath, mode, dialect='excel-tab', **kwargs)
def read_fixed_fmt(filepath, widths=None, mode='r', **kwargs):
    """Reads a fixed-width csv file.

    Args:
        filepath (str): The fixed width formatted file path or file like object.
        widths (List[int]): The zero-based 'start' position of each column.
        mode (Optional[str]): The file open mode (default: 'r').
        kwargs (dict): Keyword arguments that are passed to the csv reader.

    Kwargs:
        has_header (bool): Has header row (default: False).
        first_row (int): First row (zero based, default: 0).
        first_col (int): First column (zero based, default: 0).
        sanitize (bool): Underscorify and lowercase field names
            (default: False).
        dedupe (bool): Deduplicate field names (default: False).

    Yields:
        dict: A row of data whose keys are the field names.

    Raises:
        NotFound: If unable to find the resource.

    See also:
        `meza.io.read_any`

    Examples:
        >>> filepath = p.join(DATA_DIR, 'fixed.txt')
        >>> widths = [0, 18, 29, 33, 38, 50]
        >>> records = read_fixed_fmt(filepath, widths)
        >>> next(records) == {
        ...     'column_1': 'Chicago Reader',
        ...     'column_2': '1971-01-01',
        ...     'column_3': '40',
        ...     'column_4': 'True',
        ...     'column_5': '1.0',
        ...     'column_6': '04:14:001971-01-01T04:14:00'}
        True
    """
    def reader(f, **kwargs):
        """File reader"""
        sanitize = kwargs.get('sanitize')
        dedupe = kwargs.pop('dedupe', False)
        has_header = kwargs.get('has_header')
        first_row = kwargs.get('first_row', 0)

        # (start, end) slice pairs; the final pair ends with None so the
        # last column extends to the end of the line
        schema = tuple(zip_longest(widths, widths[1:]))

        # skip rows before the first row of interest
        [next(f) for _ in range(first_row)]

        if has_header:
            line = next(f)
            names = (_f for _f in (line[s:e].strip() for s, e in schema) if _f)
            uscored = ft.underscorify(names) if sanitize else names
            header = list(ft.dedupe(uscored) if dedupe else uscored)
        else:
            header = ['column_%i' % (n + 1) for n in range(len(widths))]

        # Materialize the (name, slice) pairs: a bare `zip` object is a
        # one-shot iterator, so it would be exhausted while building the
        # first record and every later row would come back as an empty dict.
        zipped = list(zip(header, schema))
        get_row = lambda line: {k: line[v[0]:v[1]].strip() for k, v in zipped}
        return map(get_row, f)

    return read_any(filepath, reader, mode, **kwargs)
def sanitize_sheet(sheet, mode, first_col=0, **kwargs):
    """Formats content from xls/xslx files as strings according to its cell
    type.

    Args:
        sheet (obj): `xlrd` sheet object.
        mode (str): `xlrd` workbook datemode property.
        kwargs (dict): Keyword arguments
        first_col (int): The first column (default: 0).

    Kwargs:
        date_format (str): `strftime()` date format.
        dt_format (str): `strftime()` datetime format.
        time_format (str): `strftime()` time format.

    Yields:
        Tuple[int, str]: A tuple of (row_number, value).

    Examples:
        >>> filepath = p.join(DATA_DIR, 'test.xls')
        >>> book = xlrd.open_workbook(filepath)
        >>> sheet = book.sheet_by_index(0)
        >>> sheet.row_values(1) == [
        ...     30075.0, 'Iñtërnâtiônàližætiøn', 234.0, 'Ādam', ' ']
        True
        >>> sanitized = sanitize_sheet(sheet, book.datemode)
        >>> [v for i, v in sanitized if i == 1] == [
        ...     '1982-05-04', 'Iñtërnâtiônàližætiøn', '234.0', 'Ādam', ' ']
        True
    """
    date_format = kwargs.get('date_format', '%Y-%m-%d')
    dt_format = kwargs.get('dt_format', '%Y-%m-%d %H:%M:%S')
    time_format = kwargs.get('time_format', '%H:%M:%S')

    def time_func(value):
        """Converts an excel time into python time"""
        # only the (hour, minute, second) portion of the date tuple applies
        args = xlrd.xldate_as_tuple(value, mode)[3:]
        return time(*args).strftime(time_format)

    # cell-type -> stringifier; 'time' and 'datetime' are synthetic types
    # derived from XL_CELL_DATE below
    switch = {
        XL_CELL_DATE: lambda v: xl2dt(v, mode).strftime(date_format),
        'datetime': lambda v: xl2dt(v, mode).strftime(dt_format),
        'time': time_func,
        XL_CELL_EMPTY: lambda v: '',
        XL_CELL_NUMBER: str,
        XL_CELL_BOOLEAN: lambda v: str(bool(v)),
        XL_CELL_ERROR: lambda v: xlrd.error_text_from_code[v],
    }

    for i in range(sheet.nrows):
        types = sheet.row_types(i)[first_col:]
        values = sheet.row_values(i)[first_col:]

        for _type, value in zip(types, values):
            if _type == XL_CELL_DATE and value < 1:
                # serials < 1 have no day component: a time-only cell
                _type = 'time'
            elif _type == XL_CELL_DATE and not value.is_integer():
                # A fractional day component means the cell holds a datetime.
                # `float.is_integer` is a method and must be *called*: the
                # bare attribute is always truthy, so the previous code
                # (`not value.is_integer`) could never take this branch.
                _type = 'datetime'

            # unknown cell types pass through unchanged
            yield (i, switch.get(_type, lambda v: v)(value))
# pylint: disable=unused-argument
def get_header(names, dedupe=False, sanitize=False, **kwargs):
    """Build a header row: drop blank names, then optionally underscorify
    (``sanitize``) and deduplicate (``dedupe``) them.
    """
    header = (name for name in names if name.strip())

    if sanitize:
        header = ft.underscorify(header)

    if dedupe:
        header = ft.dedupe(header)

    return list(header)
def read_xls(filepath, **kwargs):
    """Reads an xls/xlsx file.

    Args:
        filepath (str): The xls/xlsx file path, file, or SpooledTemporaryFile.
        kwargs (dict): Keyword arguments that are passed to the xls reader.

    Kwargs:
        sheet (int): Zero indexed sheet to open (default: 0)
        has_header (bool): Has header row (default: True).
        first_row (int): First row (zero based, default: 0).
        first_col (int): First column (zero based, default: 0).
        date_format (str): Date format passed to `strftime()` (default:
            '%Y-%m-%d', i.e, 'YYYY-MM-DD').
        encoding (str): File encoding. By default, the encoding is derived from
            the file's `CODEPAGE` number, e.g., 1252 translates to `cp1252`.
        sanitize (bool): Underscorify and lowercase field names
            (default: False).
        dedupe (bool): Deduplicate field names (default: False).
        on_demand (bool): open_workbook() loads global data and returns without
            releasing resources. At this stage, the only information available
            about sheets is Book.nsheets and Book.sheet_names() (default:
            False).
        pad_rows (bool): Add empty cells so that all rows have the number of
            columns `Sheet.ncols` (default: False).

    Yields:
        dict: A row of data whose keys are the field names.

    Raises:
        NotFound: If unable to find the resource.

    Examples:
        >>> filepath = p.join(DATA_DIR, 'test.xls')
        >>> records = read_xls(filepath, sanitize=True)
        >>> next(records) == {
        ...     'some_value': '234.0',
        ...     'some_date': '1982-05-04',
        ...     'sparse_data': 'Iñtërnâtiônàližætiøn',
        ...     'unicode_test': 'Ādam'}
        True
    """
    has_header = kwargs.get('has_header', True)
    first_row = kwargs.get('first_row', 0)

    # NOTE(review): `encoding_override` defaults to True here rather than a
    # codec name or None -- confirm against xlrd's expectations.
    xlrd_kwargs = {
        'on_demand': kwargs.get('on_demand'),
        'ragged_rows': not kwargs.get('pad_rows'),
        'encoding_override': kwargs.get('encoding', True)
    }

    # An open file exposes fileno(), so memory-map it; a plain path raises
    # AttributeError and is handed to xlrd directly.
    try:
        contents = mmap(filepath.fileno(), 0)
        book = xlrd.open_workbook(file_contents=contents, **xlrd_kwargs)
    except AttributeError:
        book = xlrd.open_workbook(filepath, **xlrd_kwargs)

    sheet = book.sheet_by_index(kwargs.pop('sheet', 0))

    # Get header row and remove empty columns
    names = sheet.row_values(first_row)[kwargs.get('first_col', 0):]

    if has_header:
        header = get_header(names, kwargs.pop('dedupe', False), **kwargs)
    else:
        header = ['column_%i' % (n + 1) for n in range(len(names))]

    # Convert to strings; sanitize_sheet yields (row_number, value) pairs,
    # so group consecutive pairs back into rows by their row number.
    sanitized = sanitize_sheet(sheet, book.datemode, **kwargs)

    for key, group in it.groupby(sanitized, lambda v: v[0]):
        if has_header and key == first_row:
            # the header row itself is not a data record
            continue

        values = [g[1] for g in group]

        # Remove empty rows
        if any(v and v.strip() for v in values):
            yield dict(zip(header, values))
def read_json(filepath, mode='r', path='item', newline=False):
    """Reads a json file (both regular and newline-delimited).

    Args:
        filepath (str): The json file path or file like object.
        mode (Optional[str]): The file open mode (default: 'r').
        path (Optional[str]): Path to the content you wish to read
            (default: 'item', i.e., the root list). Note: `path` must refer
            to a list.
        newline (Optional[bool]): Interpret the file as newline-delimited
            JSON, one document per line (default: False).

    Kwargs:
        encoding (str): File encoding.

    Returns:
        Iterable: The parsed records

    See also:
        `meza.io.read_any`
    """
    def reader(f, **kw):
        """Parse each line independently, or stream items from `path`."""
        if newline:
            return map(json.loads, f)

        return items(f, path)

    return read_any(filepath, reader, mode)
def get_point(coords, lat_first):
    """Convert a GeoJSON coordinate pair into a point tuple.

    When `lat_first` is truthy the input is ordered (lat, lon) and the pair
    is swapped; otherwise the first two coordinates are taken as-is.
    """
    first, second = coords[0], coords[1]

    if lat_first:
        return (second, first)

    return (first, second)
def gen_records(_type, record, coords, properties, **kwargs):
    """GeoJSON record generator: yields one merged record per coordinate.

    Note: `record` is mutated in place on every iteration; each yield hands
    the current snapshot to `pr.merge`, so consumers must not assume the
    yielded dicts share state with `record` afterwards.

    Args:
        _type (str): GeoJSON geometry type ('Point', 'LineString', or
            'Polygon').
        record (dict): Base record (id/type); receives 'lon'/'lat' (and
            'pos' for polygons) as coordinates are walked.
        coords (list): The geometry's coordinates.
        properties (dict): GeoJSON feature properties merged into each record.

    Kwargs:
        lat_first (bool): Latitude listed as first coordinate.

    Yields:
        dict: The merged record for each coordinate.

    Raises:
        TypeError: If the geometry type is unsupported.
    """
    lat_first = kwargs.get('lat_first')

    if _type == 'Point':
        record['lon'], record['lat'] = get_point(coords, lat_first)
        yield pr.merge([record, properties])
    elif _type == 'LineString':
        # one record per vertex along the line
        for point in coords:
            record['lon'], record['lat'] = get_point(point, lat_first)
            yield pr.merge([record, properties])
    elif _type == 'Polygon':
        # one record per vertex, tagged with the ring index `pos`
        for pos, poly in enumerate(coords):
            for point in poly:
                record['lon'], record['lat'] = get_point(point, lat_first)
                record['pos'] = pos
                yield pr.merge([record, properties])
    else:
        raise TypeError('Invalid geometry type {}.'.format(_type))
def read_geojson(filepath, key='id', mode='r', **kwargs):
    """Reads a geojson file

    Args:
        filepath (str): The geojson file path or file like object.
        key (str): GeoJSON Feature ID (default: 'id'); looked up on the
            feature itself first, then on its properties.
        mode (Optional[str]): The file open mode (default: 'r').

    Kwargs:
        lat_first (bool): Latitude listed as first coordinate (default: False).
        encoding (str): File encoding.

    Returns:
        Iterable: The parsed records

    Raise:
        TypeError if no features list or invalid geometry type.

    See also:
        `meza.io.read_any`
        `meza.convert.records2geojson`

    Examples:
        >>> from decimal import Decimal
        >>> filepath = p.join(DATA_DIR, 'test.geojson')
        >>> records = read_geojson(filepath)
        >>> next(records) == {
        ...     'id': 6635402,
        ...     'iso3': 'ABW',
        ...     'bed_prv_pr': Decimal('0.003'),
        ...     'ic_mhg_cr': Decimal('0.0246'),
        ...     'bed_prv_cr': 0,
        ...     'type': 'Point',
        ...     'lon': Decimal('-70.0624999987871'),
        ...     'lat': Decimal('12.637499976568533')}
        True
    """
    def reader(f, **kwargs):
        """File reader"""
        # `items` streams elements of the top-level 'features' array; a
        # missing array surfaces as KeyError and is reported as unsupported
        try:
            features = items(f, 'features.item')
        except KeyError:
            raise TypeError('Only GeoJSON with features are supported.')
        else:
            for feature in features:
                _type = feature['geometry']['type']
                # properties may be present but null, hence `or {}`
                properties = feature.get('properties') or {}
                coords = feature['geometry']['coordinates']
                record = {
                    'id': feature.get(key, properties.get(key)),
                    'type': feature['geometry']['type']}

                args = (record, coords, properties)

                for rec in gen_records(_type, *args, **kwargs):
                    yield rec

    return read_any(filepath, reader, mode, **kwargs)
def read_yaml(filepath, mode='r', **kwargs):
    """Reads a YAML file

    TODO: convert to a streaming parser

    Args:
        filepath (str): The yaml file path or file like object.
        mode (Optional[str]): The file open mode (default: 'r').

    Kwargs:
        encoding (str): File encoding.

    Returns:
        Iterable: The parsed records

    See also:
        `meza.io.read_any`

    Examples:
        >>> from datetime import date, datetime as dt
        >>> filepath = p.join(DATA_DIR, 'test.yml')
        >>> records = read_yaml(filepath)
        >>> next(records) == {
        ...     'text': 'Chicago Reader',
        ...     'float': 1.0,
        ...     'datetime': dt(1971, 1, 1, 4, 14),
        ...     'boolean': True,
        ...     'time': '04:14:00',
        ...     'date': date(1971, 1, 1),
        ...     'integer': 40}
        True
    """
    # SECURITY NOTE(review): `yaml.load` without an explicit safe Loader can
    # construct arbitrary Python objects; do not feed it untrusted input
    # (consider `yaml.safe_load` if callers don't rely on full loading).
    return read_any(filepath, yaml.load, mode, **kwargs)
def get_text(element):
    """Extract stripped text from a (BeautifulSoup-like) element.

    Falls back from `element.text` to `element.string`, and finally to the
    first anchor's text or href. Returns '' for a falsy element or when no
    candidate yields non-blank text.
    """
    text = ''

    if element:
        if element.text:
            text = element.text.strip()

        if not text and element.string:
            text = element.string.strip()

        if not text and element.a:
            text = (element.a.text or element.a.href or '').strip()

    return text
def _find_table(soup, pos=0):
if pos:
try:
table = soup.find_all('table')[pos]
except IndexError:
table = None
else:
table = soup.table
return table
def _gen_from_rows(rows, header, vertical=False):
    """Yield one record per table row (or per column when `vertical`).

    Vertical tables keep each logical record in a column, so the rows'
    <td> cells are transposed before being zipped with the header.
    """
    if vertical:
        # e.g. rows of [('one', 'two'), ('uno', 'dos')] transpose to
        # columns ('one', 'uno'), ('two', 'dos')
        columns = zip(*(tr.find_all('td') for tr in rows))

        for tds in columns:
            yield dict(zip(header, map(get_text, tds)))
    else:
        for tr in rows:  # pylint: disable=C0103
            cells = (get_text(td) for td in tr.find_all('td'))
            yield dict(zip(header, cells))
def read_html(filepath, table=0, mode='r', **kwargs):
    """Reads tables from an html file

    TODO: convert to lxml.etree.iterparse
    http://lxml.de/parsing.html#iterparse-and-iterwalk

    Args:
        filepath (str): The html file path or file like object.
        table (int): Zero indexed table to open (default: 0)
        mode (Optional[str]): The file open mode (default: 'r').
        kwargs (dict): Keyword arguments

    Kwargs:
        encoding (str): File encoding.
        sanitize (bool): Underscorify and lowercase field names
            (default: False).
        dedupe (bool): Deduplicate field names (default: False).
        vertical (bool): The table has headers in the left column (default:
            False).
        first_row_as_header (bool): Treat the first row's <td> cells as the
            header when no <th> cells exist.

    Returns:
        Iterable: The parsed records

    See also:
        `meza.io.read_any`

    Examples:
        >>> filepath = p.join(DATA_DIR, 'test.html')
        >>> records = read_html(filepath, sanitize=True)
        >>> next(records) == {
        ...     '': 'Mediterranean',
        ...     'january': '82',
        ...     'february': '346',
        ...     'march': '61',
        ...     'april': '1,244',
        ...     'may': '95',
        ...     'june': '10',
        ...     'july': '230',
        ...     'august': '684',
        ...     'september': '268',
        ...     'october': '432',
        ...     'november': '105',
        ...     'december': '203',
        ...     'total_to_date': '3,760'}
        True
    """
    def reader(f, **kwargs):
        """File reader"""
        # prefer the strict lxml XML parser, falling back to the stdlib
        # html.parser when lxml isn't installed
        try:
            soup = BeautifulSoup(f, 'lxml-xml')
        except FeatureNotFound:
            soup = BeautifulSoup(f, 'html.parser')

        sanitize = kwargs.get('sanitize')
        dedupe = kwargs.get('dedupe')
        vertical = kwargs.get('vertical')
        first_row_as_header = kwargs.get('first_row_as_header')
        tbl = _find_table(soup, table)

        if tbl:
            rows = tbl.find_all('tr')

            # find the first row containing a <th>; if none does, first_row
            # ends up bound to the *last* row inspected
            for num, first_row in enumerate(rows):
                if first_row.find('th'):
                    break

            ths = first_row.find_all('th')

            if first_row_as_header and not ths:
                ths = rows[0].find_all('td')

            # a single <th> per row indicates a vertical (transposed) table
            if vertical or len(ths) == 1:
                # the headers are vertical instead of horizontal
                vertical = True
                names = (get_text(row.th) for row in rows)
            elif ths:
                # drop the header row from the data rows
                rows = rows[1:]
                names = map(get_text, ths)
            else:
                # NOTE(review): len() of a soup row counts all children,
                # including text nodes -- confirm the intended column count
                col_nums = range(len(first_row))
                names = ['column_{}'.format(i) for i in col_nums]

            uscored = ft.underscorify(names) if sanitize else names
            header = list(ft.dedupe(uscored) if dedupe else uscored)
            records = _gen_from_rows(rows, header, vertical)
        else:
            records = iter([])

        return records

    return read_any(filepath, reader, mode, **kwargs)
def write(filepath, content, mode='wb+', **kwargs):
    """Writes content to a file path or file like object.

    Args:
        filepath (str): The file path or file like object to write to.
        content (obj): File like object or `requests` iterable response.
        mode (Optional[str]): The file open mode (default: 'wb+').
        kwargs: Keyword arguments.

    Kwargs:
        encoding (str): The file encoding.
        chunksize (Optional[int]): Number of bytes to write at a time (default:
            None, i.e., all).
        length (Optional[int]): Length of content (default: 0).
        bar_len (Optional[int]): Length of progress bar (default: 50).

    Returns:
        int: bytes written

    See also:
        `meza.io.read_any`

    Examples:
        >>> from tempfile import TemporaryFile
        >>>
        >>> write(TemporaryFile(), StringIO('Hello World'))
        11
        >>> write(StringIO(), StringIO('Hello World'))
        11
        >>> content = IterStringIO(iter('Internationalization'))
        >>> write(StringIO(), content)
        20
        >>> content = IterStringIO(iter('Iñtërnâtiônàližætiøn'))
        >>> write(StringIO(), content)
        28
    """
    def writer(f, content, **kwargs):
        """File writer"""
        chunksize = kwargs.get('chunksize')
        length = int(kwargs.get('length') or 0)
        bar_len = kwargs.get('bar_len', 50)
        encoding = kwargs.get('encoding', ENCODING)
        progress = 0

        for chunk in ft.chunk(content, chunksize):
            # a chunk with a `sort` attribute is list-like and needs to be
            # packed into bytes first
            text = ft.byte(chunk) if hasattr(chunk, 'sort') else chunk

            # try the write as-is, then fall back: encode str for binary
            # sinks, decode/convert bytes for text sinks
            try:
                f.write(text)
            except UnicodeEncodeError:
                f.write(text.encode(encoding))
            except TypeError:
                try:
                    f.write(text.decode(encoding))
                except AttributeError:
                    f.write(bytes(text, encoding))

            progress += chunksize or len(text)

            if length:
                # render a crude progress bar when total length is known
                bars = min(int(bar_len * progress / length), bar_len)
                logger.debug('\r[%s%s]', '=' * bars, ' ' * (bar_len - bars))
                sys.stdout.flush()

            # yields the running total, so summing below over-counts unless
            # there is a single chunk -- NOTE(review): confirm intended
            yield progress

    return sum(read_any(filepath, writer, mode, content, **kwargs))
def hash_file(filepath, algo='sha1', chunksize=0, verbose=False):
    """Hashes a file path or file like object.

    https://stackoverflow.com/a/1131255/408556

    Args:
        filepath (str): The file path or file like object to hash.
        algo (str): The hashlib hashing algorithm to use (default: sha1).
        chunksize (Optional[int]): Number of bytes to read at a time
            (default: 0, i.e., read the whole file at once).
        verbose (Optional[bool]): Print debug statements (default: False).

    Returns:
        str: File hash.

    See also:
        `meza.io.read_any`
        `meza.process.hash`

    Examples:
        >>> from tempfile import TemporaryFile
        >>> resp = 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
        >>> hash_file(TemporaryFile()) == resp
        True
    """
    def writer(f, hasher, **kwargs):  # pylint: disable=W0613
        """File writer: feeds the file into the hasher, then yields the digest."""
        if chunksize:
            # stream in fixed-size chunks to bound memory usage
            while True:
                data = f.read(chunksize)

                if not data:
                    break

                hasher.update(data)
        else:
            hasher.update(f.read())

        yield hasher.hexdigest()

    # look up the hash constructor by name, e.g. hashlib.sha1()
    args = [getattr(hashlib, algo)()]
    file_hash = next(read_any(filepath, writer, 'rb', *args))

    if verbose:
        logger.debug('File %s hash is %s.', filepath, file_hash)

    return file_hash
def reencode(f, fromenc=ENCODING, toenc=ENCODING, **kwargs):
    """Reencodes a file from one encoding to another (thin wrapper around
    `Reencoder`).

    Args:
        f (obj): The file like object to convert.
        fromenc (str): The input encoding.
        toenc (str): The output encoding (default: ENCODING).

    Kwargs:
        remove_BOM (bool): Remove Byte Order Marker (default: True)

    Returns:
        obj: file like object of decoded strings

    Examples:
        >>> eff = p.join(DATA_DIR, 'utf16_big.csv')
        >>>
        >>> with open(eff, 'rb') as f:
        ...     encoded = reencode(f, 'utf-16-be', remove_BOM=True)
        ...     encoded.readline(keepends=False) == b'a,b,c'
        True
    """
    return Reencoder(f, fromenc, toenc, **kwargs)
def detect_encoding(f, verbose=False):
    """Detects a file's encoding.

    Args:
        f (obj): The file like object to detect; must be opened in binary
            mode and be seekable (the read position is restored afterwards).
        verbose (Optional[bool]): Log the detection result (default: False).

    Returns:
        dict: The encoding result

    Examples:
        >>> filepath = p.join(DATA_DIR, 'test.csv')
        >>>
        >>> with open(filepath, 'rb') as f:
        ...     result = detect_encoding(f)
        ...     result == {
        ...         'confidence': 0.99, 'language': '', 'encoding': 'utf-8'}
        True
    """
    # remember the current position so the caller's read state is unchanged
    pos = f.tell()
    detector = UniversalDetector()

    # feed lines until the detector is confident enough to stop early
    for line in f:
        detector.feed(line)

        if detector.done:
            break

    detector.close()
    f.seek(pos)

    if verbose:
        logger.debug('result %s', detector.result)

    return detector.result
def get_reader(extension):
    """Gets the appropriate reader for a given file extension.

    Args:
        extension (str): The file extension (with or without a leading dot;
            case insensitive).

    Returns:
        func: The file reading function

    See also:
        `meza.io.read`

    Raises:
        TypeError: If unable to find a suitable reader.

    Examples:
        >>> get_reader('xls') # doctest: +ELLIPSIS
        <function read_xls at 0x...>
    """
    switch = {
        'csv': read_csv,
        'xls': read_xls,
        'xlsx': read_xls,
        'mdb': read_mdb,
        'json': read_json,
        'geojson': read_geojson,
        'geojson.json': read_geojson,
        'sqlite': read_sqlite,
        'dbf': read_dbf,
        'tsv': read_tsv,
        'yaml': read_yaml,
        'yml': read_yaml,
        'html': read_html,
        'fixed': read_fixed_fmt,
    }

    try:
        return switch[extension.lstrip('.').lower()]
    except KeyError:
        # A dict lookup raises KeyError on a missing key; the previous
        # `except IndexError` never matched, so unknown extensions leaked a
        # bare KeyError instead of this descriptive TypeError.
        msg = 'Reader for extension `{}` not found!'
        raise TypeError(msg.format(extension))
def read(filepath, ext=None, **kwargs):
    """Reads any supported file format.

    Args:
        filepath (str): The file path or file like object.
        ext (str): The file extension (default: None, derived from the
            path's suffix).

    Returns:
        Iterable: The parsed records

    See also:
        `meza.io.get_reader`
        `meza.io.join`

    Examples:
        >>> filepath = p.join(DATA_DIR, 'test.xls')
        >>> next(read(filepath, sanitize=True)) == {
        ...     'some_value': '234.0',
        ...     'some_date': '1982-05-04',
        ...     'sparse_data': 'Iñtërnâtiônàližætiøn',
        ...     'unicode_test': 'Ādam'}
        True
        >>> filepath = p.join(DATA_DIR, 'test.csv')
        >>> next(read(filepath, sanitize=True)) == {
        ...     'sparse_data': 'Iñtërnâtiônàližætiøn',
        ...     'some_date': '05/04/82',
        ...     'some_value': '234',
        ...     'unicode_test': 'Ādam'}
        True
    """
    if not ext:
        # fall back to the extension embedded in the file path
        ext = p.splitext(filepath)[1]

    reader = get_reader(ext)
    return reader(filepath, **kwargs)
def join(*filepaths, **kwargs):
    """Reads multiple filepaths and yields all the resulting records.

    Args:
        filepaths (iter[str]): Iterator of filepaths or file like objects.
        kwargs (dict): keyword args passed to the individual readers.

    Kwargs:
        ext (str): The file extension.

    Yields:
        dict: A parsed record

    See also:
        `meza.io.read`

    Examples:
        >>> fs = [p.join(DATA_DIR, 'test.xls'), p.join(DATA_DIR, 'test.csv')]
        >>> next(join(*fs, sanitize=True)) == {
        ...     'some_value': '234.0',
        ...     'some_date': '1982-05-04',
        ...     'sparse_data': 'Iñtërnâtiônàližætiøn',
        ...     'unicode_test': 'Ādam'}
        True
    """
    # bind the shared reader options once, then lazily chain each file's
    # record stream in order
    reader = partial(read, **kwargs)
    return it.chain.from_iterable(map(reader, filepaths))<|fim▁end|>
<|file_name|>template_constants.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
angular.module('openSnap')
.constant('CONFIG', {<|fim▁hole|> menuItems: [{
state: 'home',
icon: 'home'
},
{
state: 'codes',
icon: 'code'
},
{
state: 'create',
icon: 'create'
},
{
state: 'info',
icon: 'info'
}
],
backendUrl: ''
})
})();<|fim▁end|> | |
<|file_name|>switch.py<|end_file_name|><|fim▁begin|>"""Support for switches which integrates with other components."""
import logging
import voluptuous as vol
from homeassistant.components.switch import (
ENTITY_ID_FORMAT,
PLATFORM_SCHEMA,
SwitchEntity,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
CONF_ENTITY_PICTURE_TEMPLATE,
CONF_ICON_TEMPLATE,
CONF_SWITCHES,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.script import Script
from .const import CONF_AVAILABILITY_TEMPLATE, DOMAIN, PLATFORMS
from .template_entity import TemplateEntity
_LOGGER = logging.getLogger(__name__)
_VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"]
ON_ACTION = "turn_on"
OFF_ACTION = "turn_off"
<|fim▁hole|> vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_ICON_TEMPLATE): cv.template,
vol.Optional(CONF_ENTITY_PICTURE_TEMPLATE): cv.template,
vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template,
vol.Required(ON_ACTION): cv.SCRIPT_SCHEMA,
vol.Required(OFF_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(ATTR_FRIENDLY_NAME): cv.string,
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA)}
)
async def _async_create_entities(hass, config):
    """Create one SwitchTemplate entity per configured switch.

    `config[CONF_SWITCHES]` maps a slug (the device/object id) to that
    switch's validated options; optional templates default to None while
    the on/off actions are required by the schema.
    """
    switches = []

    for device, device_config in config[CONF_SWITCHES].items():
        # the slug doubles as the display name when none is configured
        friendly_name = device_config.get(ATTR_FRIENDLY_NAME, device)
        state_template = device_config.get(CONF_VALUE_TEMPLATE)
        icon_template = device_config.get(CONF_ICON_TEMPLATE)
        entity_picture_template = device_config.get(CONF_ENTITY_PICTURE_TEMPLATE)
        availability_template = device_config.get(CONF_AVAILABILITY_TEMPLATE)
        on_action = device_config[ON_ACTION]
        off_action = device_config[OFF_ACTION]
        unique_id = device_config.get(CONF_UNIQUE_ID)

        switches.append(
            SwitchTemplate(
                hass,
                device,
                friendly_name,
                state_template,
                icon_template,
                entity_picture_template,
                availability_template,
                on_action,
                off_action,
                unique_id,
            )
        )

    return switches
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the template switches."""
    # register the `template.reload` service so this platform can be
    # re-created from YAML without restarting Home Assistant
    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
    async_add_entities(await _async_create_entities(hass, config))
class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity):
    """Representation of a Template switch.

    State comes from an optional value template; when no template is given
    the switch runs in optimistic mode (state assumed from the last action
    and restored across restarts via RestoreEntity).
    """

    def __init__(
        self,
        hass,
        device_id,
        friendly_name,
        state_template,
        icon_template,
        entity_picture_template,
        availability_template,
        on_action,
        off_action,
        unique_id,
    ):
        """Initialize the Template switch."""
        super().__init__(
            availability_template=availability_template,
            icon_template=icon_template,
            entity_picture_template=entity_picture_template,
        )
        self.entity_id = async_generate_entity_id(
            ENTITY_ID_FORMAT, device_id, hass=hass
        )
        self._name = friendly_name
        self._template = state_template
        # derive the integration domain ('template') from this module's
        # dotted path for Script logging/attribution
        domain = __name__.split(".")[-2]
        self._on_script = Script(hass, on_action, friendly_name, domain)
        self._off_script = Script(hass, off_action, friendly_name, domain)
        self._state = False
        self._unique_id = unique_id

    @callback
    def _update_state(self, result):
        """Handle a re-rendered state template result."""
        super()._update_state(result)

        # template errors leave the state unknown rather than stale
        if isinstance(result, TemplateError):
            self._state = None
            return

        # NOTE(review): assumes the rendered result is a string; a non-str
        # result would raise on .lower() -- confirm TemplateEntity behavior
        self._state = result.lower() in ("true", STATE_ON)

    async def async_added_to_hass(self):
        """Register callbacks."""
        if self._template is None:
            # restore state after startup
            await super().async_added_to_hass()
            state = await self.async_get_last_state()
            if state:
                self._state = state.state == STATE_ON

            # no need to listen for events
        else:
            # re-render and push state whenever the template's inputs change
            self.add_template_attribute(
                "_state", self._template, None, self._update_state
            )

        await super().async_added_to_hass()

    @property
    def name(self):
        """Return the name of the switch."""
        return self._name

    @property
    def unique_id(self):
        """Return the unique id of this switch."""
        return self._unique_id

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._state

    @property
    def should_poll(self):
        """Return the polling state."""
        # state is pushed via template updates (or assumed), never polled
        return False

    async def async_turn_on(self, **kwargs):
        """Fire the on action."""
        await self._on_script.async_run(context=self._context)

        # optimistic mode only: without a template, assume the action worked
        if self._template is None:
            self._state = True
            self.async_write_ha_state()

    async def async_turn_off(self, **kwargs):
        """Fire the off action."""
        await self._off_script.async_run(context=self._context)

        # optimistic mode only: without a template, assume the action worked
        if self._template is None:
            self._state = False
            self.async_write_ha_state()

    @property
    def assumed_state(self):
        """State is assumed, if no template given."""
        return self._template is None<|fim▁end|>
{ |
<|file_name|>pad-test.js<|end_file_name|><|fim▁begin|>var assert = require('chai').assert;
var Pad = require('../lib/pad');
describe('Pad', function() {
it('should be an object', function() {
var pad = new Pad();
assert.isObject(pad);
});
it('should have a x coordinate of 310 by default', function() {
var terminator = new Pad();
assert.equal(terminator.x, 310);
});
it('should have a y coordinate of 470 by default', function() {
var jon = new Pad();
assert.equal(jon.y, 470);
});<|fim▁hole|>
it('should have a r value of 23 by default', function() {
var terminator = new Pad();
assert.equal(terminator.r, 23);
});
it('should have a sAngle value of 0 by default', function() {
var jon = new Pad();
assert.equal(jon.sAngle, 0);
});
it('should have an eAngle value of 2*Math.PI by default', function() {
var jon = new Pad();
assert.equal(jon.eAngle, 2*Math.PI);
});
it('should have a draw function', function(){
var jon = new Pad();
assert.isFunction(jon.draw);
});
});<|fim▁end|> | |
<|file_name|>TimelineBodyPanel.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2010-2021 JPEXS
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.jpexs.decompiler.flash.gui.timeline;
import com.jpexs.decompiler.flash.configuration.Configuration;
import com.jpexs.decompiler.flash.tags.base.CharacterTag;
import com.jpexs.decompiler.flash.tags.base.MorphShapeTag;
import com.jpexs.decompiler.flash.timeline.DepthState;
import com.jpexs.decompiler.flash.timeline.Timeline;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.SystemColor;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.swing.JPanel;
import org.pushingpixels.substance.api.ColorSchemeAssociationKind;
import org.pushingpixels.substance.api.ComponentState;
import org.pushingpixels.substance.api.DecorationAreaType;
import org.pushingpixels.substance.api.SubstanceLookAndFeel;
import org.pushingpixels.substance.internal.utils.SubstanceColorUtilities;
/**
*
* @author JPEXS
*/
public class TimelineBodyPanel extends JPanel implements MouseListener, KeyListener {
private final Timeline timeline;
public static final Color shapeTweenColor = new Color(0x59, 0xfe, 0x7c);
public static final Color motionTweenColor = new Color(0xd1, 0xac, 0xf1);
//public static final Color frameColor = new Color(0xbd, 0xd8, 0xfc);
public static final Color borderColor = Color.black;
public static final Color emptyBorderColor = new Color(0xbd, 0xd8, 0xfc);
public static final Color keyColor = Color.black;
public static final Color aColor = Color.black;
public static final Color stopColor = Color.white;
public static final Color stopBorderColor = Color.black;
public static final Color borderLinesColor = new Color(0xde, 0xde, 0xde);
//public static final Color selectedColor = new Color(113, 174, 235);
public static final int borderLinesLength = 2;
public static final float fontSize = 10.0f;
private final List<FrameSelectionListener> listeners = new ArrayList<>();
public Point cursor = null;
private enum BlockType {
EMPTY, NORMAL, MOTION_TWEEN, SHAPE_TWEEN
}
public static Color getEmptyFrameColor() {
return SubstanceColorUtilities.getLighterColor(getControlColor(), 0.7);
}
public static Color getEmptyFrameSecondColor() {
return SubstanceColorUtilities.getLighterColor(getControlColor(), 0.9);
}
public static Color getSelectedColor() {
if (Configuration.useRibbonInterface.get()) {
return SubstanceLookAndFeel.getCurrentSkin().getColorScheme(DecorationAreaType.GENERAL, ColorSchemeAssociationKind.FILL, ComponentState.ROLLOVER_SELECTED).getBackgroundFillColor();
} else {
return SystemColor.textHighlight;
}
}
private static Color getControlColor() {
if (Configuration.useRibbonInterface.get()) {
return SubstanceLookAndFeel.getCurrentSkin().getColorScheme(DecorationAreaType.GENERAL, ColorSchemeAssociationKind.FILL, ComponentState.ENABLED).getBackgroundFillColor();
} else {
return SystemColor.control;
}
}
public static Color getFrameColor() {
return SubstanceColorUtilities.getDarkerColor(getControlColor(), 0.1);
}
public void addFrameSelectionListener(FrameSelectionListener l) {
listeners.add(l);
}
public void removeFrameSelectionListener(FrameSelectionListener l) {
listeners.remove(l);
}
public TimelineBodyPanel(Timeline timeline) {
this.timeline = timeline;
Dimension dim = new Dimension(TimelinePanel.FRAME_WIDTH * timeline.getFrameCount() + 1, TimelinePanel.FRAME_HEIGHT * timeline.getMaxDepth());
setSize(dim);
setPreferredSize(dim);
addMouseListener(this);
addKeyListener(this);
setFocusable(true);
}
@Override
protected void paintComponent(Graphics g1) {
Graphics2D g = (Graphics2D) g1;
g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
g.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
g.setColor(TimelinePanel.getBackgroundColor());
g.fillRect(0, 0, getWidth(), getHeight());
Rectangle clip = g.getClipBounds();
int frameWidth = TimelinePanel.FRAME_WIDTH;
int frameHeight = TimelinePanel.FRAME_HEIGHT;
int start_f = clip.x / frameWidth;
int start_d = clip.y / frameHeight;
int end_f = (clip.x + clip.width) / frameWidth;
int end_d = (clip.y + clip.height) / frameHeight;
int max_d = timeline.getMaxDepth();
if (max_d < end_d) {
end_d = max_d;
}
int max_f = timeline.getFrameCount() - 1;
if (max_f < end_f) {
end_f = max_f;
}
if (end_d - start_d + 1 < 0) {
return;
}
// draw background
for (int f = start_f; f <= end_f; f++) {
g.setColor((f + 1) % 5 == 0 ? getEmptyFrameSecondColor() : getEmptyFrameColor());<|fim▁hole|> g.drawRect(f * frameWidth, d * frameHeight, frameWidth, frameHeight);
}
}
// draw selected cell
if (cursor != null) {
g.setColor(getSelectedColor());
g.fillRect(cursor.x * frameWidth + 1, cursor.y * frameHeight + 1, frameWidth - 1, frameHeight - 1);
}
g.setColor(aColor);
g.setFont(getFont().deriveFont(fontSize));
int awidth = g.getFontMetrics().stringWidth("a");
for (int f = start_f; f <= end_f; f++) {
if (!timeline.getFrame(f).actions.isEmpty()) {
g.drawString("a", f * frameWidth + frameWidth / 2 - awidth / 2, frameHeight / 2 + fontSize / 2);
}
}
Map<Integer, Integer> depthMaxFrames = timeline.getDepthMaxFrame();
for (int d = start_d; d <= end_d; d++) {
int maxFrame = depthMaxFrames.containsKey(d) ? depthMaxFrames.get(d) : -1;
if (maxFrame < 0) {
continue;
}
int end_f2 = Math.min(end_f, maxFrame);
int start_f2 = Math.min(start_f, end_f2);
// find the start frame number of the current block
DepthState dsStart = timeline.getFrame(start_f2).layers.get(d);
for (; start_f2 >= 1; start_f2--) {
DepthState ds = timeline.getFrame(start_f2 - 1).layers.get(d);
if (((dsStart == null) != (ds == null))
|| (ds != null && dsStart.characterId != ds.characterId)) {
break;
}
}
for (int f = start_f2; f <= end_f2; f++) {
DepthState fl = timeline.getFrame(f).layers.get(d);
boolean motionTween = fl == null ? false : fl.motionTween;
DepthState flNext = f < max_f ? timeline.getFrame(f + 1).layers.get(d) : null;
DepthState flPrev = f > 0 ? timeline.getFrame(f - 1).layers.get(d) : null;
CharacterTag cht = fl == null ? null : timeline.swf.getCharacter(fl.characterId);
boolean shapeTween = cht != null && (cht instanceof MorphShapeTag);
boolean motionTweenStart = !motionTween && (flNext != null && flNext.motionTween);
boolean motionTweenEnd = !motionTween && (flPrev != null && flPrev.motionTween);
//boolean shapeTweenStart = shapeTween && (flPrev == null || flPrev.characterId != fl.characterId);
//boolean shapeTweenEnd = shapeTween && (flNext == null || flNext.characterId != fl.characterId);
/*if (motionTweenStart || motionTweenEnd) {
motionTween = true;
}*/
int draw_f = f;
int num_frames = 1;
Color backColor;
BlockType blockType;
if (fl == null) {
for (; f + 1 < timeline.getFrameCount(); f++) {
fl = timeline.getFrame(f + 1).layers.get(d);
if (fl != null && fl.characterId != -1) {
break;
}
num_frames++;
}
backColor = getEmptyFrameColor();
blockType = BlockType.EMPTY;
} else {
for (; f + 1 < timeline.getFrameCount(); f++) {
fl = timeline.getFrame(f + 1).layers.get(d);
if (fl == null || fl.key) {
break;
}
num_frames++;
}
backColor = shapeTween ? shapeTweenColor : motionTween ? motionTweenColor : getFrameColor();
blockType = shapeTween ? BlockType.SHAPE_TWEEN : motionTween ? BlockType.MOTION_TWEEN : BlockType.NORMAL;
}
drawBlock(g, backColor, d, draw_f, num_frames, blockType);
}
}
if (cursor != null && cursor.x >= start_f && cursor.x <= end_f) {
g.setColor(TimelinePanel.selectedBorderColor);
g.drawLine(cursor.x * frameWidth + frameWidth / 2, 0, cursor.x * frameWidth + frameWidth / 2, getHeight());
}
}
private void drawBlock(Graphics2D g, Color backColor, int depth, int frame, int num_frames, BlockType blockType) {
int frameWidth = TimelinePanel.FRAME_WIDTH;
int frameHeight = TimelinePanel.FRAME_HEIGHT;
g.setColor(backColor);
g.fillRect(frame * frameWidth, depth * frameHeight, num_frames * frameWidth, frameHeight);
g.setColor(borderColor);
g.drawRect(frame * frameWidth, depth * frameHeight, num_frames * frameWidth, frameHeight);
boolean selected = false;
if (cursor != null && frame <= cursor.x && (frame + num_frames) > cursor.x && depth == cursor.y) {
selected = true;
}
if (selected) {
g.setColor(getSelectedColor());
g.fillRect(cursor.x * frameWidth + 1, depth * frameHeight + 1, frameWidth - 1, frameHeight - 1);
}
boolean isTween = blockType == BlockType.MOTION_TWEEN || blockType == BlockType.SHAPE_TWEEN;
g.setColor(keyColor);
if (isTween) {
g.drawLine(frame * frameWidth, depth * frameHeight + frameHeight * 3 / 4,
frame * frameWidth + num_frames * frameWidth - frameWidth / 2, depth * frameHeight + frameHeight * 3 / 4
);
}
if (blockType == BlockType.EMPTY) {
g.drawOval(frame * frameWidth + frameWidth / 4, depth * frameHeight + frameHeight * 3 / 4 - frameWidth / 2 / 2, frameWidth / 2, frameWidth / 2);
} else {
g.fillOval(frame * frameWidth + frameWidth / 4, depth * frameHeight + frameHeight * 3 / 4 - frameWidth / 2 / 2, frameWidth / 2, frameWidth / 2);
}
if (num_frames > 1) {
int endFrame = frame + num_frames - 1;
if (isTween) {
g.fillOval(endFrame * frameWidth + frameWidth / 4, depth * frameHeight + frameHeight * 3 / 4 - frameWidth / 2 / 2, frameWidth / 2, frameWidth / 2);
} else {
g.setColor(stopColor);
g.fillRect(endFrame * frameWidth + frameWidth / 4, depth * frameHeight + frameHeight / 2 - 2, frameWidth / 2, frameHeight / 2);
g.setColor(stopBorderColor);
g.drawRect(endFrame * frameWidth + frameWidth / 4, depth * frameHeight + frameHeight / 2 - 2, frameWidth / 2, frameHeight / 2);
}
g.setColor(borderLinesColor);
for (int n = frame + 1; n < frame + num_frames; n++) {
g.drawLine(n * frameWidth, depth * frameHeight + 1, n * frameWidth, depth * frameHeight + borderLinesLength);
g.drawLine(n * frameWidth, depth * frameHeight + frameHeight - 1, n * frameWidth, depth * frameHeight + frameHeight - borderLinesLength);
}
}
}
@Override
public void mouseClicked(MouseEvent e) {
}
public void frameSelect(int frame, int depth) {
if (cursor != null && cursor.x == frame && (cursor.y == depth || depth == -1)) {
return;
}
if (depth == -1 && cursor != null) {
depth = cursor.y;
}
cursor = new Point(frame, depth);
for (FrameSelectionListener l : listeners) {
l.frameSelected(frame, depth);
}
repaint();
}
@Override
public void mousePressed(MouseEvent e) {
Point p = e.getPoint();
p.x = p.x / TimelinePanel.FRAME_WIDTH;
p.y = p.y / TimelinePanel.FRAME_HEIGHT;
if (p.x >= timeline.getFrameCount()) {
p.x = timeline.getFrameCount() - 1;
}
int maxDepth = timeline.getMaxDepth();
if (p.y > maxDepth) {
p.y = maxDepth;
}
frameSelect(p.x, p.y);
}
@Override
public void mouseReleased(MouseEvent e) {
}
@Override
public void mouseEntered(MouseEvent e) {
}
@Override
public void mouseExited(MouseEvent e) {
}
@Override
public void keyTyped(KeyEvent e) {
}
@Override
public void keyPressed(KeyEvent e) {
switch (e.getKeyCode()) {
case 37: //left
if (cursor.x > 0) {
frameSelect(cursor.x - 1, cursor.y);
}
break;
case 39: //right
if (cursor.x < timeline.getFrameCount() - 1) {
frameSelect(cursor.x + 1, cursor.y);
}
break;
case 38: //up
if (cursor.y > 0) {
frameSelect(cursor.x, cursor.y - 1);
}
break;
case 40: //down
if (cursor.y < timeline.getMaxDepth()) {
frameSelect(cursor.x, cursor.y + 1);
}
break;
}
}
@Override
public void keyReleased(KeyEvent e) {
}
}<|fim▁end|> | g.fillRect(f * frameWidth, start_d * frameHeight, frameWidth, (end_d - start_d + 1) * frameHeight);
g.setColor(emptyBorderColor);
for (int d = start_d; d <= end_d; d++) { |
<|file_name|>Event.js<|end_file_name|><|fim▁begin|>import React, { PropTypes } from 'react';
import EventImage from '../components/EventImage';
const Event = ({ eventUrl, images, ingress, startDate, title }) => (
<div>
<div className="col-sm-8 col-md-4">
<div className="hero-title">
<a href={eventUrl}>
<p>{ title }</p>
</a>
</div>
<div className="hero-ingress hidden-xs">
<p>{ ingress }</p>
</div>
</div>
<EventImage
date={startDate}
images={images}
eventUrl={eventUrl}
/>
</div>
);
Event.propTypes = {
eventUrl: PropTypes.string.isRequired,
images: EventImage.propTypes.images,<|fim▁hole|> title: PropTypes.string.isRequired,
};
export default Event;<|fim▁end|> | ingress: PropTypes.string.isRequired,
startDate: PropTypes.string.isRequired, |
<|file_name|>compass.js<|end_file_name|><|fim▁begin|>/**
* Compile sass files to css using compass
*/
module.exports = {
dev: {<|fim▁hole|> config: 'config.rb',
environment: 'development'
}
},
};<|fim▁end|> | options: { |
<|file_name|>wallet_api_doc.go<|end_file_name|><|fim▁begin|>package client
const walletAPIDoc = `"keybase wallet api" provides a JSON API to the Keybase wallet.
EXAMPLES:
List the balances in all your accounts:
{"method": "balances"}
See payment history in an account:
{"method": "history", "params": {"options": {"account-id": "GDUKZH6Q3U5WQD4PDGZXYLJE3P76BDRDWPSALN4OUFEESI2QL5UZHCK4"}}}
Get details about a single transaction:<|fim▁hole|>Lookup the primary Stellar account ID for a user:
{"method": "lookup", "params": {"options": {"name": "patrick"}}}
Get the inflation destination for an account:
{"method": "get-inflation", "params": {"options": {"account-id": "GDUKZH6Q3U5WQD4PDGZXYLJE3P76BDRDWPSALN4OUFEESI2QL5UZHCK4"}}}
Set the inflation destination for an account to the Lumenaut pool:
{"method": "set-inflation", "params": {"options": {"account-id": "GDUKZH6Q3U5WQD4PDGZXYLJE3P76BDRDWPSALN4OUFEESI2QL5UZHCK4", "destination": "lumenaut"}}}
Set the inflation destination for an account to some other account:
{"method": "set-inflation", "params": {"options": {"account-id": "GDUKZH6Q3U5WQD4PDGZXYLJE3P76BDRDWPSALN4OUFEESI2QL5UZHCK4", "destination": "GD5CR6MG5R3BADYP2RUVAGC5PKCZGS4CFSAK3FYKD7WEUTRW25UH6C2J"}}}
Set the inflation destination for an account to itself:
{"method": "set-inflation", "params": {"options": {"account-id": "GDUKZH6Q3U5WQD4PDGZXYLJE3P76BDRDWPSALN4OUFEESI2QL5UZHCK4", "destination": "self"}}}
Send XLM to a Keybase user (there is no confirmation so be careful):
{"method": "send", "params": {"options": {"recipient": "patrick", "amount": "1"}}}
Send $10 USD worth of XLM to a Keybase user:
{"method": "send", "params": {"options": {"recipient": "patrick", "amount": "10", "currency": "USD", "message": "here's the money I owe you"}}}
Find a payment path to a Keybase user between two assets:
{"method": "find-payment-path", "params": {"options": {"recipient": "patrick", "amount": "10", "source-asset": "native", "destination-asset": "USD/GDUKMGUGDZQK6YHYA5Z6AY2G4XDSZPSZ3SW5UN3ARVMO6QSRDWP5YLEX"}}}
Send 10 AnchorUSD to a Keybase user as a path payment by converting at most 120 XLM (there is no confirmation so be careful):
{"method": "send-path-payment", "params": {"options": {"recipient": "patrick", "amount": "10", "source-max-amount": "120", "source-asset": "native", "destination-asset": "USD/GDUKMGUGDZQK6YHYA5Z6AY2G4XDSZPSZ3SW5UN3ARVMO6QSRDWP5YLEX"}}}
If you send XLM to a Keybase user who has not established a wallet yet, you can
cancel the payment before the recipient claims it and the XLM will be returned
to your account:
{"method": "cancel", "params": {"options": {"txid": "e5334601b9dc2a24e031ffeec2fce37bb6a8b4b51fc711d16dec04d3e64976c4"}}}
Initialize the wallet for an account:
{"method": "setup-wallet"}
`<|fim▁end|> | {"method": "details", "params": {"options": {"txid": "e5334601b9dc2a24e031ffeec2fce37bb6a8b4b51fc711d16dec04d3e64976c4"}}}
|
<|file_name|>HorizontalListWidget.cpp<|end_file_name|><|fim▁begin|>/***********************************************************************************
** MIT License **
** **
** Copyright (c) 2018 Victor DENIS (victordenis01@gmail.com) **
** **
** Permission is hereby granted, free of charge, to any person obtaining a copy **
** of this software and associated documentation files (the "Software"), to deal **
** in the Software without restriction, including without limitation the rights **
** to use, copy, modify, merge, publish, distribute, sublicense, and/or sell **
** copies of the Software, and to permit persons to whom the Software is **
** furnished to do so, subject to the following conditions: **
** **
** The above copyright notice and this permission notice shall be included in all **
** copies or substantial portions of the Software. **
** **
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR **
** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, **
** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE **
** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER **
** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, **
** OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE **
** SOFTWARE. **
***********************************************************************************/
#include "HorizontalListWidget.hpp"
namespace Sn {
HorizontalListWidget::HorizontalListWidget(QWidget* parent) :
QListWidget(parent),
m_mouseDown(false)
{
setFocusPolicy(Qt::NoFocus);
setVerticalScrollBarPolicy(Qt::ScrollBarAlwaysOff);
setMovement(QListView::Static);
setResizeMode(QListView::Adjust);
setViewMode(QListView::IconMode);
setSelectionRectVisible(false);
}
void HorizontalListWidget::mousePressEvent(QMouseEvent* event)<|fim▁hole|>{
m_mouseDown = true;
QListWidget::mousePressEvent(event);
}
void HorizontalListWidget::mouseMoveEvent(QMouseEvent* event)
{
if (!itemAt(event->pos()))
return;
QListWidget::mouseMoveEvent(event);
}
void HorizontalListWidget::mouseReleaseEvent(QMouseEvent* event)
{
m_mouseDown = false;
QListWidget::mouseReleaseEvent(event);
}
void HorizontalListWidget::wheelEvent(QWheelEvent* event)
{
Q_UNUSED(event);
}
}<|fim▁end|> | |
<|file_name|>messenger.py<|end_file_name|><|fim▁begin|>"""
Implements a simple, robust, safe, Messenger class that allows one to
register callbacks for a signal/slot (or event/handler) kind of
messaging system. One can basically register a callback
function/method to be called when an object sends a particular event.
The Messenger class is Borg. So it is easy to instantiate and use.
This module is also reload-safe, so if the module is reloaded the
callback information is not lost. Method callbacks do not have a
reference counting problem since weak references are used.
The main functionality of this module is provided by three functions,
`connect`, `disconnect` and `send`.
Here is example usage with VTK::
>>> import messenger, vtk
>>> def cb(obj, evt):
... print obj.__class__.__name__, evt
...
>>> o = vtk.vtkProperty()
>>> o.AddObserver('ModifiedEvent', messenger.send)
1
>>> messenger.connect(o, 'ModifiedEvent', cb)
>>>
>>> o.SetRepresentation(1)
vtkOpenGLProperty ModifiedEvent
>>> messenger.connect(o, 'AnyEvent', cb)
>>> o.SetRepresentation(2)
vtkOpenGLProperty ModifiedEvent
vtkOpenGLProperty ModifiedEvent
>>>
>>> messenger.send(o, 'foo')
vtkOpenGLProperty foo
>>> messenger.disconnect(o, 'AnyEvent')
>>> messenger.send(o, 'foo')
>>>
This approach is necessary if you don't want to be bitten by reference
cycles. If you have a Python object holding a reference to a VTK
object and pass a method of the object to the AddObserver call, you
will get a reference cycle that cannot be collected by the garbage
collector. Using this messenger module gets around the problem.
Also note that adding a connection for 'AnyEvent' will trigger a
callback no matter what event was generated. The code above also
shows how disconnection works.
"""
# Author: Prabhu Ramachandran
# Copyright (c) 2004-2007, Enthought, Inc.
# License: BSD Style.
__all__ = ['Messenger', 'MessengerError',
'connect', 'disconnect', 'send']
import types
import sys
import weakref
#################################################################
# This code makes the module reload-safe.
#################################################################
_saved = {}
for name in ['messenger', 'tvtk.messenger']:
if sys.modules.has_key(name):
mod = sys.modules[name]
if hasattr(mod, 'Messenger'):
_saved = mod.Messenger._shared_data
del mod
break
#################################################################
# `MessengerError` class for exceptions raised by Messenger.
#################################################################
class MessengerError(Exception):
pass
#################################################################
# `Messenger` class.
#################################################################
class Messenger:
"""Implements a messenger class which deals with something like
signals and slots. Basically, an object can register a signal
that it plans to emit. Any other object can decide to handle that
signal (of that particular object) by registering itself with the
messenger. When a signal is emitted the messenger calls all
handlers. This makes it totally easy to deal with communication
between objects. The class is Borg. Rather than use this class,
please use the 'connect' and 'disconnect' functions.
"""
_shared_data = _saved
def __init__(self):
"""Create the messenger. This class is Borg. So all
instances are the same.
"""
self.__dict__ = self._shared_data
if not hasattr(self, '_signals'):
# First instantiation.
self._signals = {}
self._catch_all = ['AnyEvent', 'all']
#################################################################
# 'Messenger' interface.
#################################################################
def connect(self, obj, event, callback):
""" Registers a slot given an object and its signal to slot
into and also given a bound method in `callback` that should
have two arguments. `send` will call the callback
with the object that emitted the signal and the actual
event/signal as arguments.
Parameters
----------
- obj : Python object
Any Python object that will generate the particular event.
- event : An event (can be anything, usually strings)
The event `obj` will generate. If this is in the list
`self._catch_all`, then any event will call this callback.
- callback : `function` or `method`
This callback will be called when the object generates the<|fim▁hole|> and keyword arguments given by the `obj` are passed along to
the callback.
"""
typ = type(callback)
key = hash(obj)
if not self._signals.has_key(key):
self._signals[key] = {}
signals = self._signals[key]
if not signals.has_key(event):
signals[event] = {}
slots = signals[event]
callback_key = hash(callback)
if typ is types.FunctionType:
slots[callback_key] = (None, callback)
elif typ is types.MethodType:
obj = weakref.ref(callback.im_self)
name = callback.__name__
slots[callback_key] = (obj, name)
else:
raise MessengerError, \
"Callback must be a function or method. "\
"You passed a %s."%(str(callback))
def disconnect(self, obj, event=None, callback=None, obj_is_hash=False):
"""Disconnects the object and its event handlers.
Parameters
----------
- obj : Object
The object that generates events.
- event : The event. (defaults to None)
- callback : `function` or `method`
The event handler.
If `event` and `callback` are None (the default) all the
events and handlers for the object are removed. If only
`callback` is None, only this handler is removed. If `obj`
and 'event' alone are specified, all handlers for the event
are removed.
- obj_is_hash : `bool`
Specifies if the object passed is a hash instead of the object itself.
This is needed if the object is gc'd but only the hash exists and one
wants to disconnect the object.
"""
signals = self._signals
if obj_is_hash:
key = obj
else:
key = hash(obj)
if not signals.has_key(key):
return
if callback is None:
if event is None:
del signals[key]
else:
del signals[key][event]
else:
del signals[key][event][hash(callback)]
def send(self, source, event, *args, **kw_args):
"""To be called by the object `source` that desires to
generate a particular event. This function in turn invokes
all the handlers for the event passing the `source` object,
event and any additional arguments and keyword arguments. If
any connected callback is garbage collected without being
disconnected, it is silently removed from the existing slots.
Parameters
----------
- source : Python object
This is the object that generated the event.
- event : The event.
If there are handlers connected to events called 'AnyEvent'
or 'all', then any event will invoke these.
"""
try:
sigs = self._get_signals(source)
except (MessengerError, KeyError):
return
events = self._catch_all[:]
if event not in events:
events.append(event)
for evt in events:
if sigs.has_key(evt):
slots = sigs[evt]
for key in slots.keys():
obj, meth = slots[key]
if obj: # instance method
inst = obj()
if inst:
getattr(inst, meth)(source, event, *args, **kw_args)
else:
# Oops, dead reference.
del slots[key]
else: # normal function
meth(source, event, *args, **kw_args)
def is_registered(self, obj):
"""Returns if the given object has registered itself with the
messenger.
"""
try:
sigs = self._get_signals(obj)
except MessengerError:
return 0
else:
return 1
def get_signal_names(self, obj):
"""Returns a list of signal names the object passed has
registered.
"""
return self._get_signals(obj).keys()
#################################################################
# Non-public interface.
#################################################################
def _get_signals(self, obj):
"""Given an object `obj` it returns the signals of that
object.
"""
ret = self._signals.get(hash(obj))
if ret is None:
raise MessengerError, \
"No such object: %s, has registered itself "\
"with the messenger."%obj
else:
return ret
#################################################################
# Convenience functions.
#################################################################
_messenger = Messenger()
def connect(obj, event, callback):
_messenger.connect(obj, event, callback)
connect.__doc__ = _messenger.connect.__doc__
def disconnect(obj, event=None, callback=None, obj_is_hash=False):
_messenger.disconnect(obj, event, callback)
disconnect.__doc__ = _messenger.disconnect.__doc__
def send(obj, event, *args, **kw_args):
_messenger.send(obj, event, *args, **kw_args)
send.__doc__ = _messenger.send.__doc__
del _saved<|fim▁end|> | particular event. The object, event and any other arguments |
<|file_name|>mini_lambda.py<|end_file_name|><|fim▁begin|>import json
<|fim▁hole|> 'statusCode': 200,
'body': json.dumps('Hello from Lambda!')
}<|fim▁end|> | def lambda_handler(event, context):
return { |
<|file_name|>MySQLToolTableCheckSettings.java<|end_file_name|><|fim▁begin|>/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2021 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software<|fim▁hole|> * See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.mysql.tasks;
import org.jkiss.dbeaver.ext.mysql.model.MySQLTableBase;
import org.jkiss.dbeaver.model.data.json.JSONUtils;
import org.jkiss.dbeaver.model.meta.IPropertyValueListProvider;
import org.jkiss.dbeaver.model.meta.Property;
import org.jkiss.dbeaver.model.runtime.DBRRunnableContext;
import org.jkiss.dbeaver.model.sql.task.SQLToolExecuteSettings;
import java.util.Map;
/**
* Table check settings
*/
public class MySQLToolTableCheckSettings extends SQLToolExecuteSettings<MySQLTableBase> {
private String option;
@Property(viewable = true, editable = true, updatable = true, listProvider = CheckOptionListProvider.class)
public String getOption() {
return option;
}
public void setOption(String option) {
this.option = option;
}
@Override
public void loadConfiguration(DBRRunnableContext runnableContext, Map<String, Object> config) {
super.loadConfiguration(runnableContext, config);
option = JSONUtils.getString(config, "option");
}
@Override
public void saveConfiguration(Map<String, Object> config) {
super.saveConfiguration(config);
config.put("option", option);
}
public static class CheckOptionListProvider implements IPropertyValueListProvider<MySQLToolTableCheckSettings> {
@Override
public boolean allowCustomValue() {
return false;
}
@Override
public Object[] getPossibleValues(MySQLToolTableCheckSettings object) {
return new String[] {
"",
"FOR UPGRADE",
"QUICK",
"FAST",
"MEDIUM",
"EXTENDED",
"CHANGED"
};
}
}
}<|fim▁end|> | * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
<|file_name|>bump_maven_version.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python
# Script for increasing versions numbers across the code
import sys
import glob
import re
import argparse
def check_version_format(version):
"""Check format of version number"""
pattern = '^[0-9]+[\.][0-9]+[\.][0-9]+(\-.+)*$'
return re.match(pattern, version) is not None
BIO_FORMATS_ARTIFACT = (
r"(<groupId>%s</groupId>\n"
".*<artifactId>pom-bio-formats</artifactId>\n"
".*<version>).*(</version>)")
class Replacer(object):
def __init__(self, old_group="ome", new_group="ome"):
self.old_group = old_group
self.new_group = new_group
self.group_pattern = \
r"(<groupId>)%s(</groupId>)" % \
old_group
self.artifact_pattern = BIO_FORMATS_ARTIFACT % old_group
self.release_version_pattern = \
r"(<release.version>).*(</release.version>)"
self.stableversion_pattern = \
r"(STABLE_VERSION = \").*(\";)"
self.upgradecheck = \
"components/formats-bsd/src/loci/formats/UpgradeChecker.java"
def replace_file(self, input_path, pattern, version):
"""Substitute a pattern with version in a file"""
with open(input_path, "r") as infile:
regexp = re.compile(pattern)
new_content = regexp.sub(r"\g<1>%s\g<2>" % version, infile.read())
with open(input_path, "w") as output:
output.write(new_content)
output.close()
infile.close()
def bump_pom_versions(self, version):
"""Replace versions in pom.xml files"""
# Replace versions in components pom.xml
for pomfile in (glob.glob("*/*/pom.xml") + glob.glob("*/*/*/pom.xml")):
self.replace_file(pomfile, self.artifact_pattern, version)
self.replace_file(pomfile, self.group_pattern, self.new_group)
# Replace versions in top-level pom.xml
toplevelpomfile = "pom.xml"
self.replace_file(
toplevelpomfile, self.artifact_pattern, version)<|fim▁hole|>
def bump_stable_version(self, version):
"""Replace UpgradeChecker stable version"""
self.replace_file(
self.upgradecheck, self.stableversion_pattern, version)
if __name__ == "__main__":
# Input check
parser = argparse.ArgumentParser()
parser.add_argument("--old-group", type=str, default="ome")
parser.add_argument("--new-group", type=str, default="ome")
parser.add_argument("version", type=str)
ns = parser.parse_args()
if not check_version_format(ns.version):
print "Invalid version format"
sys.exit(1)
replacer = Replacer(old_group=ns.old_group, new_group=ns.new_group)
replacer.bump_pom_versions(ns.version)
if not ns.version.endswith('SNAPSHOT'):
replacer.bump_stable_version(ns.version)<|fim▁end|> | self.replace_file(
toplevelpomfile, self.release_version_pattern, version)
self.replace_file(
toplevelpomfile, self.group_pattern, self.new_group) |
<|file_name|>range_map.rs<|end_file_name|><|fim▁begin|>//! Implements a map from integer indices to data.
//! Rather than storing data for every index, internally, this maps entire ranges to the data.
//! To this end, the APIs all work on ranges, not on individual integers. Ranges are split as
//! necessary (e.g., when [0,5) is first associated with X, and then [1,2) is mutated).
//! Users must not depend on whether a range is coalesced or not, even though this is observable
//! via the iteration APIs.
use std::ops;
use rustc::ty::layout::Size;
#[derive(Clone, Debug)]
struct Elem<T> {
/// The range covered by this element; never empty.
range: ops::Range<u64>,
/// The data stored for this element.
data: T,
}
#[derive(Clone, Debug)]
pub struct RangeMap<T> {
v: Vec<Elem<T>>,
}
impl<T> RangeMap<T> {
/// Creates a new `RangeMap` for the given size, and with the given initial value used for
/// the entire range.
#[inline(always)]
pub fn new(size: Size, init: T) -> RangeMap<T> {
let size = size.bytes();
let mut map = RangeMap { v: Vec::new() };
if size > 0 {
map.v.push(Elem { range: 0..size, data: init });
}
map
}
/// Finds the index containing the given offset.
fn find_offset(&self, offset: u64) -> usize {
// We do a binary search.
let mut left = 0usize; // inclusive
let mut right = self.v.len(); // exclusive
loop {
debug_assert!(left < right, "find_offset: offset {} is out-of-bounds", offset);
let candidate = left.checked_add(right).unwrap() / 2;
let elem = &self.v[candidate];
if offset < elem.range.start {
// We are too far right (offset is further left).
debug_assert!(candidate < right); // we are making progress
right = candidate;
} else if offset >= elem.range.end {
// We are too far left (offset is further right).
debug_assert!(candidate >= left); // we are making progress
left = candidate + 1;
} else {
// This is it!
return candidate;
}
}
}
/// Provides read-only iteration over everything in the given range. This does
/// *not* split items if they overlap with the edges. Do not use this to mutate
/// through interior mutability.
pub fn iter<'a>(&'a self, offset: Size, len: Size) -> impl Iterator<Item = &'a T> + 'a {
let offset = offset.bytes();
let len = len.bytes();
// Compute a slice starting with the elements we care about.
let slice: &[Elem<T>] = if len == 0 {
// We just need any empty iterator. We don't even want to
// yield the element that surrounds this position.
&[]
} else {
let first_idx = self.find_offset(offset);
&self.v[first_idx..]
};
// The first offset that is not included any more.
let end = offset + len;
slice.iter().take_while(move |elem| elem.range.start < end).map(|elem| &elem.data)
}
pub fn iter_mut_all<'a>(&'a mut self) -> impl Iterator<Item = &'a mut T> + 'a {
self.v.iter_mut().map(|elem| &mut elem.data)
}
// Splits the element situated at the given `index`, such that the 2nd one starts at offset
// `split_offset`. Do nothing if the element already starts there.
// Returns whether a split was necessary.
fn split_index(&mut self, index: usize, split_offset: u64) -> bool
where
T: Clone,
{
let elem = &mut self.v[index];
if split_offset == elem.range.start || split_offset == elem.range.end {
// Nothing to do.
return false;
}
debug_assert!(
elem.range.contains(&split_offset),
"the `split_offset` is not in the element to be split"
);
// Now we really have to split. Reduce length of first element.
let second_range = split_offset..elem.range.end;
elem.range.end = split_offset;
// Copy the data, and insert second element.
let second = Elem { range: second_range, data: elem.data.clone() };
self.v.insert(index + 1, second);
return true;
}
/// Provides mutable iteration over everything in the given range. As a side-effect,
/// this will split entries in the map that are only partially hit by the given range,
/// to make sure that when they are mutated, the effect is constrained to the given range.
/// Moreover, this will opportunistically merge neighbouring equal blocks.
pub fn iter_mut<'a>(
&'a mut self,
offset: Size,
len: Size,
) -> impl Iterator<Item = &'a mut T> + 'a
where
T: Clone + PartialEq,
{
let offset = offset.bytes();
let len = len.bytes();
// Compute a slice containing exactly the elements we care about
let slice: &mut [Elem<T>] = if len == 0 {
// We just need any empty iterator. We don't even want to
// yield the element that surrounds this position, nor do
// any splitting.
&mut []
} else {
// Make sure we got a clear beginning
let mut first_idx = self.find_offset(offset);
if self.split_index(first_idx, offset) {
// The newly created 2nd element is ours
first_idx += 1;
}
// No more mutation.
let first_idx = first_idx;
// Find our end. Linear scan, but that's ok because the iteration
// is doing the same linear scan anyway -- no increase in complexity.
// We combine this scan with a scan for duplicates that we can merge, to reduce
// the number of elements.
// We stop searching after the first "block" of size 1, to avoid spending excessive
// amounts of time on the merging.
let mut equal_since_idx = first_idx;
// Once we see too many non-mergeable blocks, we stop.
// The initial value is chosen via... magic. Benchmarking and magic.
let mut successful_merge_count = 3usize;
// When the loop is done, this is the first excluded element.
let mut end_idx = first_idx;
loop {
// Compute if `end` is the last element we need to look at.
let done = self.v[end_idx].range.end >= offset + len;
// We definitely need to include `end`, so move the index.
end_idx += 1;
debug_assert!(
done || end_idx < self.v.len(),
"iter_mut: end-offset {} is out-of-bounds",
offset + len
);
// see if we want to merge everything in `equal_since..end` (exclusive at the end!)
if successful_merge_count > 0 {
if done || self.v[end_idx].data != self.v[equal_since_idx].data {
// Everything in `equal_since..end` was equal. Make them just one element covering
// the entire range.
let removed_elems = end_idx - equal_since_idx - 1; // number of elements that we would remove
if removed_elems > 0 {
// Adjust the range of the first element to cover all of them.
let equal_until = self.v[end_idx - 1].range.end; // end of range of last of the equal elements
self.v[equal_since_idx].range.end = equal_until;
// Delete the rest of them.
self.v.splice(equal_since_idx + 1..end_idx, std::iter::empty());
// Adjust `end_idx` because we made the list shorter.
end_idx -= removed_elems;
// Adjust the count for the cutoff.
successful_merge_count += removed_elems;
} else {
// Adjust the count for the cutoff.
successful_merge_count -= 1;
}
// Go on scanning for the next block starting here.
equal_since_idx = end_idx;
}
}
// Leave loop if this is the last element.
if done {
break;
}
}
// Move to last included instead of first excluded index.
let end_idx = end_idx - 1;
// We need to split the end as well. Even if this performs a
// split, we don't have to adjust our index as we only care about
// the first part of the split.
self.split_index(end_idx, offset + len);
// Now we yield the slice. `end` is inclusive.
&mut self.v[first_idx..=end_idx]
};
slice.iter_mut().map(|elem| &mut elem.data)
}
}
#[cfg(test)]
mod tests {
use super::*;
/// Query the map at every offset in the range and collect the results.
fn to_vec<T: Copy>(map: &RangeMap<T>, offset: u64, len: u64) -> Vec<T> {
(offset..offset + len)
.into_iter()
.map(|i| map.iter(Size::from_bytes(i), Size::from_bytes(1)).next().map(|&t| t).unwrap())
.collect()
}
#[test]
fn basic_insert() {
let mut map = RangeMap::<i32>::new(Size::from_bytes(20), -1);
// Insert.
for x in map.iter_mut(Size::from_bytes(10), Size::from_bytes(1)) {
*x = 42;
}
// Check.
assert_eq!(to_vec(&map, 10, 1), vec![42]);
assert_eq!(map.v.len(), 3);
// Insert with size 0.
for x in map.iter_mut(Size::from_bytes(10), Size::from_bytes(0)) {
*x = 19;
}
for x in map.iter_mut(Size::from_bytes(11), Size::from_bytes(0)) {
*x = 19;
}
assert_eq!(to_vec(&map, 10, 2), vec![42, -1]);
assert_eq!(map.v.len(), 3);
}
#[test]
fn gaps() {
let mut map = RangeMap::<i32>::new(Size::from_bytes(20), -1);
for x in map.iter_mut(Size::from_bytes(11), Size::from_bytes(1)) {
*x = 42;
}
for x in map.iter_mut(Size::from_bytes(15), Size::from_bytes(1)) {
*x = 43;
}
assert_eq!(map.v.len(), 5);
assert_eq!(to_vec(&map, 10, 10), vec![-1, 42, -1, -1, -1, 43, -1, -1, -1, -1]);
for x in map.iter_mut(Size::from_bytes(10), Size::from_bytes(10)) {
if *x < 42 {
*x = 23;
}
}
assert_eq!(map.v.len(), 6);
assert_eq!(to_vec(&map, 10, 10), vec![23, 42, 23, 23, 23, 43, 23, 23, 23, 23]);
assert_eq!(to_vec(&map, 13, 5), vec![23, 23, 43, 23, 23]);
for x in map.iter_mut(Size::from_bytes(15), Size::from_bytes(5)) {
*x = 19;
}
assert_eq!(map.v.len(), 6);
assert_eq!(to_vec(&map, 10, 10), vec![23, 42, 23, 23, 23, 19, 19, 19, 19, 19]);
// Should be seeing two blocks with 19.
assert_eq!(
map.iter(Size::from_bytes(15), Size::from_bytes(2)).map(|&t| t).collect::<Vec<_>>(),<|fim▁hole|> for _ in map.iter_mut(Size::from_bytes(15), Size::from_bytes(5)) {}
assert_eq!(map.v.len(), 5);
assert_eq!(to_vec(&map, 10, 10), vec![23, 42, 23, 23, 23, 19, 19, 19, 19, 19]);
}
}<|fim▁end|> | vec![19, 19]
);
// A NOP `iter_mut` should trigger merging. |
<|file_name|>amqp_publisher.cpp<|end_file_name|><|fim▁begin|>#include <amqp_tcp_socket.h>
#include <amqp.h>
#include <amqp_framing.h>
#include <iostream>
#include <thread>
#include <chrono>
#include <string.h>
#include <signal.h>
volatile bool g_running = false;
void handler(int)
{
g_running = true;
}
int main(int argc, char**argv)
{
if(argc != 4)
{
std::cerr<<"usage: " << argv[0] << " <host> <messages> <size>" << std::endl;
return -1;
}
std::string HOST(argv[1]);
unsigned long MESSAGES = stoul(std::string(argv[2]));
unsigned long SIZE = stoul(std::string(argv[3]));
auto pub_conn = amqp_new_connection();
auto pub_socket = amqp_tcp_socket_new(pub_conn);
int status = amqp_socket_open(pub_socket, HOST.c_str(), 5672);
if(status) {
std::cerr << "could not open " << HOST << ":5672" << std::endl;
return -1;
}
char* data = new char[SIZE];
memset(data,'a',SIZE);
amqp_bytes_t message_bytes;
message_bytes.len = SIZE;
message_bytes.bytes = data;
amqp_rpc_reply_t response = amqp_login(pub_conn,"/",0,131072,0,AMQP_SASL_METHOD_PLAIN,"guest","guest");
if(response.reply_type == AMQP_RESPONSE_SERVER_EXCEPTION)
{
std::cerr << "SERVER EXCEPTION" << std::endl;
return -1;
}
else if (response.reply_type == AMQP_RESPONSE_LIBRARY_EXCEPTION)
{
std::cerr << "CLIENT EXCEPTION" << std::endl;
}
amqp_channel_open(pub_conn,1);<|fim▁hole|> //amqp_confirm_select(pub_conn,1);
amqp_get_rpc_reply(pub_conn);
signal(SIGUSR1, handler);
while(!g_running)
{
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
auto start = std::chrono::high_resolution_clock::now();
for(int i = 0; i<MESSAGES; ++i)
{
amqp_basic_properties_t props;
props._flags = AMQP_BASIC_CONTENT_TYPE_FLAG | AMQP_BASIC_DELIVERY_MODE_FLAG;
props.content_type = amqp_cstring_bytes("text/plain");
props.delivery_mode = 2; /* persistent delivery mode */
amqp_basic_publish(pub_conn,1,amqp_cstring_bytes("test-exchange"),
amqp_cstring_bytes("test-queue"),0,0,&props,message_bytes);
}
auto end = std::chrono::high_resolution_clock::now();
std::chrono::duration<double,std::milli> elapsed = end-start;
std::cout << "\nPublish " << MESSAGES << " complete in " << elapsed.count() << "ms (" << (MESSAGES*1000/elapsed.count()) << "/s)" << std::endl;
amqp_channel_close(pub_conn,1,AMQP_REPLY_SUCCESS);
amqp_connection_close(pub_conn,AMQP_REPLY_SUCCESS);
}<|fim▁end|> | |
<|file_name|>UniversalBodyWrapper.java<|end_file_name|><|fim▁begin|>/*
* ################################################################
*
* ProActive Parallel Suite(TM): The Java(TM) library for
* Parallel, Distributed, Multi-Core Computing for
* Enterprise Grids & Clouds
*
* Copyright (C) 1997-2012 INRIA/University of
* Nice-Sophia Antipolis/ActiveEon
* Contact: proactive@ow2.org or contact@activeeon.com
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation; version 3 of
* the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* If needed, contact us to obtain a release under GPL Version 2 or 3
* or a different license than the AGPL.
*
* Initial developer(s): The ProActive Team
* http://proactive.inria.fr/team_members.htm
* Contributor(s):
*
* ################################################################
* $$PROACTIVE_INITIAL_DEV$$
*/
package org.objectweb.proactive.extensions.mixedlocation;
import java.io.IOException;
import java.security.AccessControlException;
import java.security.PublicKey;
import org.objectweb.proactive.core.ProActiveException;
import org.objectweb.proactive.core.ProActiveRuntimeException;
import org.objectweb.proactive.core.UniqueID;
import org.objectweb.proactive.core.body.UniversalBody;
import org.objectweb.proactive.core.body.ft.internalmsg.FTMessage;
import org.objectweb.proactive.core.body.reply.Reply;
import org.objectweb.proactive.core.body.request.Request;
import org.objectweb.proactive.core.component.request.Shortcut;
import org.objectweb.proactive.core.gc.GCMessage;
import org.objectweb.proactive.core.gc.GCResponse;
import org.objectweb.proactive.core.security.PolicyServer;
import org.objectweb.proactive.core.security.ProActiveSecurityManager;
import org.objectweb.proactive.core.security.SecurityContext;
import org.objectweb.proactive.core.security.TypedCertificate;
import org.objectweb.proactive.core.security.crypto.KeyExchangeException;
import org.objectweb.proactive.core.security.crypto.SessionException;
import org.objectweb.proactive.core.security.exceptions.RenegotiateSessionException;
import org.objectweb.proactive.core.security.exceptions.SecurityNotAvailableException;
import org.objectweb.proactive.core.security.securityentity.Entities;
import org.objectweb.proactive.core.security.securityentity.Entity;
public class UniversalBodyWrapper implements UniversalBody, Runnable {
/**
*
*/
protected UniversalBody wrappedBody;
protected long time;
protected UniqueID id;
protected boolean stop;
protected long creationTime;
//protected Thread t ;
/**
* Create a time-limited wrapper around a UniversalBody
* @param body the wrapped UniversalBody
* @param time the life expectancy of this wrapper in milliseconds
*/
public UniversalBodyWrapper(UniversalBody body, long time) {
this.wrappedBody = body;
this.time = time;
this.creationTime = System.currentTimeMillis();
// t =new Thread(this);
this.id = this.wrappedBody.getID();
// t.start();
}
public int receiveRequest(Request request) throws IOException, RenegotiateSessionException {
// System.out.println("UniversalBodyWrapper.receiveRequest");
if (this.wrappedBody == null) {
throw new IOException();
}
//the forwarder should be dead
if (System.currentTimeMillis() > (this.creationTime + this.time)) {
// this.updateServer();
// this.wrappedBody = null;
// t.start();
// System.gc();
throw new IOException();
} else {
try {
return this.wrappedBody.receiveRequest(request);
} catch (IOException e) {
e.printStackTrace();
throw e;
}
}
// this.stop();
}
public int receiveReply(Reply r) throws IOException {
return this.wrappedBody.receiveReply(r);
}
public String getNodeURL() {
return this.wrappedBody.getNodeURL();
}
public UniqueID getID() {
return this.id;
}
public void updateLocation(UniqueID id, UniversalBody body) throws IOException {
this.wrappedBody.updateLocation(id, body);
}
public UniversalBody getRemoteAdapter() {
return this.wrappedBody.getRemoteAdapter();
}
public String getReifiedClassName() {
return this.wrappedBody.getReifiedClassName();
}
public void enableAC() throws java.io.IOException {
this.wrappedBody.enableAC();
}
public void disableAC() throws java.io.IOException {
this.wrappedBody.disableAC();
}
protected void updateServer() {
// System.out.println("UniversalBodyWrapper.updateServer");
// LocationServer server = LocationServerFactory.getLocationServer();
// try {
// server.updateLocation(id, this.wrappedBody);
// } catch (Exception e) {
// System.out.println("XXXX Error XXXX");
// // e.printStackTrace();
// }
}
//protected synchronized void stop() {
// this.stop=true;
// this.notifyAll();
//}
//
//protected synchronized void waitForStop(long time) {
// if (!this.stop) {
// try {
// wait(time);
// } catch (InterruptedException e) {
// e.printStackTrace();
// }
// }
//
//}
public void run() {
// System.out.println("UniversalBodyWrapper.run life expectancy " + time);
try {
// Thread.currentThread().sleep(time);
// this.waitForStop(time);
} catch (Exception e) {
e.printStackTrace();
}
// System.out.println("UniversalBodyWrapper.run end of life...");
this.updateServer();
this.wrappedBody = null;
// System.gc();
}
// SECURITY
public void terminateSession(long sessionID) throws java.io.IOException, SecurityNotAvailableException {
this.wrappedBody.terminateSession(sessionID);
}
public TypedCertificate getCertificate() throws java.io.IOException, SecurityNotAvailableException {
return this.wrappedBody.getCertificate();
}
public long startNewSession(long distantSessionID, SecurityContext policy,
TypedCertificate distantCertificate) throws IOException, SecurityNotAvailableException,
SessionException {
return this.wrappedBody.startNewSession(distantSessionID, policy, distantCertificate);
}
public PublicKey getPublicKey() throws java.io.IOException, SecurityNotAvailableException {
return this.wrappedBody.getPublicKey();
}
public byte[] randomValue(long sessionID, byte[] cl_rand) throws IOException,
SecurityNotAvailableException, RenegotiateSessionException {
return this.wrappedBody.randomValue(sessionID, cl_rand);
}
public byte[] publicKeyExchange(long sessionID, byte[] sig_code) throws IOException,
SecurityNotAvailableException, RenegotiateSessionException, KeyExchangeException {
return this.wrappedBody.publicKeyExchange(sessionID, sig_code);
}
public byte[][] secretKeyExchange(long sessionID, byte[] tmp, byte[] tmp1, byte[] tmp2, byte[] tmp3,
byte[] tmp4) throws IOException, SecurityNotAvailableException, RenegotiateSessionException {
return this.wrappedBody.secretKeyExchange(sessionID, tmp, tmp1, tmp2, tmp3, tmp4);
}
<|fim▁hole|> // * @see org.objectweb.proactive.core.body.UniversalBody#getCertificateEncoded()
// */
// public byte[] getCertificateEncoded()
// throws IOException, SecurityNotAvailableException {
// return this.wrappedBody.getCertificateEncoded();
// }
/* (non-Javadoc)
* @see org.objectweb.proactive.core.body.UniversalBody#getPolicy(org.objectweb.proactive.ext.security.SecurityContext)
*/
public SecurityContext getPolicy(Entities local, Entities distant) throws SecurityNotAvailableException,
IOException {
return this.wrappedBody.getPolicy(local, distant);
}
public Entities getEntities() throws SecurityNotAvailableException, IOException {
return this.wrappedBody.getEntities();
}
/**
* @see org.objectweb.proactive.core.body.UniversalBody#receiveFTMessage(org.objectweb.proactive.core.body.ft.internalmsg.FTMessage)
*/
public Object receiveFTMessage(FTMessage ev) throws IOException {
return this.wrappedBody.receiveFTMessage(ev);
}
public GCResponse receiveGCMessage(GCMessage msg) throws IOException {
return this.wrappedBody.receiveGCMessage(msg);
}
public void setRegistered(boolean registered) throws IOException {
this.wrappedBody.setRegistered(registered);
}
public void createShortcut(Shortcut shortcut) throws IOException {
// TODO implement
throw new ProActiveRuntimeException("create shortcut method not implemented yet");
}
public String registerByName(String name, boolean rebind) throws IOException, ProActiveException {
return this.wrappedBody.registerByName(name, rebind);
}
public String registerByName(String name, boolean rebind, String protocol) throws IOException,
ProActiveException {
return this.wrappedBody.registerByName(name, rebind, protocol);
}
public ProActiveSecurityManager getProActiveSecurityManager(Entity user)
throws SecurityNotAvailableException, AccessControlException, IOException {
return this.wrappedBody.getProActiveSecurityManager(user);
}
public void setProActiveSecurityManager(Entity user, PolicyServer policyServer)
throws SecurityNotAvailableException, AccessControlException, IOException {
this.wrappedBody.setProActiveSecurityManager(user, policyServer);
}
public String getUrl() {
return this.wrappedBody.getUrl();
}
}<|fim▁end|> | // /* (non-Javadoc) |
<|file_name|>api.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The go-ethereum Authors
// This file is part of the go-ethereum library.<|fim▁hole|>// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package api
import (
"fmt"
"io"
"net/http"
"regexp"
"strings"
"sync"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/log"
"github.com/ethereum/go-ethereum/swarm/storage"
)
var (
hashMatcher = regexp.MustCompile("^[0-9A-Fa-f]{64}")
slashes = regexp.MustCompile("/+")
domainAndVersion = regexp.MustCompile("[@:;,]+")
)
type Resolver interface {
Resolve(string) (common.Hash, error)
}
/*
Api implements webserver/file system related content storage and retrieval
on top of the dpa
it is the public interface of the dpa which is included in the ethereum stack
*/
type Api struct {
dpa *storage.DPA
dns Resolver
}
//the api constructor initialises
func NewApi(dpa *storage.DPA, dns Resolver) (self *Api) {
self = &Api{
dpa: dpa,
dns: dns,
}
return
}
// DPA reader API
func (self *Api) Retrieve(key storage.Key) storage.LazySectionReader {
return self.dpa.Retrieve(key)
}
func (self *Api) Store(data io.Reader, size int64, wg *sync.WaitGroup) (key storage.Key, err error) {
return self.dpa.Store(data, size, wg, nil)
}
type ErrResolve error
// DNS Resolver
func (self *Api) Resolve(hostPort string, nameresolver bool) (storage.Key, error) {
log.Trace(fmt.Sprintf("Resolving : %v", hostPort))
if hashMatcher.MatchString(hostPort) || self.dns == nil {
log.Trace(fmt.Sprintf("host is a contentHash: '%v'", hostPort))
return storage.Key(common.Hex2Bytes(hostPort)), nil
}
if !nameresolver {
return nil, fmt.Errorf("'%s' is not a content hash value.", hostPort)
}
contentHash, err := self.dns.Resolve(hostPort)
if err != nil {
err = ErrResolve(err)
log.Warn(fmt.Sprintf("DNS error : %v", err))
}
log.Trace(fmt.Sprintf("host lookup: %v -> %v", hostPort, contentHash))
return contentHash[:], err
}
func Parse(uri string) (hostPort, path string) {
if uri == "" {
return
}
parts := slashes.Split(uri, 3)
var i int
if len(parts) == 0 {
return
}
// beginning with slash is now optional
for len(parts[i]) == 0 {
i++
}
hostPort = parts[i]
for i < len(parts)-1 {
i++
if len(path) > 0 {
path = path + "/" + parts[i]
} else {
path = parts[i]
}
}
log.Debug(fmt.Sprintf("host: '%s', path '%s' requested.", hostPort, path))
return
}
func (self *Api) parseAndResolve(uri string, nameresolver bool) (key storage.Key, hostPort, path string, err error) {
hostPort, path = Parse(uri)
//resolving host and port
contentHash, err := self.Resolve(hostPort, nameresolver)
log.Debug(fmt.Sprintf("Resolved '%s' to contentHash: '%s', path: '%s'", uri, contentHash, path))
return contentHash[:], hostPort, path, err
}
// Put provides singleton manifest creation on top of dpa store
func (self *Api) Put(content, contentType string) (string, error) {
r := strings.NewReader(content)
wg := &sync.WaitGroup{}
key, err := self.dpa.Store(r, int64(len(content)), wg, nil)
if err != nil {
return "", err
}
manifest := fmt.Sprintf(`{"entries":[{"hash":"%v","contentType":"%s"}]}`, key, contentType)
r = strings.NewReader(manifest)
key, err = self.dpa.Store(r, int64(len(manifest)), wg, nil)
if err != nil {
return "", err
}
wg.Wait()
return key.String(), nil
}
// Get uses iterative manifest retrieval and prefix matching
// to resolve path to content using dpa retrieve
// it returns a section reader, mimeType, status and an error
func (self *Api) Get(uri string, nameresolver bool) (reader storage.LazySectionReader, mimeType string, status int, err error) {
key, _, path, err := self.parseAndResolve(uri, nameresolver)
if err != nil {
return nil, "", 500, fmt.Errorf("can't resolve: %v", err)
}
quitC := make(chan bool)
trie, err := loadManifest(self.dpa, key, quitC)
if err != nil {
log.Warn(fmt.Sprintf("loadManifestTrie error: %v", err))
return
}
log.Trace(fmt.Sprintf("getEntry(%s)", path))
entry, _ := trie.getEntry(path)
if entry != nil {
key = common.Hex2Bytes(entry.Hash)
status = entry.Status
mimeType = entry.ContentType
log.Trace(fmt.Sprintf("content lookup key: '%v' (%v)", key, mimeType))
reader = self.dpa.Retrieve(key)
} else {
status = http.StatusNotFound
err = fmt.Errorf("manifest entry for '%s' not found", path)
log.Warn(fmt.Sprintf("%v", err))
}
return
}
func (self *Api) Modify(uri, contentHash, contentType string, nameresolver bool) (newRootHash string, err error) {
root, _, path, err := self.parseAndResolve(uri, nameresolver)
if err != nil {
return "", fmt.Errorf("can't resolve: %v", err)
}
quitC := make(chan bool)
trie, err := loadManifest(self.dpa, root, quitC)
if err != nil {
return
}
if contentHash != "" {
entry := &manifestTrieEntry{
Path: path,
Hash: contentHash,
ContentType: contentType,
}
trie.addEntry(entry, quitC)
} else {
trie.deleteEntry(path, quitC)
}
err = trie.recalcAndStore()
if err != nil {
return
}
return trie.hash.String(), nil
}<|fim▁end|> | //
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or |
<|file_name|>fix-dynamic-style-sheets.js<|end_file_name|><|fim▁begin|>'use strict';
module.exports = function (t, a) {
t(document.createElement('p'));<|fim▁hole|><|fim▁end|> | }; |
<|file_name|>permission.rs<|end_file_name|><|fim▁begin|>use std::{error::Error as StdError, fmt, io::Write, str::FromStr};
use diesel::{backend::Backend, deserialize, serialize, sql_types::Text};
#[derive(AsExpression, Clone, Copy, Debug, Eq, FromSqlRow, Hash, PartialEq)]
#[sql_type = "Text"]
pub enum Permission {
MakePost,
MakeMediaPost,
MakeComment,
FollowUser,
MakePersona,
SwitchPersona,
DeletePersona,
ManageFollowRequest,
ConfigureInstance,
BanUser,
BlockInstance,
GrantRole,
RevokeRole,
}
impl fmt::Display for Permission {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Permission::MakePost => write!(f, "make-post"),
Permission::MakeMediaPost => write!(f, "make-media-post"),
Permission::MakeComment => write!(f, "make-comment"),
Permission::FollowUser => write!(f, "follow-user"),
Permission::MakePersona => write!(f, "make-persona"),
Permission::SwitchPersona => write!(f, "switch-persona"),
Permission::DeletePersona => write!(f, "delete-persona"),
Permission::ManageFollowRequest => write!(f, "manage-follow-request"),
Permission::ConfigureInstance => write!(f, "configure-instance"),
Permission::BanUser => write!(f, "ban-user"),
Permission::BlockInstance => write!(f, "block-instance"),
Permission::GrantRole => write!(f, "grant-role"),
Permission::RevokeRole => write!(f, "revoke-role"),
}
}
}
impl FromStr for Permission {
type Err = PermissionParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"make-post" => Ok(Permission::MakePost),
"make-media-post" => Ok(Permission::MakeMediaPost),
"make-comment" => Ok(Permission::MakeComment),
"follow-user" => Ok(Permission::FollowUser),
"make-persona" => Ok(Permission::MakePersona),
"switch-persona" => Ok(Permission::SwitchPersona),
"delete-persona" => Ok(Permission::DeletePersona),
"manage-follow-request" => Ok(Permission::ManageFollowRequest),
"configure-instance" => Ok(Permission::ConfigureInstance),
"ban-user" => Ok(Permission::BanUser),
"block-instance" => Ok(Permission::BlockInstance),
"grant-role" => Ok(Permission::GrantRole),
"revoke-role" => Ok(Permission::RevokeRole),
_ => Err(PermissionParseError),
}
}
}
impl<DB> serialize::ToSql<Text, DB> for Permission
where
DB: Backend,
{
fn to_sql<W: Write>(&self, out: &mut serialize::Output<W, DB>) -> serialize::Result {
serialize::ToSql::<Text, DB>::to_sql(&format!("{}", self), out)
}
}
impl<DB> deserialize::FromSql<Text, DB> for Permission
where
DB: Backend<RawValue = [u8]>,
{
fn from_sql(bytes: Option<&DB::RawValue>) -> deserialize::Result<Self> {
deserialize::FromSql::<Text, DB>::from_sql(bytes).and_then(|string: String| {
string
.parse::<Permission>()
.map_err(|e| Box::new(e) as Box<StdError + Send + Sync>)
})
}
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct PermissionParseError;<|fim▁hole|> write!(f, "Failed to parse Permission")
}
}
impl StdError for PermissionParseError {
fn description(&self) -> &str {
"Failed to parse Permission"
}
fn cause(&self) -> Option<&StdError> {
None
}
}<|fim▁end|> |
impl fmt::Display for PermissionParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
<|file_name|>common.rs<|end_file_name|><|fim▁begin|>/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* Copyright 2017 - Dario Ostuni <dario.ostuni@gmail.com>
*
*/
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone)]
pub struct Point
{
pub x: usize,
pub y: usize,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Player
{
pub name: String,
pub points: u64,
pub position: Point,
pub id: u64,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Overview
{
pub players: Vec<Player>,
pub grid: Vec<Vec<Option<u64>>>,
pub turns_left: u64,
pub ms_for_turn: u64,
pub tokens: Vec<Point>,
pub game_id: u64,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct PlayerInfo
{
pub name: String,
}
#[derive(Serialize, Deserialize, Debug)]
pub enum ClientRole
{
Viewer,
Player(PlayerInfo),
}
#[derive(Serialize, Deserialize, Debug)]
pub enum Direction
{
Up,
Down,
Left,
Right,
}
<|fim▁hole|>#[derive(Serialize, Deserialize, Debug)]
pub enum ClientCommand
{
Move(Direction),
Nothing,
}
#[derive(Serialize, Deserialize, Debug)]
pub enum ServerResponse
{
Ok,
Error(String),
}
#[derive(Serialize, Deserialize, Debug)]
pub enum ClientMessage
{
HandShake(ClientRole),
Command(ClientCommand),
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | # -*- coding: utf-8 -*-
default_app_config = 'escolar.apps.EscolarConfig' |
<|file_name|>backend.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
"""
Resources:
http://code.google.com/p/pybluez/
http://lightblue.sourceforge.net/
http://code.google.com/p/python-bluetooth-scanner
"""
from __future__ import with_statement
import select
import logging
import bluetooth
import gobject
import util.misc as misc_utils
_moduleLogger = logging.getLogger(__name__)
class _BluetoothConnection(gobject.GObject):
__gsignals__ = {
'data_ready' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
'closed' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
}
def __init__(self, socket, addr, protocol):
gobject.GObject.__init__(self)
self._socket = socket
self._address = addr
self._dataId = gobject.io_add_watch (self._socket, gobject.IO_IN, self._on_data)
self._protocol = protocol
def close(self):
gobject.source_remove(self._dataId)
self._dataId = None
self._socket.close()
self._socket = None
self.emit("closed")
@property
def socket(self):
return self._socket
@property
def address(self):
return self._address
@property
def protocol(self):
return self._protocol
@misc_utils.log_exception(_moduleLogger)
def _on_data(self, source, condition):
self.emit("data_ready")
return True
gobject.type_register(_BluetoothConnection)
class _BluetoothListener(gobject.GObject):
__gsignals__ = {
'incoming_connection' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT, ),
),
'start_listening' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
'stop_listening' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
}
def __init__(self, protocol, timeout):
gobject.GObject.__init__(self)
self._timeout = timeout
self._protocol = protocol
self._socket = None
self._incomingId = None
def start(self):
assert self._socket is None and self._incomingId is None
self._socket = bluetooth.BluetoothSocket(self._protocol["transport"])
self._socket.settimeout(self._timeout)
self._socket.bind(("", bluetooth.PORT_ANY))
self._socket.listen(1)
self._incomingId = gobject.io_add_watch(
self._socket, gobject.IO_IN, self._on_incoming
)
bluetooth.advertise_service(self._socket, self._protocol["name"], self._protocol["uuid"])
self.emit("start_listening")
def stop(self):
if self._socket is None or self._incomingId is None:
return
gobject.source_remove(self._incomingId)
self._incomingId = None
bluetooth.stop_advertising(self._socket)
self._socket.close()
self._socket = None
self.emit("stop_listening")
@property
def isListening(self):
return self._socket is not None and self._incomingId is not None
@property
def socket(self):
assert self._socket is not None
return self._socket
@misc_utils.log_exception(_moduleLogger)
def _on_incoming(self, source, condition):
newSocket, (address, port) = self._socket.accept()
newSocket.settimeout(self._timeout)
connection = _BluetoothConnection(newSocket, address, self._protocol)
self.emit("incoming_connection", connection)
return True
gobject.type_register(_BluetoothListener)
class _DeviceDiscoverer(bluetooth.DeviceDiscoverer):
def __init__(self, timeout):
bluetooth.DeviceDiscoverer.__init__(self)
self._timeout = timeout
self._devices = []
self._devicesInProgress = []
@property
def devices(self):
return self._devices
def find_devices(self, *args, **kwds):
# Ensure we always start clean and is the reason we overroad this
self._devicesInProgress = []
newArgs = [self]
newArgs.extend(args)
bluetooth.DeviceDiscoverer.find_devices(*newArgs, **kwds)
def process_inquiry(self):
# The default impl calls into some hci code but an example used select,
# so going with the example
while self.is_inquiring or 0 < len(self.names_to_find):
# The whole reason for overriding this
_moduleLogger.debug("Event (%r, %r)"% (self.is_inquiring, self.names_to_find))
rfds = select.select([self], [], [], self._timeout)[0]
if self in rfds:
self.process_event()
@misc_utils.log_exception(_moduleLogger)
def device_discovered(self, address, deviceclass, name):
device = address, deviceclass, name
_moduleLogger.debug("Device Discovered %r" % (device, ))
self._devicesInProgress.append(device)
@misc_utils.log_exception(_moduleLogger)
def inquiry_complete(self):
_moduleLogger.debug("Inquiry Complete")
self._devices = self._devicesInProgress
class BluetoothBackend(gobject.GObject):
__gsignals__ = {
'login' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
'logout' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
'contacts_update' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT, ),
),
}
def __init__(self):
gobject.GObject.__init__(self)
self._disco = None
self._timeout = 8
self._listeners = {}
self._protocols = []
self._isListening = True
def add_protocol(self, protocol):
assert not self.is_logged_in()
self._protocols.append(protocol)
def login(self):
self._disco = _DeviceDiscoverer(self._timeout)
isListening = self._isListening
for protocol in self._protocols:
protoId = protocol["uuid"]
self._listeners[protoId] = _BluetoothListener(protocol, self._timeout)
if isListening:
self._listeners[protoId].start()
self.emit("login")
def logout(self):
for protocol in self._protocols:
protoId = protocol["uuid"]
listener = self._listeners[protoId]
listener.close()
self._listeners.clear()
self._disco.cancel_inquiry() # precaution
self.emit("logout")
def is_logged_in(self):
if self._listeners:
return True
else:
return False
def is_listening(self):
return self._isListening
def enable_listening(self, enable):
if enable:
for listener in self._listeners.itervalues():
assert not listener.isListening
for listener in self._listeners.itervalues():
listener.start()
else:
for listener in self._listeners.itervalues():
assert listener.isListening
for listener in self._listeners.itervalues():
listener.stop()
def get_contacts(self):
try:
self._disco.find_devices(
duration=self._timeout,
flush_cache = True,
lookup_names = True,
)
self._disco.process_inquiry()
except bluetooth.BluetoothError, e:
# lightblue does this, so I guess I will too
_moduleLogger.error("Error while getting contacts, attempting to cancel")
try:
self._disco.cancel_inquiry()
finally:
raise e
return self._disco.devices
def get_contact_services(self, address):
services = bluetooth.find_service(address = address)
return services
def connect(self, addr, transport, port):
sock = bluetooth.BluetoothSocket(transport)
sock.settimeout(self._timeout)
try:
sock.connect((addr, port))
except bluetooth.error, e:
sock.close()
raise
return _BluetoothConnection(sock, addr, "")
gobject.type_register(BluetoothBackend)
class BluetoothClass(object):
def __init__(self, description):
self.description = description
def __str__(self):
return self.description
MAJOR_CLASS = BluetoothClass("Major Class")
MAJOR_CLASS.MISCELLANEOUS = BluetoothClass("Miscellaneous")
MAJOR_CLASS.COMPUTER = BluetoothClass("Computer")
MAJOR_CLASS.PHONE = BluetoothClass("Phone")
MAJOR_CLASS.LAN = BluetoothClass("LAN/Network Access Point")
MAJOR_CLASS.AV = BluetoothClass("Audio/Video")
MAJOR_CLASS.PERIPHERAL = BluetoothClass("Peripheral")
MAJOR_CLASS.IMAGING = BluetoothClass("Imaging")
MAJOR_CLASS.UNCATEGORIZED = BluetoothClass("Uncategorized")
MAJOR_CLASS.MISCELLANEOUS.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.COMPUTER.UNCATEGORIZED = BluetoothClass("Uncategorized, code for device not assigned")
MAJOR_CLASS.COMPUTER.DESKTOP = BluetoothClass("Desktop workstation")
MAJOR_CLASS.COMPUTER.SERVER = BluetoothClass("Server-class computer")
MAJOR_CLASS.COMPUTER.LAPTOP = BluetoothClass("Laptop")
MAJOR_CLASS.COMPUTER.HANDHELD = BluetoothClass("Handheld PC/PDA (clam shell)")
MAJOR_CLASS.COMPUTER.PALM_SIZE = BluetoothClass("Palm sized PC/PDA")
MAJOR_CLASS.COMPUTER.WEARABLE = BluetoothClass("Wearable computer (Watch sized)")
MAJOR_CLASS.COMPUTER.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.PHONE.UNCATEGORIZED = BluetoothClass("Uncategorized, code for device not assigned")
MAJOR_CLASS.PHONE.CELLULAR = BluetoothClass("Cellular")
MAJOR_CLASS.PHONE.CORDLESS = BluetoothClass("Cordless")
MAJOR_CLASS.PHONE.SMART_PHONE = BluetoothClass("Smart phone")
MAJOR_CLASS.PHONE.MODEM = BluetoothClass("Wired modem or voice gateway")
MAJOR_CLASS.PHONE.ISDN = BluetoothClass("Common ISDN Access")
MAJOR_CLASS.PHONE.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.LAN.UNCATEGORIZED = BluetoothClass("Uncategorized")
MAJOR_CLASS.LAN.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.AV.UNCATEGORIZED = BluetoothClass("Uncategorized, code for device not assigned")
MAJOR_CLASS.AV.HEADSET = BluetoothClass("Device conforms to headset profile")
MAJOR_CLASS.AV.HANDS_FREE = BluetoothClass("Hands-free")
MAJOR_CLASS.AV.MICROPHONE = BluetoothClass("Microphone")
MAJOR_CLASS.AV.LOUDSPEAKER = BluetoothClass("Loudspeaker")
MAJOR_CLASS.AV.HEADPHONES = BluetoothClass("Headphones")
MAJOR_CLASS.AV.PORTABLE_AUDIO = BluetoothClass("Portable Audio")
MAJOR_CLASS.AV.CAR_AUDIO = BluetoothClass("Car Audio")
MAJOR_CLASS.AV.SET_TOP_BOX = BluetoothClass("Set-top box")
MAJOR_CLASS.AV.HIFI_AUDIO_DEVICE = BluetoothClass("HiFi Audio Device")
MAJOR_CLASS.AV.VCR = BluetoothClass("VCR")
MAJOR_CLASS.AV.VIDEO_CAMERA = BluetoothClass("Video Camera")
MAJOR_CLASS.AV.CAMCORDER = BluetoothClass("Camcorder")
MAJOR_CLASS.AV.VIDEO_MONITOR = BluetoothClass("Video Monitor")
MAJOR_CLASS.AV.VIDEO_DISPLAY = BluetoothClass("Video Display and Loudspeaker")
MAJOR_CLASS.AV.VIDEO_CONFERENCING = BluetoothClass("Video Conferencing")
MAJOR_CLASS.AV.GAMING = BluetoothClass("Gaming/Toy")
MAJOR_CLASS.AV.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.PERIPHERAL.UNCATEGORIZED = BluetoothClass("Uncategorized, code for device not assigned")
MAJOR_CLASS.PERIPHERAL.JOYSTICK = BluetoothClass("Joystick")
MAJOR_CLASS.PERIPHERAL.GAMEPAD = BluetoothClass("Gamepad")
MAJOR_CLASS.PERIPHERAL.REMOTE_CONTROL = BluetoothClass("Remote Control")
MAJOR_CLASS.PERIPHERAL.SENSING_DEVICE = BluetoothClass("Sensing Device")
MAJOR_CLASS.PERIPHERAL.DIGITIZER_TABLET = BluetoothClass("Digitizer Tablet")
MAJOR_CLASS.PERIPHERAL.CARD_READER = BluetoothClass("Card Reader (e.g. SIM Card Reader)")
MAJOR_CLASS.PERIPHERAL.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.IMAGING.UNCATEGORIZED = BluetoothClass("Uncategorized, code for device not assigned")
MAJOR_CLASS.IMAGING.DISPLAY = BluetoothClass("Display")
MAJOR_CLASS.IMAGING.CAMERA = BluetoothClass("Camera")
MAJOR_CLASS.IMAGING.SCANNER = BluetoothClass("Scanner")
MAJOR_CLASS.IMAGING.PRINTER = BluetoothClass("Printer")
MAJOR_CLASS.IMAGING.RESERVED = BluetoothClass("Reserved")
SERVICE_CLASS = BluetoothClass("Service Class")
SERVICE_CLASS.LIMITED = BluetoothClass("Limited Discoverable Mode")
SERVICE_CLASS.POSITIONING = BluetoothClass("Positioning (Location identification)")
SERVICE_CLASS.NETWORKING = BluetoothClass("Networking (LAN, Ad hoc, ...)")
SERVICE_CLASS.RENDERING = BluetoothClass("Rendering (Printing, speaking, ...)")
SERVICE_CLASS.CAPTURING = BluetoothClass("Capturing (Scanner, microphone, ...)")
SERVICE_CLASS.OBJECT_TRANSFER = BluetoothClass("Object Transfer (v-Inbox, v-Folder, ...)")
SERVICE_CLASS.AUDIO = BluetoothClass("Audio (Speaker, Microphone, Headset service, ...")
SERVICE_CLASS.TELEPHONY = BluetoothClass("Telephony (Cordless telephony, Modem, Headset service, ...)")
SERVICE_CLASS.INFORMATION = BluetoothClass("Information (WEB-server, WAP-server, ...)")
_ORDERED_MAJOR_CLASSES = (
MAJOR_CLASS.MISCELLANEOUS,
MAJOR_CLASS.COMPUTER,
MAJOR_CLASS.PHONE,
MAJOR_CLASS.LAN,
MAJOR_CLASS.AV,
MAJOR_CLASS.PERIPHERAL,
MAJOR_CLASS.IMAGING,
)
_SERVICE_CLASSES = (
(13 - 13, SERVICE_CLASS.LIMITED),
(16 - 13, SERVICE_CLASS.POSITIONING),
(17 - 13, SERVICE_CLASS.NETWORKING),<|fim▁hole|> (21 - 13, SERVICE_CLASS.AUDIO),
(22 - 13, SERVICE_CLASS.TELEPHONY),
(23 - 13, SERVICE_CLASS.INFORMATION),
)
def _parse_device_class(deviceclass):
# get some information out of the device class and display it.
# voodoo magic specified at:
#
# https://www.bluetooth.org/foundry/assignnumb/document/baseband
majorClass = (deviceclass >> 8) & 0xf
minorClass = (deviceclass >> 2) & 0x3f
serviceClasses = (deviceclass >> 13) & 0x7ff
return majorClass, minorClass, serviceClasses
def parse_device_class(deviceclass):
majorClassCode, minorClassCode, serviceClassCodes = _parse_device_class(deviceclass)
try:
majorClass = _ORDERED_MAJOR_CLASSES[majorClassCode]
except IndexError:
majorClass = MAJOR_CLASS.UNCATEGORIZED
serviceClasses = []
for bitpos, cls in _SERVICE_CLASSES:
if serviceClassCodes & (1 << bitpos):
serviceClasses.append(cls)
return majorClass, minorClassCode, serviceClasses<|fim▁end|> | (18 - 13, SERVICE_CLASS.RENDERING),
(19 - 13, SERVICE_CLASS.CAPTURING),
(20 - 13, SERVICE_CLASS.OBJECT_TRANSFER), |
<|file_name|>PageView.py<|end_file_name|><|fim▁begin|>"""
Page view class
"""
import os
from Server.Importer import ImportFromModule
<|fim▁hole|>
class PageView(ImportFromModule("Server.PageViewBase", "PageViewBase")):
"""
Page view class.
"""
_PAGE_TITLE = "Python Web Framework"
def __init__(self, htmlToLoad):
"""
Constructor.
- htmlToLoad : HTML to load
"""
self.SetPageTitle(self._PAGE_TITLE)
self.AddMetaData("charset=\"UTF-8\"")
self.AddMetaData("name=\"viewport\" content=\"width=device-width, initial-scale=1\"")
self.AddStyleSheet("/css/styles.css")
self.AddJavaScript("/js/http.js")
self.LoadHtml(os.path.join(os.path.dirname(__file__), "%s.html" % htmlToLoad))
self.SetPageData({ "PageTitle" : self._PAGE_TITLE })<|fim▁end|> | |
<|file_name|>Json.py<|end_file_name|><|fim▁begin|>import warnings
warnings.filterwarnings( "ignore", message = "The sre module is deprecated, please import re." )
from simplejson import JSONEncoder
from datetime import datetime, date
class Json( JSONEncoder ):
def __init__( self, *args, **kwargs ):
JSONEncoder.__init__( self )
if args and kwargs:
raise ValueError( "Please provide either args or kwargs, not both." )
self.__args = args
self.__kwargs = kwargs
def __str__( self ):
if self.__args:
if len( self.__args ) == 1:
return self.encode( self.__args[ 0 ] )
return self.encode( self.__args )
return self.encode( self.__kwargs )
def default( self, obj ):
"""
Invoked by JSONEncoder.encode() for types that it doesn't know how to encode.
"""
if isinstance( obj, datetime ) or isinstance( obj, date ):
return unicode( obj )
if hasattr( obj, "to_dict" ):<|fim▁hole|> return obj.to_dict()
raise TypeError<|fim▁end|> | |
<|file_name|>p114.rs<|end_file_name|><|fim▁begin|>//! [Problem 114](https://projecteuler.net/problem=114) solver.
#![warn(
bad_style,
unused,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
use std::collections::HashMap;
fn get_cnt((n, m): (u32, u32), map: &mut HashMap<(u32, u32), u64>) -> u64 {
if let Some(&x) = map.get(&(n, m)) {
return x;
}
if n < m {
let _ = map.insert((n, m), 1);
return 1;
}
let mut sum = 0;
for len in m..(n + 1) {
// most left red block length
for i in 0..(n - len + 1) {
// most left red block position
if n > len + i {
sum += get_cnt((n - (len + i) - 1, m), map); // red block and black block
} else {
sum += 1;
}
}
}<|fim▁hole|> let _ = map.insert((n, m), sum);
sum
}
fn solve() -> String {
let mut map = HashMap::new();
get_cnt((50, 3), &mut map).to_string()
}
common::problem!("16475640049", solve);
#[cfg(test)]
mod tests {
use super::get_cnt;
use std::collections::HashMap;
#[test]
fn small_len() {
let mut map = HashMap::new();
assert_eq!(1, get_cnt((1, 3), &mut map));
assert_eq!(1, get_cnt((2, 3), &mut map));
assert_eq!(2, get_cnt((3, 3), &mut map));
assert_eq!(4, get_cnt((4, 3), &mut map));
assert_eq!(17, get_cnt((7, 3), &mut map));
}
}<|fim▁end|> | sum += 1; // all black block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.