prompt large_stringlengths 70 991k | completion large_stringlengths 0 1.02k |
|---|---|
<|file_name|>UserPromptDialog.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2006 Subclipse project and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Subclipse project committers - initial API and implementation
******************************************************************************/
package org.tigris.subversion.subclipse.ui.authentication;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;<|fim▁hole|>import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.PlatformUI;
import org.tigris.subversion.subclipse.ui.IHelpContextIds;
import org.tigris.subversion.subclipse.ui.Policy;
import org.tigris.subversion.subclipse.ui.dialogs.SubclipseTrayDialog;
public class UserPromptDialog extends SubclipseTrayDialog {
private String realm;
private String username;
private boolean save;
private boolean maySave;
private Text userText;
private Button saveButton;
private Button okButton;
private static int WIDTH = 300;
public UserPromptDialog(Shell parentShell, String realm, String username, boolean maySave) {
super(parentShell);
this.realm = realm;
this.username = username;
this.maySave = maySave;
}
protected Control createDialogArea(Composite parent) {
Composite rtnGroup = (Composite)super.createDialogArea(parent);
getShell().setText(Policy.bind("UserPromptDialog.title")); //$NON-NLS-1$
GridLayout layout = new GridLayout();
layout.numColumns = 2;
rtnGroup.setLayout(layout);
rtnGroup.setLayoutData(
new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING));
Label realmLabel = new Label(rtnGroup, SWT.NONE);
realmLabel.setText(Policy.bind("PasswordPromptDialog.repository")); //$NON-NLS-1$
Text realmText = new Text(rtnGroup, SWT.BORDER);
GridData gd = new GridData();
gd.widthHint = WIDTH;
realmText.setLayoutData(gd);
realmText.setEditable(false);
realmText.setText(realm);
Label userLabel = new Label(rtnGroup, SWT.NONE);
userLabel.setText(Policy.bind("UserPromptDialog.username")); //$NON-NLS-1$
userText = new Text(rtnGroup, SWT.BORDER);
gd = new GridData();
gd.widthHint = WIDTH;
userText.setLayoutData(gd);
userText.setText(username == null? "": username);
userText.selectAll();
if (maySave) {
saveButton = new Button(rtnGroup, SWT.CHECK);
saveButton.setText(Policy.bind("UserPromptDialog.save")); //$NON-NLS-1$
gd = new GridData();
gd.horizontalSpan = 2;
saveButton.setLayoutData(gd);
}
// set F1 help
PlatformUI.getWorkbench().getHelpSystem().setHelp(rtnGroup, IHelpContextIds.USER_PROMPT_DIALOG);
userText.setFocus();
return rtnGroup;
}
public Button createButton(Composite parent, int id, String label, boolean isDefault) {
Button button = super.createButton(parent, id, label, isDefault);
if (id == IDialogConstants.OK_ID) {
okButton = button;
okButton.setEnabled(true);
}
return button;
}
protected void okPressed() {
username = userText.getText().trim();
if (maySave) save = saveButton.getSelection();
super.okPressed();
}
public boolean isSave() {
return save;
}
public String getUsername() {
return username;
}
}<|fim▁end|> | import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell; |
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>/**
* Created by Tivie on 12-11-2014.
*/
module.exports = function (grunt) {
// Project configuration.
var config = {
pkg: grunt.file.readJSON('package.json'),
concat: {
options: {
sourceMap: true,
banner: ';/*! <%= pkg.name %> <%= grunt.template.today("dd-mm-yyyy") %> */\n(function(){\n',
footer: '}).call(this);'
},
dist: {
src: [
'src/showdown.js',
'src/helpers.js',
'src/converter.js',
'src/subParsers/*.js',
'src/loader.js'
],
dest: 'dist/<%= pkg.name %>.js'
}
},
uglify: {
options: {
sourceMap: true,
banner: '/*! <%= pkg.name %> <%= grunt.template.today("dd-mm-yyyy") %> */\n'
},
dist: {
files: {
'dist/<%= pkg.name %>.min.js': ['<%= concat.dist.dest %>']
}
}
},
jshint: {
options: {
jshintrc: '.jshintrc'
},
files: [
'Gruntfile.js',
'src/**/*.js',
'test/**/*.js'
]
},
jscs: {
options: {
config: '.jscs.json'
},
files: {
src: [
'Gruntfile.js',
'src/**/*.js',
'test/**/*.js'
]
}
},
changelog: {
options: {
repository: 'http://github.com/showdownjs/showdown',
dest: 'CHANGELOG.md'
}
},
bump: {
options: {
files: ['package.json'],
updateConfigs: [],
commit: true,
commitMessage: 'Release version %VERSION%',
commitFiles: ['package.json'],
createTag: true,
tagName: '%VERSION%',
tagMessage: 'Version %VERSION%',
push: true,
pushTo: 'upstream',
gitDescribeOptions: '--tags --always --abbrev=1 --dirty=-d',
globalReplace: false,
prereleaseName: 'alpha',
regExp: false
}
},
simplemocha: {
node: {
src: 'test/node/**/*.js',
options: {
globals: ['should'],
timeout: 3000,
ignoreLeaks: false,
reporter: 'spec'
}
},
karlcow: {
src: 'test/node/testsuite.karlcow.js',
options: {
globals: ['should'],
timeout: 3000,
ignoreLeaks: false,
reporter: 'spec'
}
},
browser: {
src: 'test/browser/**/*.js',
options: {
reporter: 'spec'
<|fim▁hole|> }
}
};
grunt.initConfig(config);
require('load-grunt-tasks')(grunt);
grunt.registerTask('concatenate', ['concat']);
grunt.registerTask('lint', ['jshint', 'jscs']);
grunt.registerTask('test', ['lint', 'concat', 'simplemocha:node']);
grunt.registerTask('test-without-building', ['simplemocha:node']);
grunt.registerTask('build', ['test', 'uglify']);
grunt.registerTask('prep-release', ['build', 'changelog']);
// Default task(s).
grunt.registerTask('default', ['test']);
};<|fim▁end|> | }
|
<|file_name|>upload.py<|end_file_name|><|fim▁begin|>import requests
import os
print('\n***数据与算法 GitHub自动上传脚本***\n')
username = input('输入你在GitHub上的用户名,如 Robert Ying:')
email = input('输入你注册GitHub用的Email:')
print('\n开始配置Git...')
os.system('git config --global user.name "' + username + '"')
os.system('git config --global user.email ' + email)
print('\n你输入的信息如下:')
os.system('git config user.name')
os.system('git config user.email')
if input('\n信息正确则输入y继续,直接回车则重新输入用户名与Email:') != 'y':
username = input('输入你在GitHub上的用户名,如 Robert Ying:')
email = input('输入你注册GitHub用的Email:')
print('\n你输入的信息如下:')
os.system('git config user.name')
os.system('git config user.email')
url = input(
'\n输入你fork后自己对应仓库的url,如https://github.com/robertying/data-and-algorithms.git,注意!最后有".git":')
name = input('\n输入你的真实姓名:')
payload = {"username": input('输入你的OJ用户名:'), "password": input('输入你的OJ密码:')}
print()
response = requests.post(
'http://lambda.ee.tsinghua.edu.cn/api/auth/login/', data=payload)
answer = requests.get('http://lambda.ee.tsinghua.edu.cn/api/my/submits/', headers={
'Authorization': 'TOKEN ' + response.json()['auth_token']}, params={'page': 1, 'page_size': 1})
count = answer.json()['count']
answer = requests.get('http://lambda.ee.tsinghua.edu.cn/api/my/submits/', headers={
'Authorization': 'TOKEN ' + response.json()['auth_token']}, params={'page': 1, 'page_size': count})
results = answer.json()['results']
if not os.path.exists('data-and-algorithms'):
os.system('git clone ' + url)
else:
os.system('cd data-and-algorithms & git pull')
os.system('cd data-and-algorithms & git remote add upstream https://github.com/ee64/data-and-algorithms.git & git fetch upstream & git checkout master & git merge upstream/master')
problem_dir = os.listdir('./data-and-algorithms')
already_walked_results = []
for result in results:
if result['problem_title'] not in already_walked_results and result['problem_title'] in problem_dir:
if result['score'] == 100:
if not os.path.exists('./data-and-algorithms/' + result['problem_title'] + '/' + name):
os.mkdir('./data-and-algorithms/' +
result['problem_title'] + '/' + name)
with open('./data-and-algorithms/' + result['problem_title'] + '/' + name + '/README.md', 'w', encoding='utf-8') as md:
md.write('# ' + result['problem_title'] + '\n\n')
md.write(
'| # | 时间 | 内存 |\n')
md.write(
'|:----------:|:------------------------------:|:------------------------------:|\n')
run_results = results[0]['run_results']
for i in range(len(run_results)):
md.write(
'|' + str(i + 1) + '|' + str(run_results[i][1]) + ' ms|' + str(run_results[i][2]) + ' KB|\n')
with open('./data-and-algorithms/' + result['problem_title'] + '/' + name + '/source.cpp', 'w', encoding='utf-8') as source:
source.write(result['code'] + '\n')
os.system('cd data-and-algorithms & git add .')
os.system('cd data-and-algorithms & git commit -m "Initial commit to ' +
result['problem_title'] + ' by ' + name)
already_walked_results.append(result['problem_title'])<|fim▁hole|>input('\n上传完成!如果发现没有效果,那么请重新再试一遍。回车退出程序:')<|fim▁end|> | continue
os.system('cd data-and-algorithms & git push origin master') |
<|file_name|>increment.js<|end_file_name|><|fim▁begin|>//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
if (this.WScript && this.WScript.LoadScriptFile) { // Check for running in ch
this.WScript.LoadScriptFile("..\\UnitTestFramework\\UnitTestFramework.js");
}
var tests = [
{
name: "Increment BigInt literal",
body: function () {
var x = 123n;
assert.isTrue(x == 123n);
x++;
assert.isTrue(x == 124n);
++x;
assert.isTrue(x == 125n);
}
},
{
name: "Increment negative BigInt literal",
body: function () {
var x = -123n;
assert.isTrue(x == -123n);
x++;
assert.isTrue(x == -122n);
++x;
assert.isTrue(x == -121n);
}
},
{
name: "Increment -1n",
body: function () {
var x = -1n;
assert.isTrue(x == -1n);
x++;
assert.isTrue(x == 0n);
++x;
assert.isTrue(x == 1n);
}
},
{
name: "Increment to change length",
body: function () {
var x = 4294967295n;
assert.isTrue(x == 4294967295n);
x++;
assert.isTrue(x == 4294967296n);
++x;
assert.isTrue(x == 4294967297n);
var y = -4294967297n;
assert.isTrue(y == -4294967297n);
y++;
assert.isTrue(y == -4294967296n);
++y;
assert.isTrue(y == -4294967295n);
}
},
{
name: "Increment BigInt Object",
body: function () {
var x = BigInt(12345678901234567890n);
var y = BigInt(12345678901234567891n);
assert.isTrue(x < y);
++x;
assert.isTrue(x == y);
x++;
assert.isTrue(x >= y);
}
},
{
name: "Out of 64 bit range",
body: function () {
var x = 1234567890123456789012345678901234567890n;
var y = BigInt(1234567890123456789012345678901234567891n);
assert.isFalse(x == y);
x++;
++y;
assert.isTrue(x < y);
++x;
assert.isTrue(x == y);
}
},
{
name: "Very big",
body: function () {
var x = eval('1234567890'.repeat(20)+'0n');
var y = BigInt(eval('1234567890'.repeat(20)+'1n'));
assert.isFalse(x == y);
x++;
<|fim▁hole|> assert.isTrue(x < y);
++x;
assert.isTrue(x == y);
}
},
{
name: "With assign",
body: function () {
var x = 3n;
var y = x++;
assert.isTrue(x == 4n);
assert.isTrue(y == 3n);
y = ++x;
assert.isTrue(x == 5n);
assert.isTrue(y == 5n);
}
},
];
testRunner.runTests(tests, { verbose: WScript.Arguments[0] != "summary" });<|fim▁end|> | ++y;
|
<|file_name|>green_exchange_graph_test.cc<|end_file_name|><|fim▁begin|>/*
* green_exchange_graph_test.cc
* Copyright 2014-2015 John Lawson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "gtest/gtest.h"
#include "green_exchange_graph.h"
#include "ginac_util.h"
namespace cluster {
namespace {
typedef green_exchange::MultiArrowTriangleCheck GCheck;
cluster::Seed::Cluster
default_cluster(size_t size) {
cluster::Seed::Cluster result(size);
std::string var = "x";
for(size_t i = 0; i < size; ++i) {
result[i] = cluster::ginac::symbol(var + std::to_string(i));
}
return result;
}
}
TEST(GreenContinue, Simple) {
const QuiverMatrix m("{ { 0 1 0 } { -1 0 1 } { 0 -1 0 } }");
GCheck chk;
EXPECT_TRUE(chk(&m, 0));
EXPECT_TRUE(chk(&m, 1));
EXPECT_TRUE(chk(&m, 2));
EXPECT_TRUE(chk(&m, 3));
}
TEST(GreenContinue, Cycle) {
const QuiverMatrix m("{ { 0 1 -1 0 } { -1 0 1 1 } { 1 -1 0 1 } { 0 -1 -1 0 } }");
GCheck chk;
EXPECT_TRUE(chk(&m, 0));
EXPECT_TRUE(chk(&m, 1));
EXPECT_TRUE(chk(&m, 2));
EXPECT_TRUE(chk(&m, 3));
}
TEST(GreenContinue, InfiniteCycle) {
const QuiverMatrix m("{ { 0 2 -1 0 } { -2 0 1 1 } { 1 -1 0 1 } { 0 -1 -1 0 } }");
GCheck chk;
EXPECT_TRUE(chk(&m, 0));
/* Disabled as this functionality is not implemented.
* The vertex is not taken into account at this point, instead only the matrix
* is considered, and the computation of the exchange graph stops after these
* infinite type matrices have been computed, not before
EXPECT_FALSE(chk(&m, 1));
*/
EXPECT_TRUE(chk(&m, 2));
EXPECT_TRUE(chk(&m, 3));
}
TEST(GreenContinue, AllInfiniteCycle) {
const QuiverMatrix m("{ { 0 2 -2 0 } { -2 0 2 1 } { 2 -2 0 1 } { 0 -1 -1 0 } }");
GCheck chk;
EXPECT_FALSE(chk(&m, 0));
EXPECT_FALSE(chk(&m, 1));
EXPECT_FALSE(chk(&m, 2));
EXPECT_FALSE(chk(&m, 3));
}
TEST(GreenContinue, Reuse) {
const QuiverMatrix m("{ { 0 2 -2 0 } { -2 0 2 1 } { 2 -2 0 1 } { 0 -1 -1 0 } }");
GCheck chk;
EXPECT_FALSE(chk(&m, 0));
EXPECT_FALSE(chk(&m, 1));
EXPECT_FALSE(chk(&m, 2));
EXPECT_FALSE(chk(&m, 3));
const QuiverMatrix n("{ { 0 1 -1 0 } { -1 0 1 1 } { 1 -1 0 1 } { 0 -1 -1 0 } }");
EXPECT_TRUE(chk(&n, 0));
EXPECT_TRUE(chk(&n, 1));
EXPECT_TRUE(chk(&n, 2));
EXPECT_TRUE(chk(&n, 3));
const QuiverMatrix k("{ { 0 1 0 0 } { -1 0 3 -4 } { 0 -3 0 5 } { 0 4 -5 0 } }");
EXPECT_FALSE(chk(&k, 0));
EXPECT_FALSE(chk(&k, 1));
EXPECT_FALSE(chk(&k, 2));
EXPECT_FALSE(chk(&k, 3));
}<|fim▁hole|> Seed::Cluster cl = default_cluster(4);
Seed s(std::move(k), std::move(cl));
GCheck chk;
EXPECT_FALSE(chk(&s, 0));
EXPECT_FALSE(chk(&s, 1));
EXPECT_FALSE(chk(&s, 2));
EXPECT_FALSE(chk(&s, 3));
}
}<|fim▁end|> | TEST(GreenContinue, Seed) {
EquivQuiverMatrix k("{ { 0 1 0 0 } { -1 0 3 -4 } { 0 -3 0 2 } { 0 4 -2 0 } }"); |
<|file_name|>auth.py<|end_file_name|><|fim▁begin|># Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import copy
import datetime
import exceptions
import re
import urlparse
import six
from tempest import config
from tempest.openstack.common import log as logging
from tempest.services.identity.json import token_client as json_id
from tempest.services.identity.v3.json import token_client as json_v3id
CONF = config.CONF
LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class AuthProvider(object):
"""
Provide authentication
"""
def __init__(self, credentials, interface=None):
"""
:param credentials: credentials for authentication
:param interface: 'json' or 'xml'. Applicable for tempest client only
(deprecated: only json now supported)
"""
credentials = self._convert_credentials(credentials)
if self.check_credentials(credentials):
self.credentials = credentials
else:
raise TypeError("Invalid credentials")
self.interface = 'json'
self.cache = None
self.alt_auth_data = None
self.alt_part = None
def _convert_credentials(self, credentials):
# Support dict credentials for backwards compatibility
if isinstance(credentials, dict):
return get_credentials(**credentials)
else:
return credentials
def __str__(self):
return "Creds :{creds}, interface: {interface}, " \
"cached auth data: {cache}".format(
creds=self.credentials, interface=self.interface,
cache=self.cache)
@abc.abstractmethod
def _decorate_request(self, filters, method, url, headers=None, body=None,
auth_data=None):
"""
Decorate request with authentication data
"""
return
@abc.abstractmethod
def _get_auth(self):
return
@abc.abstractmethod
def _fill_credentials(self, auth_data_body):
return
def fill_credentials(self):
"""
Fill credentials object with data from auth
"""
auth_data = self.get_auth()
self._fill_credentials(auth_data[1])
return self.credentials
@classmethod
def check_credentials(cls, credentials):
"""
Verify credentials are valid.
"""
return isinstance(credentials, Credentials) and credentials.is_valid()
@property
def auth_data(self):
return self.get_auth()
@auth_data.deleter
def auth_data(self):
self.clear_auth()
def get_auth(self):
"""
Returns auth from cache if available, else auth first
"""
if self.cache is None or self.is_expired(self.cache):
self.set_auth()
return self.cache
def set_auth(self):
"""
Forces setting auth, ignores cache if it exists.
Refills credentials
"""
self.cache = self._get_auth()
self._fill_credentials(self.cache[1])
def clear_auth(self):
"""
Can be called to clear the access cache so that next request
will fetch a new token and base_url.
"""
self.cache = None
self.credentials.reset()
@abc.abstractmethod
def is_expired(self, auth_data):
return
def auth_request(self, method, url, headers=None, body=None, filters=None):
"""
Obtains auth data and decorates a request with that.
:param method: HTTP method of the request
:param url: relative URL of the request (path)
:param headers: HTTP headers of the request
:param body: HTTP body in case of POST / PUT
:param filters: select a base URL out of the catalog
:returns a Tuple (url, headers, body)
"""
orig_req = dict(url=url, headers=headers, body=body)
auth_url, auth_headers, auth_body = self._decorate_request(
filters, method, url, headers, body)
auth_req = dict(url=auth_url, headers=auth_headers, body=auth_body)
# Overwrite part if the request if it has been requested
if self.alt_part is not None:
if self.alt_auth_data is not None:
alt_url, alt_headers, alt_body = self._decorate_request(
filters, method, url, headers, body,
auth_data=self.alt_auth_data)
alt_auth_req = dict(url=alt_url, headers=alt_headers,
body=alt_body)
auth_req[self.alt_part] = alt_auth_req[self.alt_part]
else:
# If alt auth data is None, skip auth in the requested part
auth_req[self.alt_part] = orig_req[self.alt_part]
# Next auth request will be normal, unless otherwise requested
self.reset_alt_auth_data()
return auth_req['url'], auth_req['headers'], auth_req['body']
def reset_alt_auth_data(self):<|fim▁hole|> """
Configure auth provider to provide valid authentication data
"""
self.alt_part = None
self.alt_auth_data = None
def set_alt_auth_data(self, request_part, auth_data):
"""
Configure auth provider to provide alt authentication data
on a part of the *next* auth_request. If credentials are None,
set invalid data.
:param request_part: request part to contain invalid auth: url,
headers, body
:param auth_data: alternative auth_data from which to get the
invalid data to be injected
"""
self.alt_part = request_part
self.alt_auth_data = auth_data
@abc.abstractmethod
def base_url(self, filters, auth_data=None):
"""
Extracts the base_url based on provided filters
"""
return
class KeystoneAuthProvider(AuthProvider):
token_expiry_threshold = datetime.timedelta(seconds=60)
def __init__(self, credentials, interface=None):
super(KeystoneAuthProvider, self).__init__(credentials, interface)
self.auth_client = self._auth_client()
def _decorate_request(self, filters, method, url, headers=None, body=None,
auth_data=None):
if auth_data is None:
auth_data = self.auth_data
token, _ = auth_data
base_url = self.base_url(filters=filters, auth_data=auth_data)
# build authenticated request
# returns new request, it does not touch the original values
_headers = copy.deepcopy(headers) if headers is not None else {}
_headers['X-Auth-Token'] = str(token)
if url is None or url == "":
_url = base_url
else:
# Join base URL and url, and remove multiple contiguous slashes
_url = "/".join([base_url, url])
parts = [x for x in urlparse.urlparse(_url)]
parts[2] = re.sub("/{2,}", "/", parts[2])
_url = urlparse.urlunparse(parts)
# no change to method or body
return str(_url), _headers, body
@abc.abstractmethod
def _auth_client(self):
return
@abc.abstractmethod
def _auth_params(self):
return
def _get_auth(self):
# Bypasses the cache
auth_func = getattr(self.auth_client, 'get_token')
auth_params = self._auth_params()
# returns token, auth_data
token, auth_data = auth_func(**auth_params)
return token, auth_data
def get_token(self):
return self.auth_data[0]
class KeystoneV2AuthProvider(KeystoneAuthProvider):
EXPIRY_DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
def _auth_client(self):
return json_id.TokenClientJSON()
def _auth_params(self):
return dict(
user=self.credentials.username,
password=self.credentials.password,
tenant=self.credentials.tenant_name,
auth_data=True)
def _fill_credentials(self, auth_data_body):
tenant = auth_data_body['token']['tenant']
user = auth_data_body['user']
if self.credentials.tenant_name is None:
self.credentials.tenant_name = tenant['name']
if self.credentials.tenant_id is None:
self.credentials.tenant_id = tenant['id']
if self.credentials.username is None:
self.credentials.username = user['name']
if self.credentials.user_id is None:
self.credentials.user_id = user['id']
def base_url(self, filters, auth_data=None):
"""
Filters can be:
- service: compute, image, etc
- region: the service region
- endpoint_type: adminURL, publicURL, internalURL
- api_version: replace catalog version with this
- skip_path: take just the base URL
"""
if auth_data is None:
auth_data = self.auth_data
token, _auth_data = auth_data
service = filters.get('service')
region = filters.get('region')
endpoint_type = filters.get('endpoint_type', 'publicURL')
if service is None:
raise exceptions.EndpointNotFound("No service provided")
_base_url = None
for ep in _auth_data['serviceCatalog']:
if ep["type"] == service:
for _ep in ep['endpoints']:
if region is not None and _ep['region'] == region:
_base_url = _ep.get(endpoint_type)
if not _base_url:
# No region matching, use the first
_base_url = ep['endpoints'][0].get(endpoint_type)
break
if _base_url is None:
raise exceptions.EndpointNotFound(service)
parts = urlparse.urlparse(_base_url)
if filters.get('api_version', None) is not None:
path = "/" + filters['api_version']
noversion_path = "/".join(parts.path.split("/")[2:])
if noversion_path != "":
path += "/" + noversion_path
_base_url = _base_url.replace(parts.path, path)
if filters.get('skip_path', None) is not None and parts.path != '':
_base_url = _base_url.replace(parts.path, "/")
return _base_url
def is_expired(self, auth_data):
_, access = auth_data
expiry = datetime.datetime.strptime(access['token']['expires'],
self.EXPIRY_DATE_FORMAT)
return expiry - self.token_expiry_threshold <= \
datetime.datetime.utcnow()
class KeystoneV3AuthProvider(KeystoneAuthProvider):
EXPIRY_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
def _auth_client(self):
return json_v3id.V3TokenClientJSON()
def _auth_params(self):
return dict(
user=self.credentials.username,
password=self.credentials.password,
tenant=self.credentials.tenant_name,
domain=self.credentials.user_domain_name,
auth_data=True)
def _fill_credentials(self, auth_data_body):
# project or domain, depending on the scope
project = auth_data_body.get('project', None)
domain = auth_data_body.get('domain', None)
# user is always there
user = auth_data_body['user']
# Set project fields
if project is not None:
if self.credentials.project_name is None:
self.credentials.project_name = project['name']
if self.credentials.project_id is None:
self.credentials.project_id = project['id']
if self.credentials.project_domain_id is None:
self.credentials.project_domain_id = project['domain']['id']
if self.credentials.project_domain_name is None:
self.credentials.project_domain_name = \
project['domain']['name']
# Set domain fields
if domain is not None:
if self.credentials.domain_id is None:
self.credentials.domain_id = domain['id']
if self.credentials.domain_name is None:
self.credentials.domain_name = domain['name']
# Set user fields
if self.credentials.username is None:
self.credentials.username = user['name']
if self.credentials.user_id is None:
self.credentials.user_id = user['id']
if self.credentials.user_domain_id is None:
self.credentials.user_domain_id = user['domain']['id']
if self.credentials.user_domain_name is None:
self.credentials.user_domain_name = user['domain']['name']
def base_url(self, filters, auth_data=None):
"""
Filters can be:
- service: compute, image, etc
- region: the service region
- endpoint_type: adminURL, publicURL, internalURL
- api_version: replace catalog version with this
- skip_path: take just the base URL
"""
if auth_data is None:
auth_data = self.auth_data
token, _auth_data = auth_data
service = filters.get('service')
region = filters.get('region')
endpoint_type = filters.get('endpoint_type', 'public')
if service is None:
raise exceptions.EndpointNotFound("No service provided")
if 'URL' in endpoint_type:
endpoint_type = endpoint_type.replace('URL', '')
_base_url = None
catalog = _auth_data['catalog']
# Select entries with matching service type
service_catalog = [ep for ep in catalog if ep['type'] == service]
if len(service_catalog) > 0:
service_catalog = service_catalog[0]['endpoints']
else:
# No matching service
raise exceptions.EndpointNotFound(service)
# Filter by endpoint type (interface)
filtered_catalog = [ep for ep in service_catalog if
ep['interface'] == endpoint_type]
if len(filtered_catalog) == 0:
# No matching type, keep all and try matching by region at least
filtered_catalog = service_catalog
# Filter by region
filtered_catalog = [ep for ep in filtered_catalog if
ep['region'] == region]
if len(filtered_catalog) == 0:
# No matching region, take the first endpoint
filtered_catalog = [service_catalog[0]]
# There should be only one match. If not take the first.
_base_url = filtered_catalog[0].get('url', None)
if _base_url is None:
raise exceptions.EndpointNotFound(service)
parts = urlparse.urlparse(_base_url)
if filters.get('api_version', None) is not None:
path = "/" + filters['api_version']
noversion_path = "/".join(parts.path.split("/")[2:])
if noversion_path != "":
path += "/" + noversion_path
_base_url = _base_url.replace(parts.path, path)
if filters.get('skip_path', None) is not None:
_base_url = _base_url.replace(parts.path, "/")
return _base_url
def is_expired(self, auth_data):
_, access = auth_data
expiry = datetime.datetime.strptime(access['expires_at'],
self.EXPIRY_DATE_FORMAT)
return expiry - self.token_expiry_threshold <= \
datetime.datetime.utcnow()
def get_default_credentials(credential_type, fill_in=True):
"""
Returns configured credentials of the specified type
based on the configured auth_version
"""
return get_credentials(fill_in=fill_in, credential_type=credential_type)
def get_credentials(credential_type=None, fill_in=True, **kwargs):
"""
Builds a credentials object based on the configured auth_version
:param credential_type (string): requests credentials from tempest
configuration file. Valid values are defined in
Credentials.TYPE.
:param kwargs (dict): take into account only if credential_type is
not specified or None. Dict of credential key/value pairs
Examples:
Returns credentials from the provided parameters:
>>> get_credentials(username='foo', password='bar')
Returns credentials from tempest configuration:
>>> get_credentials(credential_type='user')
"""
if CONF.identity.auth_version == 'v2':
credential_class = KeystoneV2Credentials
auth_provider_class = KeystoneV2AuthProvider
elif CONF.identity.auth_version == 'v3':
credential_class = KeystoneV3Credentials
auth_provider_class = KeystoneV3AuthProvider
else:
raise exceptions.InvalidConfiguration('Unsupported auth version')
if credential_type is not None:
creds = credential_class.get_default(credential_type)
else:
creds = credential_class(**kwargs)
# Fill in the credentials fields that were not specified
if fill_in:
auth_provider = auth_provider_class(creds)
creds = auth_provider.fill_credentials()
return creds
class Credentials(object):
"""
Set of credentials for accessing OpenStack services
ATTRIBUTES: list of valid class attributes representing credentials.
TYPES: types of credentials available in the configuration file.
For each key there's a tuple (section, prefix) to match the
configuration options.
"""
ATTRIBUTES = []
TYPES = {
'identity_admin': ('identity', 'admin'),
'user': ('identity', None),
'alt_user': ('identity', 'alt')
}
def __init__(self, **kwargs):
"""
Enforce the available attributes at init time (only).
Additional attributes can still be set afterwards if tests need
to do so.
"""
self._initial = kwargs
self._apply_credentials(kwargs)
def _apply_credentials(self, attr):
for key in attr.keys():
if key in self.ATTRIBUTES:
setattr(self, key, attr[key])
else:
raise exceptions.InvalidCredentials
def __str__(self):
"""
Represent only attributes included in self.ATTRIBUTES
"""
_repr = dict((k, getattr(self, k)) for k in self.ATTRIBUTES)
return str(_repr)
def __eq__(self, other):
"""
Credentials are equal if attributes in self.ATTRIBUTES are equal
"""
return str(self) == str(other)
def __getattr__(self, key):
# If an attribute is set, __getattr__ is not invoked
# If an attribute is not set, and it is a known one, return None
if key in self.ATTRIBUTES:
return None
else:
raise AttributeError
def __delitem__(self, key):
# For backwards compatibility, support dict behaviour
if key in self.ATTRIBUTES:
delattr(self, key)
else:
raise AttributeError
def get(self, item, default):
# In this patch act as dict for backward compatibility
try:
return getattr(self, item)
except AttributeError:
return default
@classmethod
def get_default(cls, credentials_type):
if credentials_type not in cls.TYPES:
raise exceptions.InvalidCredentials()
creds = cls._get_default(credentials_type)
if not creds.is_valid():
msg = ("The %s credentials are incorrectly set in the config file."
" Double check that all required values are assigned" %
credentials_type)
raise exceptions.InvalidConfiguration(msg)
return creds
@classmethod
def _get_default(cls, credentials_type):
raise NotImplementedError
def is_valid(self):
raise NotImplementedError
def reset(self):
# First delete all known attributes
for key in self.ATTRIBUTES:
if getattr(self, key) is not None:
delattr(self, key)
# Then re-apply initial setup
self._apply_credentials(self._initial)
class KeystoneV2Credentials(Credentials):
CONF_ATTRIBUTES = ['username', 'password', 'tenant_name']
ATTRIBUTES = ['user_id', 'tenant_id']
ATTRIBUTES.extend(CONF_ATTRIBUTES)
@classmethod
def _get_default(cls, credentials_type='user'):
params = {}
section, prefix = cls.TYPES[credentials_type]
for attr in cls.CONF_ATTRIBUTES:
_section = getattr(CONF, section)
if prefix is None:
params[attr] = getattr(_section, attr)
else:
params[attr] = getattr(_section, prefix + "_" + attr)
return cls(**params)
def is_valid(self):
"""
Minimum set of valid credentials, are username and password.
Tenant is optional.
"""
return None not in (self.username, self.password)
class KeystoneV3Credentials(KeystoneV2Credentials):
"""
Credentials suitable for the Keystone Identity V3 API
"""
CONF_ATTRIBUTES = ['domain_name', 'password', 'tenant_name', 'username']
ATTRIBUTES = ['project_domain_id', 'project_domain_name', 'project_id',
'project_name', 'tenant_id', 'tenant_name', 'user_domain_id',
'user_domain_name', 'user_id']
ATTRIBUTES.extend(CONF_ATTRIBUTES)
def __init__(self, **kwargs):
"""
If domain is not specified, load the one configured for the
identity manager.
"""
domain_fields = set(x for x in self.ATTRIBUTES if 'domain' in x)
if not domain_fields.intersection(kwargs.keys()):
kwargs['user_domain_name'] = CONF.identity.admin_domain_name
super(KeystoneV3Credentials, self).__init__(**kwargs)
def __setattr__(self, key, value):
parent = super(KeystoneV3Credentials, self)
# for tenant_* set both project and tenant
if key == 'tenant_id':
parent.__setattr__('project_id', value)
elif key == 'tenant_name':
parent.__setattr__('project_name', value)
# for project_* set both project and tenant
if key == 'project_id':
parent.__setattr__('tenant_id', value)
elif key == 'project_name':
parent.__setattr__('tenant_name', value)
# for *_domain_* set both user and project if not set yet
if key == 'user_domain_id':
if self.project_domain_id is None:
parent.__setattr__('project_domain_id', value)
if key == 'project_domain_id':
if self.user_domain_id is None:
parent.__setattr__('user_domain_id', value)
if key == 'user_domain_name':
if self.project_domain_name is None:
parent.__setattr__('project_domain_name', value)
if key == 'project_domain_name':
if self.user_domain_name is None:
parent.__setattr__('user_domain_name', value)
# support domain_name coming from config
if key == 'domain_name':
parent.__setattr__('user_domain_name', value)
parent.__setattr__('project_domain_name', value)
# finally trigger default behaviour for all attributes
parent.__setattr__(key, value)
def is_valid(self):
"""
Valid combinations of v3 credentials (excluding token, scope)
- User id, password (optional domain)
- User name, password and its domain id/name
For the scope, valid combinations are:
- None
- Project id (optional domain)
- Project name and its domain id/name
"""
valid_user_domain = any(
[self.user_domain_id is not None,
self.user_domain_name is not None])
valid_project_domain = any(
[self.project_domain_id is not None,
self.project_domain_name is not None])
valid_user = any(
[self.user_id is not None,
self.username is not None and valid_user_domain])
valid_project = any(
[self.project_name is None and self.project_id is None,
self.project_id is not None,
self.project_name is not None and valid_project_domain])
return all([self.password is not None, valid_user, valid_project])<|fim▁end|> | |
<|file_name|>test_seqreader.py<|end_file_name|><|fim▁begin|>import pytest
from canon.seq.seqreader import SeqReader
from .. import resource
def test_read_seq():
reader = SeqReader(resource('seq/Quartz_500Mpa_.SEQ'))
reader.get_Om()
Z, _, N = reader.get_Zmap('orsnr___')
def test_merge_Zmap():
reader = SeqReader()
<|fim▁hole|> Z2, _, N2 = reader.get_Zmap('orsnr___')
Z, N = SeqReader.merge_Zmap(Z1, Z2, N1, N2)
if __name__ == '__main__':
pytest.main()<|fim▁end|> | reader.read_seq(resource('seq/au30_a1_.SEQ'))
Z1, _, N1 = reader.get_Zmap('orsnr___')
reader.read_seq(resource('seq/au30_m1_.SEQ')) |
<|file_name|>baBackTop.component.ts<|end_file_name|><|fim▁begin|>import {Component, ViewChild, HostListener, Input, ElementRef} from '@angular/core';
@Component({
selector: 'ba-back-top',
styleUrls: ['./baBackTop.scss'],
template: `<|fim▁hole|>export class BaBackTop {
@Input() position:number = 400;
@Input() showSpeed:number = 500;
@Input() moveSpeed:number = 1000;
@ViewChild('baBackTop') _selector:ElementRef;
ngAfterViewInit () {
this._onWindowScroll();
}
@HostListener('click')
_onClick():boolean {
jQuery('html, body').animate({scrollTop:0}, {duration:this.moveSpeed});
return false;
}
@HostListener('window:scroll')
_onWindowScroll():void {
let el = this._selector.nativeElement;
window.scrollY > this.position ? jQuery(el).fadeIn(this.showSpeed) : jQuery(el).fadeOut(this.showSpeed);
}
}<|fim▁end|> | <i #baBackTop class="fa fa-angle-up back-top ba-back-top" title="Back to Top"></i>
`
}) |
<|file_name|>graph_algorithms_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.join(os.getcwd(), os.path.pardir))
import unittest
from digraph import digraph
from graph import graph
from graph_algorithms import *
class test_graph(unittest.TestCase):
def setUp(self):
self.gr = graph()
self.gr.add_nodes(["s", "a", "b", "c", "d", "e",
"f", "g", "h", "j", "k", "l"])
self.gr.add_edges([("s", "a"), ("s", "b"), ("a", "c"), ("c", "e")])
self.gr.add_edges([("e", "d"), ("d", "b"), ("a", "b"), ("c", "d")])
self.gr.add_edges([("g", "h"), ("f", "g")])<|fim▁hole|> self.digr.add_nodes(['s', 'a', 'b', 'c', 'd', 'e', 'f'])
self.digr.add_edges([("s", "a"), ("a", "b"), ("b", "a"), ("c", "b")])
self.digr.add_edges([("b", "s"), ("s", "d"), ("d", "e"), ("e", "d")])
self.digr.add_edges([("b", "f"), ("e", "f")])
def test_bfs_undirected_graph(self):
self.assertEqual(len(BFS(self.gr, "s")), 6)
self.assertEqual(len(BFS(self.gr, "j")), 3)
self.assertEqual(len(BFS(self.gr, "g")), 3)
def test_bfs_directed_graph(self):
self.assertEqual(len(BFS(self.digr, "s")), 6)
self.assertEqual(len(BFS(self.digr, "c")), 7)
self.assertEqual(len(BFS(self.digr, "f")), 1)
def test_dfs_undirected_graph(self):
self.assertEqual(len(DFS(self.gr, "s")), 6)
self.assertEqual(len(DFS(self.gr, "j")), 3)
self.assertEqual(len(DFS(self.gr, "g")), 3)
def test_dfs_directed_graph(self):
self.assertEqual(len(DFS(self.digr, "s")), 6)
self.assertEqual(len(DFS(self.digr, "c")), 7)
self.assertEqual(len(DFS(self.digr, "f")), 1)
def test_shortest_hops_undirected_graph(self):
self.assertEqual(shortest_hops(self.gr, "s")["c"], 2)
self.assertEqual(shortest_hops(self.gr, "c")["s"], 2)
self.assertEqual(shortest_hops(self.gr, "s")["s"], 0)
self.assertEqual(shortest_hops(self.gr, "c")["j"], float('inf'))
def test_shortest_hops_directed_graph(self):
self.assertEqual(shortest_hops(self.digr, "s")["f"], 3)
self.assertEqual(shortest_hops(self.digr, "f")["s"], float('inf'))
self.assertEqual(shortest_hops(self.digr, "s")["s"], 0)
self.assertEqual(shortest_hops(self.digr, "s")["c"], float('inf'))
def test_undirected_connected_component(self):
self.assertEqual(len(undirected_connected_components(self.gr)), 3)
self.assertRaises(
Exception, undirected_connected_components, self.digr)
def test_topological_ordering(self):
dag = digraph() # directed acyclic graph
dag.add_nodes(["a", "b", "c", "d", "e", "f", "g", "h"])
dag.add_edges([("a", "b"), ("a", "c"), ("a", "e"), ("d", "a")])
dag.add_edges(
[("g", "b"), ("g", "f"), ("f", "e"), ("h", "f"), ("h", "a")])
order = {o[0]: o[1] for o in topological_ordering(dag)}
self.assertEqual(sum([order[u] < order[v] for (u, v) in
dag.edges()]), len(dag.edges())) # all comparisons are True
def test_directed_connected_components(self):
digr = digraph()
digr.add_nodes(["a", "b", "c", "d", "e", "f", "g", "h", "i"])
digr.add_edges([("b", "a"), ("a", "c"), ("c", "b"), ("d", "b")])
digr.add_edges([("d", "f"), ("f", "e"), ("e", "d"), ("g", "e")])
digr.add_edges([("g", "h"), ("h", "i"), ("i", "g")])
self.assertEqual(len(directed_connected_components(digr)), 3)
digr2 = digraph()
digr2.add_nodes(
["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k"])
digr2.add_edges(
[("a", "b"), ("b", "c"), ("c", "a"), ("b", "d"), ("d", "e")])
digr2.add_edges(
[("e", "f"), ("f", "g"), ("g", "e"), ("d", "g"), ("i", "f")])
digr2.add_edges(
[("h", "g"), ("c", "h"), ("c", "k"), ("h", "i"), ("i", "j")])
digr2.add_edges([("h", "j"), ("j", "k"), ("k", "h")])
self.assertEqual(len(directed_connected_components(digr2)), 4)
def test_shortest_path_in_directed_graph(self):
digr = digraph()
digr.add_nodes(["a", "b", "c", "d", "e", "f"])
digr.add_edge(("a", "b"), 7)
digr.add_edge(("a", "c"), 9)
digr.add_edge(("a", "f"), 14)
digr.add_edge(("f", "e"), 9)
digr.add_edge(("c", "f"), 2)
digr.add_edge(("c", "d"), 11)
digr.add_edge(("b", "c"), 10)
digr.add_edge(("b", "d"), 15)
digr.add_edge(("d", "e"), 6)
self.assertEqual(shortest_path(digr, "a")["a"], 0)
self.assertEqual(shortest_path(digr, "a")["b"], 7)
self.assertEqual(shortest_path(digr, "a")["c"], 9)
self.assertEqual(shortest_path(digr, "a")["d"], 20)
self.assertEqual(shortest_path(digr, "a")["e"], 20)
self.assertEqual(shortest_path(digr, "a")["f"], 11)
def test_prims_minimum_spanning_tree(self):
gr = graph()
gr.add_nodes(["a", "b", "c", "d"])
gr.add_edge(("a", "b"), 4)
gr.add_edge(("b", "c"), 3)
gr.add_edge(("a", "c"), 1)
gr.add_edge(("c", "d"), 2)
min_cost = minimum_spanning_tree(gr)
self.assertEqual(min_cost, 6)
def test_kruskals_minimum_spanning_tree(self):
gr = graph()
gr.add_nodes(["a", "b", "c", "d"])
gr.add_edge(("a", "b"), 4)
gr.add_edge(("b", "c"), 3)
gr.add_edge(("a", "c"), 1)
gr.add_edge(("c", "d"), 2)
min_cost = kruskal_MST(gr)
self.assertEqual(min_cost, 6)
if __name__ == "__main__":
unittest.main()
os.system("pause")<|fim▁end|> | self.gr.add_edges([("j", "k"), ("j", "l")])
self.digr = digraph() |
<|file_name|>Dependencies.py<|end_file_name|><|fim▁begin|><|fim▁hole|># list of header (.h) files used by source files (.cxx).
# As a module, provides
# FindPathToHeader(header, includePath) -> path
# FindHeadersInFile(filePath) -> [headers]
# FindHeadersInFileRecursive(filePath, includePath, renames) -> [paths]
# FindDependencies(sourceGlobs, includePath, objExt, startDirectory, renames) -> [dependencies]
# ExtractDependencies(input) -> [dependencies]
# TextFromDependencies(dependencies)
# WriteDependencies(output, dependencies)
# UpdateDependencies(filepath, dependencies)
# PathStem(p) -> stem
# InsertSynonym(dependencies, current, additional) -> [dependencies]
# If run as a script reads from stdin and writes to stdout.
# Only tested with ASCII file names.
# Copyright 2019 by Neil Hodgson <neilh@scintilla.org>
# The License.txt file describes the conditions under which this software may be distributed.
# Requires Python 2.7 or later
import codecs, glob, os, sys
if __name__ == "__main__":
import FileGenerator
else:
from . import FileGenerator
continuationLineEnd = " \\"
def FindPathToHeader(header, includePath):
for incDir in includePath:
relPath = os.path.join(incDir, header)
if os.path.exists(relPath):
return relPath
return ""
fhifCache = {} # Remember the includes in each file. ~5x speed up.
def FindHeadersInFile(filePath):
if filePath not in fhifCache:
headers = []
with codecs.open(filePath, "r", "utf-8") as f:
for line in f:
if line.strip().startswith("#include"):
parts = line.split()
if len(parts) > 1:
header = parts[1]
if header[0] != '<': # No system headers
headers.append(header.strip('"'))
fhifCache[filePath] = headers
return fhifCache[filePath]
def FindHeadersInFileRecursive(filePath, includePath, renames):
headerPaths = []
for header in FindHeadersInFile(filePath):
if header in renames:
header = renames[header]
relPath = FindPathToHeader(header, includePath)
if relPath and relPath not in headerPaths:
headerPaths.append(relPath)
subHeaders = FindHeadersInFileRecursive(relPath, includePath, renames)
headerPaths.extend(sh for sh in subHeaders if sh not in headerPaths)
return headerPaths
def RemoveStart(relPath, start):
if relPath.startswith(start):
return relPath[len(start):]
return relPath
def ciKey(f):
return f.lower()
def FindDependencies(sourceGlobs, includePath, objExt, startDirectory, renames={}):
deps = []
for sourceGlob in sourceGlobs:
sourceFiles = glob.glob(sourceGlob)
# Sorting the files minimizes deltas as order returned by OS may be arbitrary
sourceFiles.sort(key=ciKey)
for sourceName in sourceFiles:
objName = os.path.splitext(os.path.basename(sourceName))[0]+objExt
headerPaths = FindHeadersInFileRecursive(sourceName, includePath, renames)
depsForSource = [sourceName] + headerPaths
depsToAppend = [RemoveStart(fn.replace("\\", "/"), startDirectory) for
fn in depsForSource]
deps.append([objName, depsToAppend])
return deps
def PathStem(p):
""" Return the stem of a filename: "CallTip.o" -> "CallTip" """
return os.path.splitext(os.path.basename(p))[0]
def InsertSynonym(dependencies, current, additional):
""" Insert a copy of one object file with dependencies under a different name.
Used when one source file is used to create two object files with different
preprocessor definitions. """
result = []
for dep in dependencies:
result.append(dep)
if (dep[0] == current):
depAdd = [additional, dep[1]]
result.append(depAdd)
return result
def ExtractDependencies(input):
""" Create a list of dependencies from input list of lines
Each element contains the name of the object and a list of
files that it depends on.
Dependencies that contain "/usr/" are removed as they are system headers. """
deps = []
for line in input:
headersLine = line.startswith(" ") or line.startswith("\t")
line = line.strip()
isContinued = line.endswith("\\")
line = line.rstrip("\\ ")
fileNames = line.strip().split(" ")
if not headersLine:
# its a source file line, there may be headers too
sourceLine = fileNames[0].rstrip(":")
fileNames = fileNames[1:]
deps.append([sourceLine, []])
deps[-1][1].extend(header for header in fileNames if "/usr/" not in header)
return deps
def TextFromDependencies(dependencies):
""" Convert a list of dependencies to text. """
text = ""
indentHeaders = "\t"
joinHeaders = continuationLineEnd + os.linesep + indentHeaders
for dep in dependencies:
object, headers = dep
text += object + ":"
for header in headers:
text += joinHeaders
text += header
if headers:
text += os.linesep
return text
def UpdateDependencies(filepath, dependencies, comment=""):
""" Write a dependencies file if different from dependencies. """
FileGenerator.UpdateFile(os.path.abspath(filepath), comment.rstrip() + os.linesep +
TextFromDependencies(dependencies))
def WriteDependencies(output, dependencies):
""" Write a list of dependencies out to a stream. """
output.write(TextFromDependencies(dependencies))
if __name__ == "__main__":
""" Act as a filter that reformats input dependencies to one per line. """
inputLines = sys.stdin.readlines()
deps = ExtractDependencies(inputLines)
WriteDependencies(sys.stdout, deps)<|fim▁end|> | #!/usr/bin/env python
# Dependencies.py - discover, read, and write dependencies file for make.
# The format like the output from "g++ -MM" which produces a
|
<|file_name|>swanson_bot.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import praw
import re
import os
import pickle
from array import *
import random
#REPLY = "I want all the bacon and eggs you have."
REPLY = array('i',["I want all the bacon and eggs you have", "I know what I'm about son", "I'm not interested in caring about people", "Is this not rap?"])
if not os.path.isfile("inigo_config.txt"):
print "You must create the file swanson_config.txt with the pickled credentials."
exit(1)
else:
print "Loading credentials"
user_data = pickle.load( open("swanson_config.txt","rb"))
#print user_data
user_agent = ("Swanson bot 0.1 created by /u/dcooper2.")
r = praw.Reddit(user_agent=user_agent)
r.login(user_data[0], user_data[1])
del user_data
print "Successfully logged in"
# Check for previous replies
if not os.path.isfile("replies.txt"):
replies = []
else:
print "Loading previous reply ids"
with open("replies.txt", "r") as f:
replies = f.read()
replies = replies.split("\n")
replies = filter(None, replies)
<|fim▁hole|># Check for new items to reply to
subreddit = r.get_subreddit('umw_cpsc470Z')
print "Checking for new posts"
for submission in subreddit.get_hot(limit=10):
print "Checking submission ", submission.id
if submission.id not in replies:
if re.search("Ron Swanson", submission.title, re.IGNORECASE) or re.search("Ron Swanson", submission.selftext, re.IGNORECASE):
x = random.randint(0,3)
submission.add_comment(REPLY[x])
print "Bot replying to submission: ", submission.id
replies.append(submission.id)
print "Checking comments"
flat_comments = praw.helpers.flatten_tree(submission.comments)
for comment in flat_comments:
if comment.id not in replies:
if re.search("Ron Swanson", comment.body, re.IGNORECASE):
y = random.randint(0,3)
print "Bot replying to comment: ", comment.id
comment.reply(REPLY[y])
replies.append(comment.id)
# Save new replies
print "Saving ids to file"
with open("replies.txt", "w") as f:
for i in replies:
f.write(i + "\n")<|fim▁end|> | |
<|file_name|>parser.js<|end_file_name|><|fim▁begin|>(function() {
"use strict";
var src$parser$$default = (function() {
"use strict";
/*
* Generated by PEG.js 0.9.0.
*
* http://pegjs.org/
*/
function peg$subclass(child, parent) {
function ctor() { this.constructor = child; }
ctor.prototype = parent.prototype;
child.prototype = new ctor();
}
function peg$SyntaxError(message, expected, found, location) {
this.message = message;
this.expected = expected;
this.found = found;
this.location = location;
this.name = "SyntaxError";
if (typeof Error.captureStackTrace === "function") {
Error.captureStackTrace(this, peg$SyntaxError);
}
}
peg$subclass(peg$SyntaxError, Error);
function peg$parse(input) {
var options = arguments.length > 1 ? arguments[1] : {},
parser = this,
peg$FAILED = {},
peg$startRuleFunctions = { start: peg$parsestart },
peg$startRuleFunction = peg$parsestart,
peg$c0 = function(elements) {
return {
type : 'messageFormatPattern',
elements: elements,
location: location()
};
},
peg$c1 = function(text) {
var string = '',
i, j, outerLen, inner, innerLen;
for (i = 0, outerLen = text.length; i < outerLen; i += 1) {
inner = text[i];
for (j = 0, innerLen = inner.length; j < innerLen; j += 1) {
string += inner[j];
}
}
return string;
},
peg$c2 = function(messageText) {
return {
type : 'messageTextElement',
value: messageText,
location: location()
};
},
peg$c3 = /^[^ \t\n\r,.+={}#]/,
peg$c4 = { type: "class", value: "[^ \\t\\n\\r,.+={}#]", description: "[^ \\t\\n\\r,.+={}#]" },
peg$c5 = "{",
peg$c6 = { type: "literal", value: "{", description: "\"{\"" },
peg$c7 = ",",
peg$c8 = { type: "literal", value: ",", description: "\",\"" },
peg$c9 = "}",
peg$c10 = { type: "literal", value: "}", description: "\"}\"" },
peg$c11 = function(id, format) {
return {
type : 'argumentElement',
id : id,
format: format && format[2],
location: location()
};
},
peg$c12 = "number",
peg$c13 = { type: "literal", value: "number", description: "\"number\"" },
peg$c14 = "date",
peg$c15 = { type: "literal", value: "date", description: "\"date\"" },
peg$c16 = "time",
peg$c17 = { type: "literal", value: "time", description: "\"time\"" },
peg$c18 = function(type, style) {
return {
type : type + 'Format',
style: style && style[2],
location: location()
};
},
peg$c19 = "plural",
peg$c20 = { type: "literal", value: "plural", description: "\"plural\"" },
peg$c21 = function(pluralStyle) {
return {
type : pluralStyle.type,
ordinal: false,
offset : pluralStyle.offset || 0,
options: pluralStyle.options,
location: location()
};
},
peg$c22 = "selectordinal",
peg$c23 = { type: "literal", value: "selectordinal", description: "\"selectordinal\"" },
peg$c24 = function(pluralStyle) {
return {
type : pluralStyle.type,
ordinal: true,
offset : pluralStyle.offset || 0,
options: pluralStyle.options,
location: location()
}
},
peg$c25 = "select",
peg$c26 = { type: "literal", value: "select", description: "\"select\"" },
peg$c27 = function(options) {
return {
type : 'selectFormat',
options: options,
location: location()
};
},
peg$c28 = "=",
peg$c29 = { type: "literal", value: "=", description: "\"=\"" },
peg$c30 = function(selector, pattern) {
return {
type : 'optionalFormatPattern',
selector: selector,
value : pattern,
location: location()
};
},
peg$c31 = "offset:",
peg$c32 = { type: "literal", value: "offset:", description: "\"offset:\"" },
peg$c33 = function(number) {
return number;
},
peg$c34 = function(offset, options) {
return {
type : 'pluralFormat',
offset : offset,
options: options,
location: location()
};
},
peg$c35 = { type: "other", description: "whitespace" },
peg$c36 = /^[ \t\n\r]/,
peg$c37 = { type: "class", value: "[ \\t\\n\\r]", description: "[ \\t\\n\\r]" },
peg$c38 = { type: "other", description: "optionalWhitespace" },
peg$c39 = /^[0-9]/,
peg$c40 = { type: "class", value: "[0-9]", description: "[0-9]" },
peg$c41 = /^[0-9a-f]/i,
peg$c42 = { type: "class", value: "[0-9a-f]i", description: "[0-9a-f]i" },
peg$c43 = "0",
peg$c44 = { type: "literal", value: "0", description: "\"0\"" },
peg$c45 = /^[1-9]/,
peg$c46 = { type: "class", value: "[1-9]", description: "[1-9]" },
peg$c47 = function(digits) {
return parseInt(digits, 10);
},
peg$c48 = /^[^{}\\\0-\x1F \t\n\r]/,
peg$c49 = { type: "class", value: "[^{}\\\\\\0-\\x1F\\x7f \\t\\n\\r]", description: "[^{}\\\\\\0-\\x1F\\x7f \\t\\n\\r]" },
peg$c50 = "\\\\",
peg$c51 = { type: "literal", value: "\\\\", description: "\"\\\\\\\\\"" },
peg$c52 = function() { return '\\'; },
peg$c53 = "\\#",
peg$c54 = { type: "literal", value: "\\#", description: "\"\\\\#\"" },
peg$c55 = function() { return '\\#'; },
peg$c56 = "\\{",
peg$c57 = { type: "literal", value: "\\{", description: "\"\\\\{\"" },
peg$c58 = function() { return '\u007B'; },
peg$c59 = "\\}",
peg$c60 = { type: "literal", value: "\\}", description: "\"\\\\}\"" },
peg$c61 = function() { return '\u007D'; },
peg$c62 = "\\u",
peg$c63 = { type: "literal", value: "\\u", description: "\"\\\\u\"" },
peg$c64 = function(digits) {
return String.fromCharCode(parseInt(digits, 16));
},
peg$c65 = function(chars) { return chars.join(''); },
peg$currPos = 0,
peg$savedPos = 0,
peg$posDetailsCache = [{ line: 1, column: 1, seenCR: false }],
peg$maxFailPos = 0,
peg$maxFailExpected = [],
peg$silentFails = 0,
peg$result;
if ("startRule" in options) {
if (!(options.startRule in peg$startRuleFunctions)) {
throw new Error("Can't start parsing from rule \"" + options.startRule + "\".");
}
peg$startRuleFunction = peg$startRuleFunctions[options.startRule];
}
function text() {
return input.substring(peg$savedPos, peg$currPos);
}
function location() {
return peg$computeLocation(peg$savedPos, peg$currPos);
}
function expected(description) {
throw peg$buildException(
null,
[{ type: "other", description: description }],
input.substring(peg$savedPos, peg$currPos),
peg$computeLocation(peg$savedPos, peg$currPos)
);
}
function error(message) {
throw peg$buildException(
message,
null,
input.substring(peg$savedPos, peg$currPos),
peg$computeLocation(peg$savedPos, peg$currPos)
);
}
function peg$computePosDetails(pos) {
var details = peg$posDetailsCache[pos],
p, ch;
if (details) {
return details;
} else {
p = pos - 1;
while (!peg$posDetailsCache[p]) {
p--;
}
details = peg$posDetailsCache[p];
details = {
line: details.line,
column: details.column,
seenCR: details.seenCR
};
while (p < pos) {
ch = input.charAt(p);
if (ch === "\n") {
if (!details.seenCR) { details.line++; }
details.column = 1;
details.seenCR = false;
} else if (ch === "\r" || ch === "\u2028" || ch === "\u2029") {
details.line++;
details.column = 1;
details.seenCR = true;
} else {
details.column++;
details.seenCR = false;
}
p++;
}
peg$posDetailsCache[pos] = details;
return details;
}
}
function peg$computeLocation(startPos, endPos) {
var startPosDetails = peg$computePosDetails(startPos),
endPosDetails = peg$computePosDetails(endPos);
return {
start: {
offset: startPos,
line: startPosDetails.line,
column: startPosDetails.column
},
end: {
offset: endPos,
line: endPosDetails.line,
column: endPosDetails.column
}
};
}
function peg$fail(expected) {
if (peg$currPos < peg$maxFailPos) { return; }
if (peg$currPos > peg$maxFailPos) {
peg$maxFailPos = peg$currPos;
peg$maxFailExpected = [];
}
peg$maxFailExpected.push(expected);
}
function peg$buildException(message, expected, found, location) {
function cleanupExpected(expected) {
var i = 1;
expected.sort(function(a, b) {
if (a.description < b.description) {
return -1;
} else if (a.description > b.description) {
return 1;
} else {
return 0;
}
});
while (i < expected.length) {
if (expected[i - 1] === expected[i]) {
expected.splice(i, 1);
} else {
i++;
}
}
}
function buildMessage(expected, found) {
function stringEscape(s) {
function hex(ch) { return ch.charCodeAt(0).toString(16).toUpperCase(); }
return s
.replace(/\\/g, '\\\\')
.replace(/"/g, '\\"')
.replace(/\x08/g, '\\b')
.replace(/\t/g, '\\t')
.replace(/\n/g, '\\n')
.replace(/\f/g, '\\f')
.replace(/\r/g, '\\r')
.replace(/[\x00-\x07\x0B\x0E\x0F]/g, function(ch) { return '\\x0' + hex(ch); })
.replace(/[\x10-\x1F\x80-\xFF]/g, function(ch) { return '\\x' + hex(ch); })
.replace(/[\u0100-\u0FFF]/g, function(ch) { return '\\u0' + hex(ch); })
.replace(/[\u1000-\uFFFF]/g, function(ch) { return '\\u' + hex(ch); });
}
var expectedDescs = new Array(expected.length),
expectedDesc, foundDesc, i;
for (i = 0; i < expected.length; i++) {
expectedDescs[i] = expected[i].description;
}
expectedDesc = expected.length > 1
? expectedDescs.slice(0, -1).join(", ")
+ " or "
+ expectedDescs[expected.length - 1]
: expectedDescs[0];
foundDesc = found ? "\"" + stringEscape(found) + "\"" : "end of input";
return "Expected " + expectedDesc + " but " + foundDesc + " found.";
}
if (expected !== null) {
cleanupExpected(expected);
}
return new peg$SyntaxError(
message !== null ? message : buildMessage(expected, found),
expected,
found,
location
);
}
function peg$parsestart() {
var s0;
s0 = peg$parsemessageFormatPattern();
return s0;
}
function peg$parsemessageFormatPattern() {
var s0, s1, s2;
s0 = peg$currPos;
s1 = [];
s2 = peg$parsemessageFormatElement();
while (s2 !== peg$FAILED) {
s1.push(s2);
s2 = peg$parsemessageFormatElement();
}
if (s1 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c0(s1);
}
s0 = s1;
return s0;
}
function peg$parsemessageFormatElement() {
var s0;
s0 = peg$parsemessageTextElement();
if (s0 === peg$FAILED) {
s0 = peg$parseargumentElement();
}
return s0;
}
function peg$parsemessageText() {
var s0, s1, s2, s3, s4, s5;
s0 = peg$currPos;
s1 = [];
s2 = peg$currPos;
s3 = peg$parse_();
if (s3 !== peg$FAILED) {
s4 = peg$parsechars();
if (s4 !== peg$FAILED) {
s5 = peg$parse_();
if (s5 !== peg$FAILED) {
s3 = [s3, s4, s5];
s2 = s3;
} else {
peg$currPos = s2;
s2 = peg$FAILED;
}
} else {
peg$currPos = s2;
s2 = peg$FAILED;
}
} else {
peg$currPos = s2;
s2 = peg$FAILED;
}
if (s2 !== peg$FAILED) {
while (s2 !== peg$FAILED) {
s1.push(s2);
s2 = peg$currPos;
s3 = peg$parse_();
if (s3 !== peg$FAILED) {
s4 = peg$parsechars();
if (s4 !== peg$FAILED) {
s5 = peg$parse_();
if (s5 !== peg$FAILED) {
s3 = [s3, s4, s5];
s2 = s3;
} else {
peg$currPos = s2;
s2 = peg$FAILED;
}
} else {
peg$currPos = s2;
s2 = peg$FAILED;
}
} else {
peg$currPos = s2;
s2 = peg$FAILED;
}
}
} else {
s1 = peg$FAILED;
}
if (s1 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c1(s1);
}
s0 = s1;
if (s0 === peg$FAILED) {
s0 = peg$currPos;
s1 = peg$parsews();
if (s1 !== peg$FAILED) {
s0 = input.substring(s0, peg$currPos);
} else {
s0 = s1;
}
}
return s0;
}
function peg$parsemessageTextElement() {
var s0, s1;
s0 = peg$currPos;
s1 = peg$parsemessageText();
if (s1 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c2(s1);
}
s0 = s1;
return s0;
}
function peg$parseargument() {
var s0, s1, s2;
s0 = peg$parsenumber();
if (s0 === peg$FAILED) {
s0 = peg$currPos;
s1 = [];
if (peg$c3.test(input.charAt(peg$currPos))) {
s2 = input.charAt(peg$currPos);
peg$currPos++;
} else {
s2 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c4); }
}
if (s2 !== peg$FAILED) {
while (s2 !== peg$FAILED) {
s1.push(s2);
if (peg$c3.test(input.charAt(peg$currPos))) {
s2 = input.charAt(peg$currPos);
peg$currPos++;
} else {
s2 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c4); }
}
}
} else {
s1 = peg$FAILED;
}
if (s1 !== peg$FAILED) {
s0 = input.substring(s0, peg$currPos);
} else {
s0 = s1;
}
}
return s0;
}
function peg$parseargumentElement() {
var s0, s1, s2, s3, s4, s5, s6, s7, s8;
s0 = peg$currPos;
if (input.charCodeAt(peg$currPos) === 123) {
s1 = peg$c5;
peg$currPos++;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c6); }
}
if (s1 !== peg$FAILED) {
s2 = peg$parse_();
if (s2 !== peg$FAILED) {
s3 = peg$parseargument();
if (s3 !== peg$FAILED) {
s4 = peg$parse_();
if (s4 !== peg$FAILED) {
s5 = peg$currPos;
if (input.charCodeAt(peg$currPos) === 44) {
s6 = peg$c7;
peg$currPos++;
} else {
s6 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c8); }
}
if (s6 !== peg$FAILED) {
s7 = peg$parse_();
if (s7 !== peg$FAILED) {
s8 = peg$parseelementFormat();
if (s8 !== peg$FAILED) {
s6 = [s6, s7, s8];
s5 = s6;
} else {
peg$currPos = s5;
s5 = peg$FAILED;
}
} else {
peg$currPos = s5;
s5 = peg$FAILED;
}
} else {
peg$currPos = s5;
s5 = peg$FAILED;
}
if (s5 === peg$FAILED) {
s5 = null;
}
if (s5 !== peg$FAILED) {
s6 = peg$parse_();
if (s6 !== peg$FAILED) {
if (input.charCodeAt(peg$currPos) === 125) {
s7 = peg$c9;
peg$currPos++;
} else {
s7 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c10); }
}
if (s7 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c11(s3, s5);
s0 = s1;
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}<|fim▁hole|> function peg$parseelementFormat() {
var s0;
s0 = peg$parsesimpleFormat();
if (s0 === peg$FAILED) {
s0 = peg$parsepluralFormat();
if (s0 === peg$FAILED) {
s0 = peg$parseselectOrdinalFormat();
if (s0 === peg$FAILED) {
s0 = peg$parseselectFormat();
}
}
}
return s0;
}
function peg$parsesimpleFormat() {
var s0, s1, s2, s3, s4, s5, s6;
s0 = peg$currPos;
if (input.substr(peg$currPos, 6) === peg$c12) {
s1 = peg$c12;
peg$currPos += 6;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c13); }
}
if (s1 === peg$FAILED) {
if (input.substr(peg$currPos, 4) === peg$c14) {
s1 = peg$c14;
peg$currPos += 4;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c15); }
}
if (s1 === peg$FAILED) {
if (input.substr(peg$currPos, 4) === peg$c16) {
s1 = peg$c16;
peg$currPos += 4;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c17); }
}
}
}
if (s1 !== peg$FAILED) {
s2 = peg$parse_();
if (s2 !== peg$FAILED) {
s3 = peg$currPos;
if (input.charCodeAt(peg$currPos) === 44) {
s4 = peg$c7;
peg$currPos++;
} else {
s4 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c8); }
}
if (s4 !== peg$FAILED) {
s5 = peg$parse_();
if (s5 !== peg$FAILED) {
s6 = peg$parsechars();
if (s6 !== peg$FAILED) {
s4 = [s4, s5, s6];
s3 = s4;
} else {
peg$currPos = s3;
s3 = peg$FAILED;
}
} else {
peg$currPos = s3;
s3 = peg$FAILED;
}
} else {
peg$currPos = s3;
s3 = peg$FAILED;
}
if (s3 === peg$FAILED) {
s3 = null;
}
if (s3 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c18(s1, s3);
s0 = s1;
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
return s0;
}
function peg$parsepluralFormat() {
var s0, s1, s2, s3, s4, s5;
s0 = peg$currPos;
if (input.substr(peg$currPos, 6) === peg$c19) {
s1 = peg$c19;
peg$currPos += 6;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c20); }
}
if (s1 !== peg$FAILED) {
s2 = peg$parse_();
if (s2 !== peg$FAILED) {
if (input.charCodeAt(peg$currPos) === 44) {
s3 = peg$c7;
peg$currPos++;
} else {
s3 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c8); }
}
if (s3 !== peg$FAILED) {
s4 = peg$parse_();
if (s4 !== peg$FAILED) {
s5 = peg$parsepluralStyle();
if (s5 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c21(s5);
s0 = s1;
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
return s0;
}
function peg$parseselectOrdinalFormat() {
var s0, s1, s2, s3, s4, s5;
s0 = peg$currPos;
if (input.substr(peg$currPos, 13) === peg$c22) {
s1 = peg$c22;
peg$currPos += 13;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c23); }
}
if (s1 !== peg$FAILED) {
s2 = peg$parse_();
if (s2 !== peg$FAILED) {
if (input.charCodeAt(peg$currPos) === 44) {
s3 = peg$c7;
peg$currPos++;
} else {
s3 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c8); }
}
if (s3 !== peg$FAILED) {
s4 = peg$parse_();
if (s4 !== peg$FAILED) {
s5 = peg$parsepluralStyle();
if (s5 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c24(s5);
s0 = s1;
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
return s0;
}
function peg$parseselectFormat() {
var s0, s1, s2, s3, s4, s5, s6;
s0 = peg$currPos;
if (input.substr(peg$currPos, 6) === peg$c25) {
s1 = peg$c25;
peg$currPos += 6;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c26); }
}
if (s1 !== peg$FAILED) {
s2 = peg$parse_();
if (s2 !== peg$FAILED) {
if (input.charCodeAt(peg$currPos) === 44) {
s3 = peg$c7;
peg$currPos++;
} else {
s3 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c8); }
}
if (s3 !== peg$FAILED) {
s4 = peg$parse_();
if (s4 !== peg$FAILED) {
s5 = [];
s6 = peg$parseoptionalFormatPattern();
if (s6 !== peg$FAILED) {
while (s6 !== peg$FAILED) {
s5.push(s6);
s6 = peg$parseoptionalFormatPattern();
}
} else {
s5 = peg$FAILED;
}
if (s5 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c27(s5);
s0 = s1;
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
return s0;
}
function peg$parseselector() {
var s0, s1, s2, s3;
s0 = peg$currPos;
s1 = peg$currPos;
if (input.charCodeAt(peg$currPos) === 61) {
s2 = peg$c28;
peg$currPos++;
} else {
s2 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c29); }
}
if (s2 !== peg$FAILED) {
s3 = peg$parsenumber();
if (s3 !== peg$FAILED) {
s2 = [s2, s3];
s1 = s2;
} else {
peg$currPos = s1;
s1 = peg$FAILED;
}
} else {
peg$currPos = s1;
s1 = peg$FAILED;
}
if (s1 !== peg$FAILED) {
s0 = input.substring(s0, peg$currPos);
} else {
s0 = s1;
}
if (s0 === peg$FAILED) {
s0 = peg$parsechars();
}
return s0;
}
function peg$parseoptionalFormatPattern() {
var s0, s1, s2, s3, s4, s5, s6, s7, s8;
s0 = peg$currPos;
s1 = peg$parse_();
if (s1 !== peg$FAILED) {
s2 = peg$parseselector();
if (s2 !== peg$FAILED) {
s3 = peg$parse_();
if (s3 !== peg$FAILED) {
if (input.charCodeAt(peg$currPos) === 123) {
s4 = peg$c5;
peg$currPos++;
} else {
s4 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c6); }
}
if (s4 !== peg$FAILED) {
s5 = peg$parse_();
if (s5 !== peg$FAILED) {
s6 = peg$parsemessageFormatPattern();
if (s6 !== peg$FAILED) {
s7 = peg$parse_();
if (s7 !== peg$FAILED) {
if (input.charCodeAt(peg$currPos) === 125) {
s8 = peg$c9;
peg$currPos++;
} else {
s8 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c10); }
}
if (s8 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c30(s2, s6);
s0 = s1;
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
return s0;
}
function peg$parseoffset() {
var s0, s1, s2, s3;
s0 = peg$currPos;
if (input.substr(peg$currPos, 7) === peg$c31) {
s1 = peg$c31;
peg$currPos += 7;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c32); }
}
if (s1 !== peg$FAILED) {
s2 = peg$parse_();
if (s2 !== peg$FAILED) {
s3 = peg$parsenumber();
if (s3 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c33(s3);
s0 = s1;
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
return s0;
}
function peg$parsepluralStyle() {
var s0, s1, s2, s3, s4;
s0 = peg$currPos;
s1 = peg$parseoffset();
if (s1 === peg$FAILED) {
s1 = null;
}
if (s1 !== peg$FAILED) {
s2 = peg$parse_();
if (s2 !== peg$FAILED) {
s3 = [];
s4 = peg$parseoptionalFormatPattern();
if (s4 !== peg$FAILED) {
while (s4 !== peg$FAILED) {
s3.push(s4);
s4 = peg$parseoptionalFormatPattern();
}
} else {
s3 = peg$FAILED;
}
if (s3 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c34(s1, s3);
s0 = s1;
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
return s0;
}
function peg$parsews() {
var s0, s1;
peg$silentFails++;
s0 = [];
if (peg$c36.test(input.charAt(peg$currPos))) {
s1 = input.charAt(peg$currPos);
peg$currPos++;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c37); }
}
if (s1 !== peg$FAILED) {
while (s1 !== peg$FAILED) {
s0.push(s1);
if (peg$c36.test(input.charAt(peg$currPos))) {
s1 = input.charAt(peg$currPos);
peg$currPos++;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c37); }
}
}
} else {
s0 = peg$FAILED;
}
peg$silentFails--;
if (s0 === peg$FAILED) {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c35); }
}
return s0;
}
function peg$parse_() {
var s0, s1, s2;
peg$silentFails++;
s0 = peg$currPos;
s1 = [];
s2 = peg$parsews();
while (s2 !== peg$FAILED) {
s1.push(s2);
s2 = peg$parsews();
}
if (s1 !== peg$FAILED) {
s0 = input.substring(s0, peg$currPos);
} else {
s0 = s1;
}
peg$silentFails--;
if (s0 === peg$FAILED) {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c38); }
}
return s0;
}
function peg$parsedigit() {
var s0;
if (peg$c39.test(input.charAt(peg$currPos))) {
s0 = input.charAt(peg$currPos);
peg$currPos++;
} else {
s0 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c40); }
}
return s0;
}
function peg$parsehexDigit() {
var s0;
if (peg$c41.test(input.charAt(peg$currPos))) {
s0 = input.charAt(peg$currPos);
peg$currPos++;
} else {
s0 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c42); }
}
return s0;
}
function peg$parsenumber() {
var s0, s1, s2, s3, s4, s5;
s0 = peg$currPos;
if (input.charCodeAt(peg$currPos) === 48) {
s1 = peg$c43;
peg$currPos++;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c44); }
}
if (s1 === peg$FAILED) {
s1 = peg$currPos;
s2 = peg$currPos;
if (peg$c45.test(input.charAt(peg$currPos))) {
s3 = input.charAt(peg$currPos);
peg$currPos++;
} else {
s3 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c46); }
}
if (s3 !== peg$FAILED) {
s4 = [];
s5 = peg$parsedigit();
while (s5 !== peg$FAILED) {
s4.push(s5);
s5 = peg$parsedigit();
}
if (s4 !== peg$FAILED) {
s3 = [s3, s4];
s2 = s3;
} else {
peg$currPos = s2;
s2 = peg$FAILED;
}
} else {
peg$currPos = s2;
s2 = peg$FAILED;
}
if (s2 !== peg$FAILED) {
s1 = input.substring(s1, peg$currPos);
} else {
s1 = s2;
}
}
if (s1 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c47(s1);
}
s0 = s1;
return s0;
}
function peg$parsechar() {
var s0, s1, s2, s3, s4, s5, s6, s7;
if (peg$c48.test(input.charAt(peg$currPos))) {
s0 = input.charAt(peg$currPos);
peg$currPos++;
} else {
s0 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c49); }
}
if (s0 === peg$FAILED) {
s0 = peg$currPos;
if (input.substr(peg$currPos, 2) === peg$c50) {
s1 = peg$c50;
peg$currPos += 2;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c51); }
}
if (s1 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c52();
}
s0 = s1;
if (s0 === peg$FAILED) {
s0 = peg$currPos;
if (input.substr(peg$currPos, 2) === peg$c53) {
s1 = peg$c53;
peg$currPos += 2;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c54); }
}
if (s1 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c55();
}
s0 = s1;
if (s0 === peg$FAILED) {
s0 = peg$currPos;
if (input.substr(peg$currPos, 2) === peg$c56) {
s1 = peg$c56;
peg$currPos += 2;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c57); }
}
if (s1 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c58();
}
s0 = s1;
if (s0 === peg$FAILED) {
s0 = peg$currPos;
if (input.substr(peg$currPos, 2) === peg$c59) {
s1 = peg$c59;
peg$currPos += 2;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c60); }
}
if (s1 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c61();
}
s0 = s1;
if (s0 === peg$FAILED) {
s0 = peg$currPos;
if (input.substr(peg$currPos, 2) === peg$c62) {
s1 = peg$c62;
peg$currPos += 2;
} else {
s1 = peg$FAILED;
if (peg$silentFails === 0) { peg$fail(peg$c63); }
}
if (s1 !== peg$FAILED) {
s2 = peg$currPos;
s3 = peg$currPos;
s4 = peg$parsehexDigit();
if (s4 !== peg$FAILED) {
s5 = peg$parsehexDigit();
if (s5 !== peg$FAILED) {
s6 = peg$parsehexDigit();
if (s6 !== peg$FAILED) {
s7 = peg$parsehexDigit();
if (s7 !== peg$FAILED) {
s4 = [s4, s5, s6, s7];
s3 = s4;
} else {
peg$currPos = s3;
s3 = peg$FAILED;
}
} else {
peg$currPos = s3;
s3 = peg$FAILED;
}
} else {
peg$currPos = s3;
s3 = peg$FAILED;
}
} else {
peg$currPos = s3;
s3 = peg$FAILED;
}
if (s3 !== peg$FAILED) {
s2 = input.substring(s2, peg$currPos);
} else {
s2 = s3;
}
if (s2 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c64(s2);
s0 = s1;
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
} else {
peg$currPos = s0;
s0 = peg$FAILED;
}
}
}
}
}
}
return s0;
}
function peg$parsechars() {
var s0, s1, s2;
s0 = peg$currPos;
s1 = [];
s2 = peg$parsechar();
if (s2 !== peg$FAILED) {
while (s2 !== peg$FAILED) {
s1.push(s2);
s2 = peg$parsechar();
}
} else {
s1 = peg$FAILED;
}
if (s1 !== peg$FAILED) {
peg$savedPos = s0;
s1 = peg$c65(s1);
}
s0 = s1;
return s0;
}
peg$result = peg$startRuleFunction();
if (peg$result !== peg$FAILED && peg$currPos === input.length) {
return peg$result;
} else {
if (peg$result !== peg$FAILED && peg$currPos < input.length) {
peg$fail({ type: "end", description: "end of input" });
}
throw peg$buildException(
null,
peg$maxFailExpected,
peg$maxFailPos < input.length ? input.charAt(peg$maxFailPos) : null,
peg$maxFailPos < input.length
? peg$computeLocation(peg$maxFailPos, peg$maxFailPos + 1)
: peg$computeLocation(peg$maxFailPos, peg$maxFailPos)
);
}
}
return {
SyntaxError: peg$SyntaxError,
parse: peg$parse
};
})();
this['IntlMessageFormatParser'] = src$parser$$default;
}).call(this);
//# sourceMappingURL=parser.js.map<|fim▁end|> |
return s0;
}
|
<|file_name|>__main__.py<|end_file_name|><|fim▁begin|># Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
from anki.syncserver import serve
<|fim▁hole|><|fim▁end|> | serve() |
<|file_name|>request.go<|end_file_name|><|fim▁begin|>package standard
import (
"io"
"io/ioutil"
"mime/multipart"
"net/http"
"strings"
"github.com/labstack/echo"
"github.com/labstack/echo/engine"
"github.com/labstack/gommon/log"
)
type (
// Request implements `engine.Request`.
Request struct {
*http.Request
header engine.Header
url engine.URL
logger *log.Logger
}
)
const (
defaultMemory = 32 << 20 // 32 MB
)
// NewRequest returns `Request` instance.
func NewRequest(r *http.Request, l *log.Logger) *Request {
return &Request{
Request: r,
url: &URL{URL: r.URL},
header: &Header{Header: r.Header},
logger: l,
}
}
// IsTLS implements `engine.Request#TLS` function.
func (r *Request) IsTLS() bool {
return r.Request.TLS != nil
}
// Scheme implements `engine.Request#Scheme` function.
func (r *Request) Scheme() string {
if r.IsTLS() {
return "https"
}
return "http"
}
// Host implements `engine.Request#Host` function.
func (r *Request) Host() string {
return r.Request.Host
}
// URL implements `engine.Request#URL` function.
func (r *Request) URL() engine.URL {
return r.url
}
// Header implements `engine.Request#URL` function.
func (r *Request) Header() engine.Header {
return r.header
}
// func Proto() string {
// return r.request.Proto()
// }
//
// func ProtoMajor() int {
// return r.request.ProtoMajor()
// }
//
// func ProtoMinor() int {
// return r.request.ProtoMinor()
// }
// ContentLength implements `engine.Request#ContentLength` function.
func (r *Request) ContentLength() int {
return int(r.Request.ContentLength)
}
// UserAgent implements `engine.Request#UserAgent` function.
func (r *Request) UserAgent() string {
return r.Request.UserAgent()
}
// RemoteAddress implements `engine.Request#RemoteAddress` function.
func (r *Request) RemoteAddress() string {
return r.RemoteAddr
}
// Method implements `engine.Request#Method` function.
func (r *Request) Method() string {<|fim▁hole|> return r.Request.Method
}
// SetMethod implements `engine.Request#SetMethod` function.
func (r *Request) SetMethod(method string) {
r.Request.Method = method
}
// URI implements `engine.Request#URI` function.
func (r *Request) URI() string {
return r.RequestURI
}
// SetURI implements `engine.Request#SetURI` function.
func (r *Request) SetURI(uri string) {
r.RequestURI = uri
}
// Body implements `engine.Request#Body` function.
func (r *Request) Body() io.Reader {
return r.Request.Body
}
// SetBody implements `engine.Request#SetBody` function.
func (r *Request) SetBody(reader io.Reader) {
r.Request.Body = ioutil.NopCloser(reader)
}
// FormValue implements `engine.Request#FormValue` function.
func (r *Request) FormValue(name string) string {
return r.Request.FormValue(name)
}
// FormParams implements `engine.Request#FormParams` function.
func (r *Request) FormParams() map[string][]string {
if strings.HasPrefix(r.header.Get(echo.HeaderContentType), echo.MIMEMultipartForm) {
if err := r.ParseMultipartForm(defaultMemory); err != nil {
r.logger.Error(err)
}
} else {
if err := r.ParseForm(); err != nil {
r.logger.Error(err)
}
}
return map[string][]string(r.Request.Form)
}
// FormFile implements `engine.Request#FormFile` function.
func (r *Request) FormFile(name string) (*multipart.FileHeader, error) {
_, fh, err := r.Request.FormFile(name)
return fh, err
}
// MultipartForm implements `engine.Request#MultipartForm` function.
func (r *Request) MultipartForm() (*multipart.Form, error) {
err := r.ParseMultipartForm(defaultMemory)
return r.Request.MultipartForm, err
}
// Cookie implements `engine.Request#Cookie` function.
func (r *Request) Cookie(name string) (engine.Cookie, error) {
c, err := r.Request.Cookie(name)
if err != nil {
return nil, echo.ErrCookieNotFound
}
return &Cookie{c}, nil
}
// Cookies implements `engine.Request#Cookies` function.
func (r *Request) Cookies() []engine.Cookie {
cs := r.Request.Cookies()
cookies := make([]engine.Cookie, len(cs))
for i, c := range cs {
cookies[i] = &Cookie{c}
}
return cookies
}
func (r *Request) reset(req *http.Request, h engine.Header, u engine.URL) {
r.Request = req
r.header = h
r.url = u
}<|fim▁end|> | |
<|file_name|>py_dep_analysis.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# This file contains utilities for understanding dependencies between python
# source files and tests.
#
# Utils are assumed to be used from top level ray/ folder, since that is how
# our tests are defined today.
#
# Example usage:
# To find all circular dependencies under ray/python/:
# python ci/travis/py_dep_analysis.py --mode=circular-dep
# To find all the RLlib tests that depend on a file:<|fim▁hole|># tons of time querying for available RLlib tests.
import argparse
import ast
import os
import re
import subprocess
import sys
from typing import Dict, List, Tuple
class DepGraph(object):
def __init__(self):
self.edges: Dict[str, Dict[str, bool]] = {}
self.ids: Dict[str, int] = {}
self.inv_ids: Dict[int, str] = {}
def _run_shell(args: List[str]) -> str:
return subprocess.check_output(args).decode(sys.stdout.encoding)
def list_rllib_tests(n: int = -1, test: str = None) -> Tuple[str, List[str]]:
"""List RLlib tests.
Args:
n: return at most n tests. all tests if n = -1.
test: only return information about a specific test.
"""
tests_res = _run_shell(
["bazel", "query", "tests(//python/ray/rllib:*)", "--output", "label"]
)
all_tests = []
# Strip, also skip any empty lines
tests = [t.strip() for t in tests_res.splitlines() if t.strip()]
for t in tests:
if test and t != test:
continue
src_out = _run_shell(
[
"bazel",
"query",
'kind("source file", deps({}))'.format(t),
"--output",
"label",
]
)
srcs = [f.strip() for f in src_out.splitlines()]
srcs = [f for f in srcs if f.startswith("//python") and f.endswith(".py")]
if srcs:
all_tests.append((t, srcs))
# Break early if smoke test.
if n > 0 and len(all_tests) >= n:
break
return all_tests
def _new_dep(graph: DepGraph, src_module: str, dep: str):
"""Create a new dependency between src_module and dep."""
if dep not in graph.ids:
graph.ids[dep] = len(graph.ids)
src_id = graph.ids[src_module]
dep_id = graph.ids[dep]
if src_id not in graph.edges:
graph.edges[src_id] = {}
graph.edges[src_id][dep_id] = True
def _new_import(graph: DepGraph, src_module: str, dep_module: str):
"""Process a new import statement in src_module."""
# We don't care about system imports.
if not dep_module.startswith("ray"):
return
_new_dep(graph, src_module, dep_module)
def _is_path_module(module: str, name: str, _base_dir: str) -> bool:
"""Figure out if base.sub is a python module or not."""
# Special handling for _raylet, which is a C++ lib.
if module == "ray._raylet":
return False
bps = ["python"] + module.split(".")
path = os.path.join(_base_dir, os.path.join(*bps), name + ".py")
if os.path.isfile(path):
return True # file module
return False
def _new_from_import(
graph: DepGraph, src_module: str, dep_module: str, dep_name: str, _base_dir: str
):
"""Process a new "from ... import ..." statement in src_module."""
# We don't care about imports outside of ray package.
if not dep_module or not dep_module.startswith("ray"):
return
if _is_path_module(dep_module, dep_name, _base_dir):
# dep_module.dep_name points to a file.
_new_dep(graph, src_module, _full_module_path(dep_module, dep_name))
else:
# sub is an obj on base dir/file.
_new_dep(graph, src_module, dep_module)
def _process_file(graph: DepGraph, src_path: str, src_module: str, _base_dir=""):
"""Create dependencies from src_module to all the valid imports in src_path.
Args:
graph: the DepGraph to be added to.
src_path: .py file to be processed.
src_module: full module path of the source file.
_base_dir: use a different base dir than current dir. For unit testing.
"""
with open(os.path.join(_base_dir, src_path), "r") as in_f:
tree = ast.parse(in_f.read())
for node in ast.walk(tree):
if isinstance(node, ast.Import):
for alias in node.names:
_new_import(graph, src_module, alias.name)
elif isinstance(node, ast.ImportFrom):
for alias in node.names:
_new_from_import(
graph, src_module, node.module, alias.name, _base_dir
)
def build_dep_graph() -> DepGraph:
"""Build index from py files to their immediate dependees."""
graph = DepGraph()
# Assuming we run from root /ray directory.
# Follow links since rllib is linked to /rllib.
for root, sub_dirs, files in os.walk("python", followlinks=True):
if _should_skip(root):
continue
module = _bazel_path_to_module_path(root)
# Process files first.
for f in files:
if not f.endswith(".py"):
continue
full = _full_module_path(module, f)
if full not in graph.ids:
graph.ids[full] = len(graph.ids)
# Process file:
_process_file(graph, os.path.join(root, f), full)
# Build reverse index for convenience.
graph.inv_ids = {v: k for k, v in graph.ids.items()}
return graph
def _full_module_path(module, f) -> str:
if f == "__init__.py":
# __init__ file for this module.
# Full path is the same as the module name.
return module
fn = re.sub(r"\.py$", "", f)
if not module:
return fn
return module + "." + fn
def _should_skip(d: str) -> bool:
"""Skip directories that should not contain py sources."""
if d.startswith("python/.eggs/"):
return True
if d.startswith("python/."):
return True
if d.startswith("python/build"):
return True
if d.startswith("python/ray/cpp"):
return True
return False
def _bazel_path_to_module_path(d: str) -> str:
"""Convert a Bazel file path to python module path.
Example: //python/ray/rllib:xxx/yyy/dd -> ray.rllib.xxx.yyy.dd
"""
# Do this in 3 steps, so all of 'python:', 'python/', or '//python', etc
# will get stripped.
d = re.sub(r"^\/\/", "", d)
d = re.sub(r"^python", "", d)
d = re.sub(r"^[\/:]", "", d)
return d.replace("/", ".").replace(":", ".")
def _file_path_to_module_path(f: str) -> str:
"""Return the corresponding module path for a .py file."""
dir, fn = os.path.split(f)
return _full_module_path(_bazel_path_to_module_path(dir), fn)
def _depends(
graph: DepGraph, visited: Dict[int, bool], tid: int, qid: int
) -> List[int]:
"""Whether there is a dependency path from module tid to module qid.
Given graph, and without going through visited.
"""
if tid not in graph.edges or qid not in graph.edges:
return []
if qid in graph.edges[tid]:
# tid directly depends on qid.
return [tid, qid]
for c in graph.edges[tid]:
if c in visited:
continue
visited[c] = True
# Reduce to a question of whether there is a path from c to qid.
ds = _depends(graph, visited, c, qid)
if ds:
# From tid -> c -> qid.
return [tid] + ds
return []
def test_depends_on_file(
graph: DepGraph, test: Tuple[str, Tuple[str]], path: str
) -> List[int]:
"""Give dependency graph, check if a test depends on a specific .py file.
Args:
graph: the dependency graph.
test: information about a test, in the format of:
[test_name, (src files for the test)]
"""
query = _file_path_to_module_path(path)
if query not in graph.ids:
# Not a file that we care about.
return []
t, srcs = test
# Skip tuned_examples/ and examples/ tests.
if t.startswith("//python/ray/rllib:examples/"):
return []
for src in srcs:
if src == "ray.rllib.tests.run_regression_tests":
return []
tid = _file_path_to_module_path(src)
if tid not in graph.ids:
# Not a test that we care about.
# TODO(jungong): What tests are these?????
continue
branch = _depends(graph, {}, graph.ids[tid], graph.ids[query])
if branch:
return branch
# Does not depend on file.
return []
def _find_circular_dep_impl(graph: DepGraph, id: str, branch: str) -> bool:
if id not in graph.edges:
return False
for c in graph.edges[id]:
if c in branch:
# Found a circle.
branch.append(c)
return True
branch.append(c)
if _find_circular_dep_impl(graph, c, branch):
return True
branch.pop()
return False
def find_circular_dep(graph: DepGraph) -> Dict[str, List[int]]:
"""Find circular dependencies among a dependency graph."""
known = {}
circles = {}
for m, id in graph.ids.items():
branch = []
if _find_circular_dep_impl(graph, id, branch):
if branch[-1] in known:
# Already knew, skip.
continue
# Since this is a cycle dependency, any step along the circle
# will form a different circle.
# So we mark every entry on this circle known.
for n in branch:
known[n] = True
# Mark that module m contains a potential circular dep.
circles[m] = branch
return circles
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--mode",
type=str,
default="test-dep",
help=(
"test-dep: find dependencies for a specified test. "
"circular-dep: find circular dependencies in "
"the specific codebase."
),
)
parser.add_argument(
"--file", type=str, help="Path of a .py source file relative to --base_dir."
)
parser.add_argument("--test", type=str, help="Specific test to check.")
parser.add_argument(
"--smoke-test", action="store_true", help="Load only a few tests for testing."
)
args = parser.parse_args()
print("building dep graph ...")
graph = build_dep_graph()
print(
"done. total {} files, {} of which have dependencies.".format(
len(graph.ids), len(graph.edges)
)
)
if args.mode == "circular-dep":
circles = find_circular_dep(graph)
print("Found following circular dependencies: \n")
for m, b in circles.items():
print(m)
for n in b:
print(" ", graph.inv_ids[n])
print()
if args.mode == "test-dep":
assert args.file, "Must specify --file for the query."
# Only support RLlib tests for now.
# The way Tune tests are defined, they all depend on
# the entire tune codebase.
tests = list_rllib_tests(5 if args.smoke_test else -1, args.test)
print("Total # of tests: ", len(tests))
for t in tests:
branch = test_depends_on_file(graph, t, args.file)
if branch:
print("{} depends on {}".format(t[0], args.file))
# Print some debugging info.
for n in branch:
print(" ", graph.inv_ids[n])
else:
print("{} does not depend on {}".format(t[0], args.file))<|fim▁end|> | # python ci/travis/py_dep_analysis.py --mode=test-dep \
# --file=python/ray/tune/tune.py
# For testing, add --smoke-test to any commands, so it doesn't spend |
<|file_name|>MessageWindow.py<|end_file_name|><|fim▁begin|># -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# MessageWindow.py - scripts and GUI for main (walk) window
###################################################
import GemRB
import GUIClasses
import GUICommon
import GUICommonWindows
import CommonWindow
import GUIWORLD
from GameCheck import MAX_PARTY_SIZE
from GUIDefines import *
MessageWindow = 0
ActionsWindow = 0
PortraitWindow = 0
OptionsWindow = 0
MessageTA = 0
def OnLoad():
global MessageWindow, ActionsWindow, PortraitWindow, OptionsWindow
GemRB.GameSetPartySize(MAX_PARTY_SIZE)
GemRB.GameSetProtagonistMode(0)
GemRB.LoadWindowPack (GUICommon.GetWindowPack())
GemRB.SetInfoTextColor(0,255,0,255)
ActionsWindow = GemRB.LoadWindow(0)
OptionsWindow = GemRB.LoadWindow(2)
MessageWindow = GemRB.LoadWindow(7)
PortraitWindow = GUICommonWindows.OpenPortraitWindow (1)
<|fim▁hole|> GemRB.SetVar ("ActionsWindow", ActionsWindow.ID)
GemRB.SetVar ("OptionsWindow", OptionsWindow.ID)
GemRB.SetVar ("MessageWindow", -1)
GemRB.SetVar ("OtherWindow", -1)
GemRB.SetVar ("ActionsPosition", 1) #Bottom
GemRB.SetVar ("OptionsPosition", 1) #Bottom
GemRB.SetVar ("MessagePosition", 1) #Bottom
GemRB.SetVar ("OtherPosition", 0) #Left
GemRB.GameSetScreenFlags (0, OP_SET)
CloseButton= MessageWindow.GetControl (0)
CloseButton.SetText(28082)
CloseButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CommonWindow.OnDecreaseSize)
CloseButton.SetFlags (IE_GUI_BUTTON_DEFAULT | IE_GUI_BUTTON_MULTILINE, OP_OR)
OpenButton = OptionsWindow.GetControl (10)
OpenButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CommonWindow.OnIncreaseSize)
# Select all
Button = ActionsWindow.GetControl (1)
Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, GUICommon.SelectAllOnPress)
# Select all
Button = ActionsWindow.GetControl (3)
Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, GUICommonWindows.ActionStopPressed)
FormationButton = ActionsWindow.GetControl (4)
FormationButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, GUIWORLD.OpenFormationWindow)
GUICommonWindows.SetupClockWindowControls (ActionsWindow)
GUICommonWindows.SetupMenuWindowControls (OptionsWindow)
UpdateControlStatus ()
def UpdateControlStatus ():
global MessageWindow, PortraitWindow, ActionsWindow, OptionsWindow, MessageTA
Expand = GemRB.GetMessageWindowSize() & (GS_DIALOGMASK|GS_DIALOG)
hideflags = GemRB.HideGUI ()
if Expand:
GemRB.SetVar ("MessageWindow", MessageWindow.ID)
GemRB.SetVar ("PortraitWindow", -1)
GemRB.SetVar ("ActionsWindow", -1)
GemRB.SetVar ("OptionsWindow", -1)
MessageTA = GUIClasses.GTextArea(MessageWindow.ID, GemRB.GetVar ("MessageTextArea"))
MessageTA.SetStatus (IE_GUI_CONTROL_FOCUSED)
Label = MessageWindow.GetControl (0x10000003)
Label.SetText (str (GemRB.GameGetPartyGold ()))
else:
GemRB.SetVar ("MessageWindow", -1)
GemRB.SetVar ("PortraitWindow", PortraitWindow.ID)
GemRB.SetVar ("ActionsWindow", ActionsWindow.ID)
GemRB.SetVar ("OptionsWindow", OptionsWindow.ID)
GUICommon.GameControl.SetStatus(IE_GUI_CONTROL_FOCUSED)
if hideflags:
GemRB.UnhideGUI ()<|fim▁end|> | MessageTA = MessageWindow.GetControl (1)
MessageTA.SetFlags (IE_GUI_TEXTAREA_AUTOSCROLL|IE_GUI_TEXTAREA_HISTORY)
GemRB.SetVar ("MessageTextArea", MessageTA.ID) |
<|file_name|>test_import.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
"""Test class for Host Collection CLI"""
import csv
import os
import re
import tempfile
from fauxfactory import gen_string
from itertools import product
from random import sample
from robottelo import ssh
from robottelo.cli.base import CLIReturnCodeError
from robottelo.cli.contenthost import ContentHost
from robottelo.cli.contentview import ContentView
from robottelo.cli.factory import make_org
from robottelo.cli.hostcollection import HostCollection
from robottelo.cli.import_ import Import
from robottelo.cli.org import Org
from robottelo.cli.repository import Repository
from robottelo.cli.subscription import Subscription
from robottelo.cli.template import Template
from robottelo.cli.user import User
from robottelo.decorators import bz_bug_is_open, skip_if_bug_open
from robottelo.helpers import prepare_import_data
from robottelo.test import CLITestCase
def clean_transdata():
"""Remove transition dataset
"""
ssh.command(u'rm -rf "${HOME}"/.transition_data "${HOME}"/puppet_work_dir')
def build_csv_file(rows=None, dirname=None):
"""Generate a csv file, feed it by the provided data
(a list of dictionary objects) and return a path to it
"""
if rows is None:
rows = [{}]
file_name = tempfile.mkstemp()[1]
with open(file_name, 'wb') as csv_file:
csv_writer = csv.DictWriter(
csv_file, fieldnames=rows[0].keys(), lineterminator='\n'
)
csv_writer.writeheader()
for row in rows:
csv_writer.writerow({
key: val.encode('utf8') for key, val in row.items()
})
if dirname is None:
remote_file = file_name
else:
remote_file = os.path.join(dirname, os.path.basename(file_name))
ssh.upload_file(file_name, remote_file)
os.remove(file_name)
return remote_file
def import_content_hosts(files, tmp_dir):
"""Import all Content Hosts from the Sat5 export csv file including all
the required entities.
:param files: A dictionary of CSV file names and paths
:param tmp_dir: A path to the dataset
:returns: A dictionary of Import objects for every entity
"""
import_org = Import.organization_with_tr_data(
{'csv-file': files['users']}
)
import_repo = Import.repository_with_tr_data({
'csv-file': files['repositories'],
'synchronize': True,
'wait': True,
})
import_cv = Import.content_view_with_tr_data({
u'csv-file': files['content-views'],
u'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
u'verbose': True
})
# proceed with importing the content hosts
import_chosts = Import.content_host_with_tr_data({
u'csv-file': files['system-profiles'],
u'export-directory': tmp_dir,
u'verbose': True
})
return {
u'organizations': import_org,
u'repositories': import_repo,
u'content_views': import_cv,
u'content_hosts': import_chosts,
}
def update_csv_values(files, new_data, dirname=None):
"""Build CSV file(s) with updated key values provided as an argument
in order to randomize the dataset with keeping the organization_id
mappings
:param files: A dictionary with transition files and their paths on
a remote server.
:param new_data: A dictionary containing a file name as a key and a list
of dictionaries representing the individual changes to the CSV.
For example::
{'users': [
{
u'key_id': u'1',
u'organization': u'updated_organization_name_1',
u'username': u'updated_user_name_1',
},
{
u'key_id': u'2',
u'organization': u'updated_organization_name_2',
u'username': u'updated_user_name_2',
}
]}
:param dirname: A string. Target destination for the new CSV files.
:returns: A dictionary with updated CSV file paths.
"""
for file_ in new_data:
updated = False
result = Import.csv_to_dataset([files[file_]])
for change in new_data[file_]:
key = change.get('key')
for record in result:
if record.get(key) == change['key_id']:
record.update(change)
del record['key_id']
del record['key']
updated = True
if updated:
files[file_] = build_csv_file(result, dirname)
return files
def verify_rh_repos(tr_data, channels_file):
"""Verifies that appropriate Products and Content Views have been created
for the enabled Red Hat repository.
:param tr_data: Transition data of the Import command
:param channels_file: Sat5 transition file containing the channels to be
imported/enabled
:returns: A tuple of lists containing info about all related Products and
Content Views
"""
rh_repos = [
repo for repo in Import.csv_to_dataset([channels_file])
if (
repo['channel_name'].startswith('Red Hat') or
repo['channel_name'].startswith('RHN')
)
]
repo_list = []
cv_list = []
for record in product(rh_repos, tr_data):
repo_list.append(
Repository.list({
u'organization-id': record[1]['sat6'],
u'name': Import.repos[record[0]['channel_label']]
})
)
cv_list.append(
ContentView.info({
u'organization-id': record[1]['sat6'],
u'name': record[0]['channel_name']
})['id']
)
return repo_list, cv_list
def get_sat6_id(
    entity_dict, transition_dict, tr_key='sat5', ent_key='organization_id'
):
    """Updates the dictionaries of the import entities with 'sat6' key/value
    pairs, keeping the Satellite 6 reference to each imported entity.

    :param entity_dict: A dictionary holding the info for an entity to be
        imported (typically a product of csv_to_dataset())
    :param transition_dict: A dictionary holding the transition data for the
        imported entity (typically a product of Import.*_with_tr_data())
    :param tr_key: A string identifying a transition key field to identify
        an entity id
    :param ent_key: A string identifying entity key field to identify
        an entity id
    :returns: entity_dict updated by 'sat6' key/value pair
    """
    # match every entity against every transition record; on an id match,
    # stamp the entity (in place) with its Satellite 6 id
    for entity in entity_dict:
        for tr_record in transition_dict:
            if entity[ent_key] == tr_record[tr_key]:
                entity['sat6'] = tr_record['sat6']
    return entity_dict
def gen_import_org_data():
    """Random data for Organization Import tests.

    :returns: A tuple of two ``{'users': [...]}`` dictionaries -- one with
        alphanumeric and one with UTF-8 organization names.
    """
    org_ids = [type(u'')(identifier) for identifier in sample(range(1, 1000), 3)]
    datasets = []
    for str_type in ('alphanumeric', 'utf8'):
        users = []
        for index, org_id in enumerate(org_ids):
            users.append({
                u'key': 'organization_id',
                u'key_id': type(u'')(index + 1),
                u'organization_id': org_id,
                u'organization': gen_string(str_type),
            })
        datasets.append({'users': users})
    return tuple(datasets)
def gen_import_org_manifest_data():
    """Random data for Organization-with-Manifest Import tests.

    :returns: A tuple of ``{'users': [...]}`` dictionaries; the UTF-8
        variant is included only while BZ#1260722 is closed.
    """
    org_ids = [type(u'')(identifier) for identifier in sample(range(1, 1000), 3)]

    def make_users(str_type):
        # one randomized user record per sampled org id
        return {'users': [{
            u'key': 'organization_id',
            u'key_id': type(u'')(index + 1),
            u'organization_id': org_ids[index],
            u'organization': gen_string(str_type),
        } for index in range(len(org_ids))]}

    random_data = (make_users('alphanumeric'),)
    # the utf8 dataset is skipped while the bug is still open
    if not bz_bug_is_open('1260722'):
        random_data = random_data + (make_users('utf8'),)
    return random_data
def gen_import_user_data():
    """Random data for User Import tests.

    :returns: A tuple of two ``{'users': [...]}`` dictionaries -- one with
        alphanumeric and one with UTF-8 organization/user names.
    """
    org_ids = [type(u'')(identifier) for identifier in sample(range(1, 1000), 3)]
    datasets = []
    for str_type in ('alphanumeric', 'utf8'):
        users = []
        for index, org_id in enumerate(org_ids):
            users.append({
                u'key': u'organization_id',
                u'key_id': type(u'')(index + 1),
                u'organization_id': org_id,
                u'organization': gen_string(str_type),
                u'username': gen_string(str_type),
            })
        datasets.append({'users': users})
    return tuple(datasets)
def gen_import_hostcol_data():
    """Random data for Host Collection Import tests.

    :returns: A one-item tuple holding a dict with randomized 'users' and
        'system-groups' change lists sharing the same sampled org ids.
    """
    org_ids = [type(u'')(identifier) for identifier in sample(range(1, 1000), 3)]
    random_data = {'users': [], 'system-groups': []}
    for index, org_id in enumerate(org_ids):
        key_id = type(u'')(index + 1)
        random_data['users'].append({
            u'key': 'organization_id',
            u'key_id': key_id,
            u'organization_id': org_id,
            u'organization': gen_string('alphanumeric'),
        })
        random_data['system-groups'].append({
            u'key': u'org_id',
            u'key_id': key_id,
            u'org_id': org_id,
            u'name': gen_string('alphanumeric'),
        })
    return (random_data,)
def gen_import_repo_data():
    """Random data for Repository Import tests.

    :returns: A one-item tuple holding a dict with randomized 'users' and
        'repositories' change lists sharing the same sampled org ids.
    """
    org_ids = [type(u'')(identifier) for identifier in sample(range(1, 1000), 3)]
    random_data = {'users': [], 'repositories': []}
    for index, org_id in enumerate(org_ids):
        key_id = type(u'')(index + 1)
        random_data['users'].append({
            u'key': 'organization_id',
            u'key_id': key_id,
            u'organization_id': org_id,
            u'organization': gen_string('alphanumeric'),
        })
        random_data['repositories'].append({
            u'key': 'org_id',
            u'key_id': key_id,
            u'org_id': org_id,
        })
    return (random_data,)
def gen_import_cv_data():
    """Random data for Content View Import tests.

    :returns: A one-item tuple holding a dict with three randomized 'users'
        records and three matching 'content-views' records.
    """
    users = [{
        u'key': 'organization_id',
        u'key_id': type(u'')(index + 1),
        u'organization': gen_string('alphanumeric'),
    } for index in range(3)]
    content_views = [{
        u'key': u'org_id',
        u'key_id': type(u'')(index + 1),
        u'channel_name': gen_string('alphanumeric'),
        u'channel_label': gen_string('alphanumeric'),
    } for index in range(3)]
    return ({u'users': users, u'content-views': content_views},)
def gen_import_rh_repo_data():
    """Random data for RH Repos Import tests.

    :returns: A one-item tuple holding a dict with randomized 'users' and
        'channels' change lists.
    """
    org_ids = [type(u'')(identifier) for identifier in sample(range(1, 1000), 3)]
    users = [{
        u'key': u'organization_id',
        u'key_id': type(u'')(index + 1),
        u'organization_id': org_ids[index],
        u'organization': gen_string('alphanumeric'),
    } for index in range(len(org_ids))]
    # wipe all channel names and labels excepting channel id 106
    channels = [{
        u'key': u'channel_id',
        u'key_id': type(u'')(channel_id),
        u'channel_label': u'',
        u'channel_name': gen_string('alphanumeric'),
    } for channel_id in set(range(101, 113)) - {106}]
    # map the channels' org ids onto the sampled org ids
    channels += [{
        u'key': u'org_id',
        u'key_id': type(u'')(index + 1),
        u'org_id': org_ids[index],
    } for index in range(len(org_ids))]
    return ({u'users': users, u'channels': channels},)
def gen_import_chost_data():
    """Random data for Content Host Import tests.

    :returns: A one-item tuple holding a dict with randomized 'users',
        'content-views', 'channels' and 'system-profiles' change lists.
    """
    org_ids = [type(u'')(identifier) for identifier in sample(range(1, 1000), 3)]
    users = [{
        u'key': u'organization_id',
        u'key_id': type(u'')(index + 1),
        u'organization': gen_string('alphanumeric'),
    } for index in range(len(org_ids))]
    content_views = [{
        u'key': u'org_id',
        u'key_id': type(u'')(index + 1),
        u'channel_name': gen_string('alphanumeric'),
        u'channel_label': gen_string('alphanumeric'),
    } for index in range(len(org_ids))]
    # wipe all channel labels to make hammer skip the sync
    channels = [{
        u'key': u'channel_id',
        u'key_id': type(u'')(channel_id),
        u'channel_label': u'',
        u'channel_name': gen_string('alphanumeric'),
    } for channel_id in set(range(101, 113))]
    system_profiles = [{
        u'key': u'server_id',
        u'key_id': type(u'')(1000010000 + profile_id),
        u'base_channel_id': u'110',
        u'child_channel_id': u'None;111',
    } for profile_id in set(range(8, 11))]
    return ({
        u'users': users,
        u'content-views': content_views,
        u'channels': channels,
        u'system-profiles': system_profiles,
    },)
def gen_import_snippet_data():
    """Random data for Kickstart Snippet Import tests.

    :returns: A one-item tuple holding a dict with randomized 'users' and
        'kickstart-scripts' change lists sharing the same sampled org ids.
    """
    org_ids = [type(u'')(identifier) for identifier in sample(range(1, 1000), 3)]
    random_data = {'users': [], 'kickstart-scripts': []}
    for index, org_id in enumerate(org_ids):
        key_id = type(u'')(index + 1)
        random_data['users'].append({
            u'key': 'organization_id',
            u'key_id': key_id,
            u'organization_id': org_id,
            u'organization': gen_string('alphanumeric'),
        })
        random_data['kickstart-scripts'].append({
            u'key': 'org_id',
            u'key_id': key_id,
            u'org_id': org_id,
            u'script_name': gen_string('utf8'),
            u'kickstart_label': gen_string('utf8'),
            # pick one of the two valid values at random
            u'script_type': sample([u'pre', u'post'], 1).pop(),
            u'chroot': sample([u'Y', u'N'], 1).pop(),
        })
    return (random_data,)
def gen_import_config_files_data():
    """Random data for Config File Import tests.

    :returns: A one-item tuple holding a dict with randomized 'users' and
        'config-files-latest' change lists sharing the same sampled org ids.
    """
    org_ids = [type(u'')(identifier) for identifier in sample(range(1, 1000), 3)]
    random_data = {'users': [], 'config-files-latest': []}
    for index, org_id in enumerate(org_ids):
        key_id = type(u'')(index + 1)
        random_data['users'].append({
            u'key': 'organization_id',
            u'key_id': key_id,
            u'organization_id': org_id,
            u'organization': gen_string('alphanumeric'),
        })
        random_data['config-files-latest'].append({
            u'key': 'org_id',
            u'key_id': key_id,
            u'org_id': org_id,
        })
    return (random_data,)
class TestImport(CLITestCase):
"""Import CLI tests.
    All default tests pass no options to the import object
In such case methods download a default data set from URL
specified in robottelo.properties.
"""
    @classmethod
    def setUpClass(cls):
        super(TestImport, cls).setUpClass()
        # prepare the default dataset
        # default_dataset is indexed as: [0] a temporary directory path,
        # [1] a mapping of transition file names to their CSV paths
        cls.default_dataset = prepare_import_data()
        # the channels export doubles as the content-views transition file
        cls.default_dataset[1]['content-views'] = os.path.join(
            cls.default_dataset[0],
            'exports/CHANNELS/export.csv',
        )
    @classmethod
    def tearDownClass(cls):
        # remove the remote temporary dataset directory created for the class
        ssh.command(u'rm -r {0}'.format(cls.default_dataset[0]))
        super(TestImport, cls).tearDownClass()
    def test_import_orgs_default(self):
        """@test: Import all organizations from the default data set
        (predefined source).
        @feature: Import Organizations
        @assert: 3 Organizations are created
        """
        for test_data in gen_import_org_data():
            with self.subTest(test_data):
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                # randomize the org names inside the default CSVs
                files = update_csv_values(files, test_data, tmp_dir)
                Import.organization({'csv-file': files['users']})
                # now to check whether the orgs from csv appeared in satellite
                for org in Import.csv_to_dataset([files['users']]):
                    Org.info({'name': org['organization']})
                clean_transdata()
    def test_import_orgs_manifests(self):
        """@test: Import all organizations from the default data set
        (predefined source) and upload manifests for each of them
        @feature: Import Organizations including Manifests
        @assert: 3 Organizations are created with 3 manifests uploaded
        """
        for test_data in gen_import_org_manifest_data():
            with self.subTest(test_data):
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                Import.organization_with_tr_data_manifests({
                    'csv-file': files['users'],
                })
                # now to check whether the orgs from csv appeared in satellite
                orgs = set(org['name'] for org in Org.list())
                imp_orgs = set(
                    org['organization'] for
                    org in Import.csv_to_dataset([files['users']])
                )
                self.assertTrue(imp_orgs.issubset(orgs))
                for org in imp_orgs:
                    # NOTE(review): index 3 is assumed to be the history line
                    # carrying the manifest upload status -- confirm against
                    # the hammer output format
                    manifest_history = Subscription.manifest_history({
                        'organization': org,
                    })[3]
                    self.assertIn('SUCCESS', manifest_history)
                clean_transdata()
    def test_reimport_orgs_default_negative(self):
        """@test: Try to Import all organizations from the predefined source
        and try to import them again
        @feature: Import Organizations twice
        @assert: 2nd Import will result in No Action Taken
        """
        for test_data in gen_import_org_data():
            with self.subTest(test_data):
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                Import.organization({'csv-file': files['users']})
                # the second import of the same CSV must not change the orgs
                orgs_before = Org.list()
                Import.organization({'csv-file': files['users']})
                self.assertEqual(orgs_before, Org.list())
                clean_transdata()
    def test_import_orgs_recovery(self):
        """@test: Try to Import organizations with the same name to invoke
        usage of a recovery strategy (rename, map, none)
        @feature: Import Organizations Recover
        @assert: 2nd Import will result in No Action Taken, 3rd one will rename
        the new organizations, and the 4th one will map them
        """
        for test_data in gen_import_org_data():
            with self.subTest(test_data):
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                # initial import
                Import.organization({'csv-file': files['users']})
                # clear the .transition_data to clear the transition mapping
                ssh.command('rm -rf "${HOME}"/.transition_data')
                # use the 'none' strategy: no orgs may be added or removed
                orgs_before = Org.list()
                Import.organization({
                    'csv-file': files['users'], 'recover': 'none'
                })
                self.assertEqual(orgs_before, Org.list())
                # use the default (rename) strategy
                imp_rename = Import.organization_with_tr_data({
                    'csv-file': files['users'],
                })
                # index 1 of the result holds the transition (sat5->sat6)
                # records; one per imported org
                self.assertEqual(
                    len(imp_rename[1]), len(test_data['users'])
                )
                for record in imp_rename[1]:
                    Org.info({'id': record['sat6']})
                Import.organization({
                    'csv-file': files['users'], 'delete': True
                })
                # use the 'map' strategy
                imp_map = Import.organization_with_tr_data({
                    'csv-file': files['users'], 'recover': 'map',
                })
                for record in imp_map[1]:
                    Org.info({'id': record['sat6']})
                Import.organization({
                    'csv-file': files['users'], 'delete': True
                })
                clean_transdata()
    def test_merge_orgs(self):
        """@test: Try to Import all organizations and their users from CSV
        to a mapped organization.
        @feature: Import User Mapped Org
        @assert: 3 Organizations Mapped and their Users created
        in a single Organization
        """
        for test_data in gen_import_user_data():
            with self.subTest(test_data):
                # create a new Organization and prepare CSV files
                new_org = make_org()
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                # remote file that receives the newly generated passwords
                pwdfile = os.path.join(tmp_dir, gen_string('alpha', 6))
                files = update_csv_values(files, test_data, tmp_dir)
                # map all orgs from the CSV into the single new org
                Import.organization({
                    'csv-file': files['users'],
                    'into-org-id': new_org['id'],
                    'verbose': True,
                })
                Import.user({
                    'csv-file': files['users'], 'new-passwords': pwdfile
                })
                # list users by org-id and check whether
                # users from csv are in listing
                users = User.list({u'organization-id': new_org['id']})
                logins = set(user['login'] for user in users)
                imp_users = set(
                    user['username']
                    for user in Import.csv_to_dataset([files['users']])
                )
                self.assertTrue(all((user in logins for user in imp_users)))
                clean_transdata()
def test_import_users_default(self):
"""@test: Import all 3 users from the default data set (predefined
source).
@feature: Import Users
@assert: 3 Users created
"""
for test_data in gen_import_user_data():
with self.subTest(test_data):
tmp_dir = self.default_dataset[0]
files = dict(self.default_dataset[1])<|fim▁hole|> 'csv-file': files['users'], 'new-passwords': pwdfile,
})
# list the users and check whether
# users from csv are in the listing
logins = set(user['login'] for user in User.list())
imp_users = set(
user['username']
for user in Import.csv_to_dataset([files['users']])
)
self.assertTrue(imp_users.issubset(logins))
clean_transdata()
    def test_reimport_users_default_negative(self):
        """@test: Try to Import all users from the
        predefined source and try to import them again
        @feature: Repetitive User Import
        @assert: 2nd Import will result in No Action Taken
        """
        for test_data in gen_import_user_data():
            with self.subTest(test_data):
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                pwdfile = os.path.join(tmp_dir, gen_string('alpha', 6))
                # Import the organizations first
                Import.organization({'csv-file': files['users']})
                Import.user({
                    'csv-file': files['users'], 'new-passwords': pwdfile,
                })
                # drop the password file so the re-import can recreate it
                ssh.command(u'rm -rf {0}'.format(pwdfile))
                users_before = set(user['login'] for user in User.list())
                Import.user({
                    'csv-file': files['users'], 'new-passwords': pwdfile,
                })
                # the re-import must not have created any additional users
                users_after = set(user['login'] for user in User.list())
                self.assertTrue(users_after.issubset(users_before))
                clean_transdata()
    def test_import_users_merge(self):
        """@test: Try to Merge users with the same name using 'merge-users'
        option.
        @feature: Import Users Map-users
        @assert: Users imported in 2nd import are being mapped to the existing
        ones with the same name
        """
        for test_data in gen_import_user_data():
            with self.subTest(test_data):
                # prepare the data
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                pwdfiles = [
                    os.path.join(tmp_dir, gen_string('alpha', 6))
                    for _ in range(2)
                ]
                # initial import
                Import.organization({'csv-file': files['users']})
                Import.user({
                    'csv-file': files['users'],
                    'new-passwords': pwdfiles[0],
                })
                # clear the .transition_data to clear the transition mapping
                ssh.command('rm -rf "${HOME}"/.transition_data/users*')
                # import users using merge-users option
                import_merge = Import.user_with_tr_data({
                    'csv-file': files['users'],
                    'new-passwords': pwdfiles[1],
                    'merge-users': True,
                })
                # every merged user must resolve to an existing sat6 user id
                for record in import_merge[1]:
                    self.assertNotEqual(User.info({'id': record['sat6']}), '')
                clean_transdata()
    def test_import_users_recovery(self):
        """@test: Try to Import users with the same name to invoke
        usage of a recovery strategy (rename, map, none)
        @feature: Import Users Recover
        @assert: 2nd Import will rename new users, 3rd one will result
        in No Action Taken and 4th import will map them
        """
        for test_data in gen_import_user_data():
            with self.subTest(test_data):
                # prepare the data
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                # one password file per import run
                pwdfiles = [
                    os.path.join(tmp_dir, gen_string('alpha', 6))
                    for _ in range(4)
                ]
                # initial import
                Import.organization({'csv-file': files['users']})
                Import.user({
                    'csv-file': files['users'],
                    'new-passwords': pwdfiles[0],
                })
                # clear the .transition_data to clear the transition mapping
                ssh.command('rm -rf "${HOME}"/.transition_data/users*')
                # use the default (rename) strategy
                import_rename = Import.user_with_tr_data({
                    'csv-file': files['users'],
                    'new-passwords': pwdfiles[1],
                })
                for record in import_rename[1]:
                    User.info({'id': record['sat6']})
                Import.user({'csv-file': files['users'], 'delete': True})
                # use the 'none' strategy: the user set must stay unchanged
                users_before = set(user['login'] for user in User.list())
                Import.user({
                    'csv-file': files['users'],
                    'new-passwords': pwdfiles[2],
                    'recover': 'none',
                })
                users_after = set(user['login'] for user in User.list())
                self.assertEqual(users_before, users_after)
                # use the 'map' strategy
                import_map = Import.user_with_tr_data({
                    'csv-file': files['users'],
                    'recover': 'map',
                    'new-passwords': pwdfiles[3],
                })
                for record in import_map[1]:
                    User.info({'id': record['sat6']})
                # do the cleanup
                ssh.command(u'rm -rf {0}'.format(' '.join(pwdfiles)))
                clean_transdata()
    def test_import_host_collections_default(self):
        """@test: Import all System Groups from the default data set
        (predefined source) as the Host Collections.
        @feature: Import Host-Collections
        @assert: 3 Host Collections created
        """
        for test_data in gen_import_hostcol_data():
            with self.subTest(test_data):
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                # import the prerequisites (organizations)
                import_org = Import.organization_with_tr_data({
                    'csv-file': files['users'],
                })
                Import.host_collection_with_tr_data({
                    'csv-file': files['system-groups'],
                })
                # now check whether all HCs from csv are imported
                imp_orgs = get_sat6_id(
                    Import.csv_to_dataset([files['users']]),
                    import_org[1]
                )
                # each imported org must own at least one host collection
                for imp_org in imp_orgs:
                    self.assertNotEqual(
                        HostCollection.list(
                            {'organization-id': imp_org['sat6']}
                        ),
                        []
                    )
                clean_transdata()
    def test_reimport_host_collections_default_negative(self):
        """@test: Try to re-import all System Groups from the default data set
        (predefined source) as the Host Collections.
        @feature: Repetitive Import Host-Collections
        @assert: 3 Host Collections created, no action taken on 2nd Import
        """
        for test_data in gen_import_hostcol_data():
            with self.subTest(test_data):
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                # import the prerequisites (organizations)
                import_org = Import.organization_with_tr_data({
                    'csv-file': files['users'],
                })
                Import.host_collection({'csv-file': files['system-groups']})
                hcollections_before = [
                    HostCollection.list({'organization-id': tr['sat6']})
                    for tr in import_org[1]
                ]
                self.assertNotEqual(hcollections_before, [])
                # the re-import must leave the collections untouched
                Import.host_collection({'csv-file': files['system-groups']})
                hcollections_after = [
                    HostCollection.list({'organization-id': tr['sat6']})
                    for tr in import_org[1]
                ]
                self.assertEqual(hcollections_before, hcollections_after)
                clean_transdata()
    def test_import_host_collections_recovery(self):
        """@test: Try to Import Collections with the same name to invoke
        usage of a recovery strategy (rename, map, none)
        @feature: Import HostCollection Recover
        @assert: 2nd Import will rename the new collections, 3nd import will
        result in No Action Taken and the 4th one will map them
        """
        for test_data in gen_import_hostcol_data():
            with self.subTest(test_data):
                # prepare the data
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                # initial import
                import_org = Import.organization_with_tr_data({
                    'csv-file': files['users']
                })
                Import.host_collection_with_tr_data({
                    'csv-file': files['system-groups'],
                })
                # clear the .transition_data to clear the transition mapping
                ssh.command(
                    'rm -rf "${HOME}"/.transition_data/host_collections*'
                )
                # use the default (rename) strategy
                import_hc_rename = Import.host_collection_with_tr_data({
                    'csv-file': files['system-groups'],
                    'verbose': True,
                })
                # every renamed collection must exist under its sat6 id
                for record in import_hc_rename[1]:
                    HostCollection.info({'id': record['sat6']})
                Import.host_collection({
                    'csv-file': files['system-groups'],
                    'delete': True,
                })
                # use the 'none' strategy: collections must stay unchanged
                hc_before = [
                    HostCollection.list({'organization-id': tr['sat6']})
                    for tr in import_org[1]
                ]
                Import.host_collection({
                    'csv-file': files['system-groups'], 'recover': 'none',
                })
                hc_after = [
                    HostCollection.list({'organization-id': tr['sat6']})
                    for tr in import_org[1]
                ]
                self.assertEqual(hc_before, hc_after)
                # use the 'map' strategy
                import_hc_map = Import.host_collection_with_tr_data({
                    'csv-file': files['system-groups'],
                    'recover': 'map',
                    'verbose': True,
                })
                for record in import_hc_map[1]:
                    HostCollection.info({'id': record['sat6']})
                clean_transdata()
    def test_import_repo_default(self):
        """@test: Import and enable all Repositories from the default data set
        (predefined source)
        @feature: Import Enable Repositories
        @assert: 3 Repositories imported and enabled
        """
        for test_data in gen_import_repo_data():
            with self.subTest(test_data):
                # randomize the values for orgs and repos
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                # import the prerequisites (organizations)
                import_org = Import.organization_with_tr_data({
                    'csv-file': files['users'],
                })
                # now proceed with importing the repositories
                Import.repository_with_tr_data({
                    'csv-file': files['repositories'],
                    'synchronize': True,
                    'wait': True,
                })
                # get the sat6 mapping of the imported organizations
                imp_orgs = get_sat6_id(
                    Import.csv_to_dataset([files['users']]),
                    import_org[1]
                )
                # now to check whether all repos from csv appeared in satellite
                for imp_org in imp_orgs:
                    self.assertNotEqual(
                        Repository.list({'organization-id': imp_org['sat6']}),
                        [],
                    )
                clean_transdata()
    def test_reimport_repo_negative(self):
        """@test: Import and enable all Repositories from the default data set
        (predefined source), then try to Import Repositories from the same CSV
        again.
        @feature: Repetitive Import Enable Repositories
        @assert: 3 Repositories imported and enabled, second run should trigger
        no action.
        """
        for test_data in gen_import_repo_data():
            with self.subTest(test_data):
                # randomize the values for orgs and repos
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                # import the prerequisites (organizations)
                import_org = Import.organization_with_tr_data({
                    'csv-file': files['users'],
                })
                # now proceed with importing the repositories
                Import.repository_with_tr_data({
                    'csv-file': files['repositories'],
                    'synchronize': True,
                    'wait': True,
                })
                # get the sat6 mapping of the imported organizations
                imp_orgs = get_sat6_id(
                    Import.csv_to_dataset([files['users']]),
                    import_org[1]
                )
                repos_before = [
                    Repository.list({'organization-id': imp_org['sat6']})
                    for imp_org in imp_orgs
                ]
                # Reimport the same repos and check for changes in sat6
                Import.repository({
                    'csv-file': files['repositories'],
                    'synchronize': True,
                    'wait': True,
                })
                self.assertEqual(
                    repos_before,
                    [
                        Repository.list({'organization-id': imp_org['sat6']})
                        for imp_org in imp_orgs
                    ]
                )
                clean_transdata()
    def test_import_repo_recovery(self):
        """@test: Try to Import Repos with the same name to invoke
        usage of a recovery strategy (rename, map, none)
        @feature: Import Repository Recover
        @assert: 2nd Import will rename the new repos, 3rd import will
        map them and the 4th one will result in No Action Taken
        """
        for test_data in gen_import_repo_data():
            with self.subTest(test_data):
                # prepare the data
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                # import the prerequisites (organizations)
                import_org = Import.organization_with_tr_data({
                    'csv-file': files['users'],
                })
                Import.repository_with_tr_data({
                    'csv-file': files['repositories'],
                })
                # clear the .transition_data to clear the transition mapping
                ssh.command('rm -rf "${HOME}"/.transition_data/repositories*')
                ssh.command('rm -rf "${HOME}"/.transition_data/products*')
                # use the default (rename) strategy
                import_repo_rename = Import.repository_with_tr_data({
                    'csv-file': files['repositories'], 'verbose': True,
                })
                # NOTE(review): the repository tr_data is nested one level
                # deeper than for orgs/users, hence the [1][1] indexing --
                # confirm against Import.repository_with_tr_data()
                for record in import_repo_rename[1][1]:
                    Repository.info({'id': record['sat6']})
                Import.repository({
                    'csv-file': files['repositories'], 'delete': True,
                })
                # use the 'none' strategy: the repo lists must stay unchanged
                repos_before = [
                    Repository.list({'organization-id': tr['sat6']})
                    for tr in import_org[1]
                ]
                Import.repository({
                    'csv-file': files['repositories'],
                    'recover': 'none',
                })
                self.assertEqual(
                    repos_before,
                    [Repository.list({'organization-id': tr['sat6']})
                        for tr in import_org[1]],
                )
                # use the 'map' strategy
                import_repo_map = Import.repository_with_tr_data({
                    'csv-file': files['repositories'],
                    'recover': 'map',
                    'verbose': True,
                })
                for record in import_repo_map[1][1]:
                    Repository.info({'id': record['sat6']})
                clean_transdata()
    def test_import_cv_default(self):
        """@test: Import and enable all Content Views from the default data set
        (predefined source)
        @feature: Import Enable Content View
        @assert: 3 Content Views imported and enabled
        """
        for test_data in gen_import_cv_data():
            with self.subTest(test_data):
                # randomize the values for orgs and repos
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                # import the prerequisites (organizations and repositories)
                import_org = Import.organization_with_tr_data({
                    'csv-file': files['users'],
                })
                Import.repository_with_tr_data({
                    'csv-file': files['repositories'],
                    'synchronize': True,
                    'wait': True,
                })
                # now proceed with Content View import
                Import.content_view_with_tr_data({
                    'csv-file': files['content-views'],
                    'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
                })
                # get the sat6 mapping of the imported organizations
                imp_orgs = get_sat6_id(
                    Import.csv_to_dataset([files['users']]),
                    import_org[1]
                )
                # now check whether all CVs from csv are imported
                for imp_org in imp_orgs:
                    self.assertNotEqual(
                        ContentView.list({'organization-id': imp_org['sat6']}),
                        []
                    )
                clean_transdata()
    def test_reimport_cv_negative(self):
        """@test: Import and enable all Content Views from the default data set
        (predefined source), then try to Import them from the same CSV
        again.
        @feature: Repetitive Import Content Views
        @assert: 3 Content Views imported and enabled, 2nd run should trigger
        no action.
        """
        for test_data in gen_import_cv_data():
            with self.subTest(test_data):
                # randomize the values for orgs and repos
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                # import the prerequisites (organizations and repositories)
                import_org = Import.organization_with_tr_data({
                    'csv-file': files['users'],
                })
                Import.repository_with_tr_data({
                    'csv-file': files['repositories'],
                    'synchronize': True,
                    'wait': True,
                })
                Import.content_view_with_tr_data({
                    'csv-file': files['content-views'],
                    'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
                })
                # get the sat6 mapping of the imported organizations
                imp_orgs = get_sat6_id(
                    Import.csv_to_dataset([files['users']]),
                    import_org[1]
                )
                cvs_before = [
                    ContentView.list({'organization-id': imp_org['sat6']})
                    for imp_org in imp_orgs
                ]
                # Reimport the same content views and check for changes in sat6
                Import.content_view({
                    'csv-file': files['content-views'],
                    'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
                })
                self.assertEqual(
                    cvs_before,
                    [
                        ContentView.list({'organization-id': imp_org['sat6']})
                        for imp_org in imp_orgs
                    ]
                )
                clean_transdata()
    def test_import_cv_recovery(self):
        """@test: Try to Import Content Views with the same name to invoke
        usage of a recovery strategy (rename, map, none)
        @feature: Import Content View Recover
        @assert: 2nd Import will rename the new Content Views, 3rd import will
        map them and the 4th one will result in No Action Taken
        """
        for test_data in gen_import_cv_data():
            with self.subTest(test_data):
                # prepare the data
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                files = update_csv_values(files, test_data, tmp_dir)
                # import the prerequisites (orgs, repos, content views)
                import_org = Import.organization_with_tr_data({
                    'csv-file': files['users'],
                })
                Import.repository_with_tr_data({
                    'csv-file': files['repositories'],
                })
                Import.content_view_with_tr_data({
                    'csv-file': files['content-views'],
                    'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
                })
                # clear the .transition_data to clear the transition mapping
                ssh.command('rm -rf "${HOME}"/.transition_data/repositories*')
                ssh.command('rm -rf "${HOME}"/.transition_data/products*')
                ssh.command('rm -rf "${HOME}"/.transition_data/content_views*')
                # use the default (rename) strategy
                import_cv_rename = Import.content_view_with_tr_data({
                    'csv-file': files['content-views'],
                    'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
                    'verbose': True,
                })
                # every renamed content view must exist under its sat6 id
                for record in import_cv_rename[1]:
                    ContentView.info({'id': record['sat6']})
                Import.content_view({
                    'csv-file': files['content-views'],
                    'delete': True,
                })
                # use the 'none' strategy: the CV lists must stay unchanged
                cvs_before = [
                    ContentView.list({'organization-id': tr['sat6']})
                    for tr in import_org[1]
                ]
                Import.content_view({
                    'csv-file': files['content-views'],
                    'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
                    'recover': 'none',
                })
                cvs_after = [
                    ContentView.list({'organization-id': tr['sat6']})
                    for tr in import_org[1]
                ]
                self.assertEqual(cvs_before, cvs_after)
                # use the 'map' strategy
                import_cvs_map = Import.content_view_with_tr_data({
                    'csv-file': files['content-views'],
                    'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
                    'recover': 'map',
                    'verbose': True,
                })
                for record in import_cvs_map[1]:
                    ContentView.info({'id': record['sat6']})
                clean_transdata()
def test_bz1160847_translate_macros(self):
"""@test: Check whether all supported Sat5 macros are being properly
converted to the Puppet facts.
According to RH Transition Guide (Chapter 3.7.8, Table 3.1)
@feature: Import config-file --csv-file --generate-only
@assert: Generated .erb file contains correctly formatted puppet facts
"""
# prepare data (craft csv)
test_data = [
{
u'name': u'hostname',
u'macro': u'{| rhn.system.hostname |}',
u'fact': u'<%= @fqdn %>',
},
{
u'name': u'sys_ip_address',
u'macro': u'{| rhn.system.ip_address |}',
u'fact': u'<%= @ipaddress %>',
},
{
u'name': u'ip_address',
u'macro': u'{| rhn.system.net_interface'
u'.ip_address(eth0) |}',
u'fact': u'<%= @ipaddress_eth0 %>',
},
{
u'name': u'netmask',
u'macro': u'{| rhn.system.net_interface'
u'.netmask(eth0) |}',
u'fact': u'<%= @netmask_eth0 %>',
},
{
u'name': u'mac_address',
u'macro': u'{| rhn.system.net_interface.'
u'hardware_address(eth0) |}',
u'fact': u'<%= @macaddress_eth0 %>',
},
]
csv_contents = u'\n'.join(
u'{0}={1}'.format(i['name'], i['macro']) for i in test_data
)
csv_row = {
u'org_id': u'1',
u'channel_id': u'3',
u'channel': u'config-1',
u'channel_type': u'normal',
u'path': gen_string('utf8') + gen_string('alphanumeric'),
u'file_type': u'file',
u'file_id': u'8',
u'revision': u'1',
u'is_binary': u'N',
u'contents': u'{0}\n'.format(csv_contents),
u'delim_start': u'{|',
u'delim_end': u'|}',
u'username': u'root',
u'groupname': u'root',
u'filemode': u'600',
u'symbolic_link': u'',
u'selinux_ctx': u'',
}
file_name = build_csv_file([csv_row], self.default_dataset[0])
invalid_chars = '[^\da-zA-Z\-\.\_]'
# create a random org that will be mapped to sat5 org with id = 1
if bz_bug_is_open(1226981):
org_data = {'name': gen_string('alphanumeric')}
else:
org_data = {'name': gen_string('utf8')}
org = make_org(org_data)
trans_header = [u'sat5', u'sat6', u'delete']
trans_row = [u'1', org['id'], u'']
transition_data_file = tempfile.mkstemp(
prefix='organizations-',
suffix='.csv',
)[1]
with open(transition_data_file, 'wb') as trans_csv:
csv_writer = csv.writer(trans_csv)
csv_writer.writerow(trans_header)
csv_writer.writerow(trans_row)
ssh.command('mkdir -p ~/.transition_data')
ssh.upload_file(
transition_data_file,
os.path.join(
'.transition_data/', os.path.basename(transition_data_file)
)
)
os.remove(transition_data_file)
# run the import command
Import.config_file({
u'csv-file': file_name,
u'generate-only': True,
})
prefix = re.sub(invalid_chars, '', org['name'])
erb_file = re.sub(invalid_chars, '', csv_row['path'])
if len(prefix) == 0:
prefix = u'orgid' + org['id']
if len(erb_file) == 0:
erb_file = u'file_id8'
# collect the contains of the generated file
cat_cmd = ssh.command(
u'cat "${{HOME}}"/puppet_work_dir/{0}-config_1/templates/'
u'{1}.erb'.format(prefix.lower(), erb_file)
)
# compare the contains with the expected format
self.assertEqual(
cat_cmd.stdout[:-1],
[fact['name'] + '=' + fact['fact'] for fact in test_data],
)
clean_transdata()
    def test_import_enable_rh_repos(self):
        """@test: Import and enable all red hat repositories from predefined
        dataset
        @feature: Import Enable RH Repositories
        @assert: All Repositories imported and synchronized
        """
        for test_data in gen_import_rh_repo_data():
            with self.subTest(test_data):
                tmp_dir = self.default_dataset[0]
                files = dict(self.default_dataset[1])
                # NOTE(review): dict(files) is a redundant second copy here --
                # `files` is already a fresh dict
                files = update_csv_values(dict(files), test_data, tmp_dir)
                # collect the Red Hat channels from the randomized CSV
                rh_repos = [
                    repo for repo in Import.csv_to_dataset([files['channels']])
                    if (
                        repo['channel_name'].startswith('Red Hat') or
                        repo['channel_name'].startswith('RHN')
                    )
                ]
                # import the prerequisites (organizations with manifests)
                import_org = Import.organization_with_tr_data_manifests({
                    'csv-file': files['users'],
                })
                Import.repository_enable_with_tr_data({
                    'csv-file': files['channels'],
                    'synchronize': True,
                    'wait': True,
                })
                # verify rh repos appended in every imported org
                for record in product(rh_repos, import_org[1]):
                    self.assertNotEqual(
                        Repository.list({
                            u'organization-id': record[1]['sat6'],
                            u'name': Import.repos[record[0]['channel_label']]
                        }),
                        []
                    )
                    self.assertNotEqual(
                        ContentView.info({
                            u'organization-id': record[1]['sat6'],
                            u'name': record[0]['channel_name']
                        }),
                        []
                    )
                clean_transdata()
def test_reimport_enable_rh_repos_negative(self):
"""@test: Repetitive Import and enable of all red hat repositories from
the predefined dataset
@feature: Repetitive Import Enable RH Repositories
@assert: All Repositories imported and synchronized only once
"""
for test_data in gen_import_rh_repo_data():
with self.subTest(test_data):
tmp_dir = self.default_dataset[0]
files = dict(self.default_dataset[1])
files = update_csv_values(files, test_data, tmp_dir)
# import the prerequisities (organizations with manifests)
import_org = Import.organization_with_tr_data_manifests({
'csv-file': files['users'],
})
Import.repository_enable({
'csv-file': files['channels'],
'synchronize': True,
'wait': True,
})
# verify rh repos appended in every imported org
repos_before, cvs_before = verify_rh_repos(
import_org[1], files['channels']
)
self.assertFalse([] in repos_before)
self.assertFalse([] in cvs_before)
Import.repository_enable({
'csv-file': files['channels'],
'synchronize': True,
'wait': True,
})
# compare after and before to make sure
# nothing has changed after 2nd import
self.assertEqual(
(repos_before, cvs_before),
verify_rh_repos(
import_org[1], files['channels']
)
)
clean_transdata()
def test_import_content_hosts_default(self):
"""@test: Import all content hosts from
the predefined dataset
@feature: Import Content-host
@assert: Profiles for all Content Hosts created
"""
for test_data in gen_import_chost_data():
with self.subTest(test_data):
tmp_dir = self.default_dataset[0]
files = dict(self.default_dataset[1])
files = update_csv_values(files, test_data, tmp_dir)
# import the prerequisites and content hosts
imports = import_content_hosts(files, tmp_dir)
# get the sat6 mapping of the imported organizations
imp_orgs = get_sat6_id(
Import.csv_to_dataset([files['users']]),
imports['organizations'][1]
)
# now to check whether all cont. hosts appeared in satellite
for imp_org in imp_orgs:
self.assertNotEqual(
ContentHost.list({'organization-id': imp_org['sat6']}),
[]
)
clean_transdata()
def test_reimport_content_hosts_negative(self):
"""@test: Repetitive Import of all content hosts from
the predefined dataset
@feature: Repetitive Import Content-host
@assert: Profiles for all Content Hosts created only once
"""
for test_data in gen_import_chost_data():
with self.subTest(test_data):
tmp_dir = self.default_dataset[0]
files = dict(self.default_dataset[1])
files = update_csv_values(files, test_data, tmp_dir)
# import the prerequisites and content hosts
imports = import_content_hosts(files, tmp_dir)
# get the sat6 mapping of the imported organizations
imp_orgs = get_sat6_id(
Import.csv_to_dataset([files['users']]),
imports['organizations'][1]
)
chosts_before = [
ContentHost.list({'organization-id': imp_org['sat6']})
for imp_org in imp_orgs
]
Import.content_host_with_tr_data({
u'csv-file': files['system-profiles'],
u'export-directory': tmp_dir,
u'verbose': True
})
self.assertEqual(
[
ContentHost.list({'organization-id': imp_org['sat6']})
for imp_org in imp_orgs
],
chosts_before
)
clean_transdata()
@skip_if_bug_open('bugzilla', 1267224)
def test_import_content_hosts_recovery_negative(self):
"""@test: Try to invoke usage of a recovery strategy
@feature: Import Content Hosts Recover
@assert: No such option exists, error is shown
"""
for test_data in gen_import_chost_data():
with self.subTest(test_data):
# prepare the data
tmp_dir = self.default_dataset[0]
files = dict(self.default_dataset[1])
files = update_csv_values(files, test_data, tmp_dir)
import_content_hosts(files, tmp_dir)
# clear the .transition_data to clear the transition mapping
ssh.command(
'rm -rf "${{HOME}}"/.transition_data/system*'
'{0}/SOURCES {0}/SPECS'
.format(tmp_dir)
)
# use the rename strategy
with self.assertRaises(CLIReturnCodeError):
Import.content_host_with_tr_data({
u'csv-file': files['system-profiles'],
u'export-directory': tmp_dir,
u'recover': u'rename',
})
clean_transdata()
def test_import_snippets_default(self):
"""@test: Import template snippets from the default data set
(predefined source)
@feature: Import Template Snippets
@assert: All Snippets imported
"""
for test_data in gen_import_snippet_data():
with self.subTest(test_data):
tmp_dir = self.default_dataset[0]
files = dict(self.default_dataset[1])
# randomize the values for orgs and snippets
files = update_csv_values(files, test_data, tmp_dir)
# import the prerequisities
Import.organization_with_tr_data({'csv-file': files['users']})
# list and save templates before import
before = Template.list()
# now proceed with importing the template snippets
import_snippet = Import.template_snippet_with_tr_data({
'csv-file': files['kickstart-scripts'],
'verbose': True,
})
# list and save templates after import
after = Template.list()
# difference between before and after import
diff = [d for d in after if d not in before]
diff_ids = [d[u'id'] for d in diff]
mapping = import_snippet[1][0]
# check that snippets have been properly imported
for row in mapping:
template = Template.info({u'id': row[u'sat6']})
self.assertTrue(template[u'id'] in diff_ids)
self.assertTrue(template[u'type'] == u'snippet')
clean_transdata()
def test_import_config_files_default(self):
"""@test: Import all Config Files from the default data set
(predefined source)
@feature: Import Config Files
@assert: All Config Files are imported
"""
for test_data in gen_import_config_files_data():
with self.subTest(test_data):
# randomize the values for orgs and repos
tmp_dir = self.default_dataset[0]
files = dict(self.default_dataset[1])
files = update_csv_values(files, test_data, tmp_dir)
# import the prerequisities
Import.organization_with_tr_data(
{'csv-file': files['users']}
)
# now proceed with Config Files import
import_cf = Import.config_file_with_tr_data({
'csv-file': files['config-files-latest'],
'verbose': True,
})
configs = Import.csv_to_dataset([files['config-files-latest']])
imp_configs = get_sat6_id(
configs,
import_cf[1][1],
'channel_id',
'channel_id'
)
for rec in imp_configs:
self.assertEqual(
rec['channel'],
Repository.info({'id': rec['sat6']})['name']
)
clean_transdata()
def test_reimport_config_files_negative(self):
"""@test: Repetitive Import of all Config Files from the default
data set (predefined source)
@feature: Repetitive Import Config Files
@assert: All Config Files are imported only once
"""
for test_data in gen_import_config_files_data():
with self.subTest(test_data):
# randomize the values for orgs and repos
tmp_dir = self.default_dataset[0]
files = dict(self.default_dataset[1])
files = update_csv_values(files, test_data, tmp_dir)
# import the prerequisities
Import.organization_with_tr_data(
{'csv-file': files['users']}
)
# initial import
import_cf = Import.config_file_with_tr_data({
'csv-file': files['config-files-latest'],
'verbose': True,
})
configs = Import.csv_to_dataset([files['config-files-latest']])
imp_configs = get_sat6_id(
configs,
import_cf[1][1],
'channel_id',
'channel_id'
)
cf_before = [
Repository.info({'id': rec['sat6']})
for rec in imp_configs
]
cf_after = [
Repository.info({'id': rec['sat6']})
for rec in imp_configs
]
self.assertEqual(cf_before, cf_after)
clean_transdata()<|fim▁end|> | files = update_csv_values(files, test_data, tmp_dir)
pwdfile = os.path.join(tmp_dir, gen_string('alpha', 6))
Import.organization({'csv-file': files['users']})
Import.user({ |
<|file_name|>lumx.js<|end_file_name|><|fim▁begin|>/*
LumX v1.5.14
(c) 2014-2017 LumApps http://ui.lumapps.com
License: MIT
*/
(function()
{
'use strict';
angular.module('lumx.utils.depth', []);
angular.module('lumx.utils.event-scheduler', []);
angular.module('lumx.utils.transclude-replace', []);
angular.module('lumx.utils.utils', []);
angular.module('lumx.utils', [
'lumx.utils.depth',
'lumx.utils.event-scheduler',
'lumx.utils.transclude-replace',
'lumx.utils.utils'
]);
angular.module('lumx.button', []);
angular.module('lumx.checkbox', []);
angular.module('lumx.data-table', []);
angular.module('lumx.date-picker', []);
angular.module('lumx.dialog', ['lumx.utils.event-scheduler']);
angular.module('lumx.dropdown', ['lumx.utils.event-scheduler']);
angular.module('lumx.fab', []);
angular.module('lumx.file-input', []);
angular.module('lumx.icon', []);
angular.module('lumx.notification', ['lumx.utils.event-scheduler']);
angular.module('lumx.progress', []);
angular.module('lumx.radio-button', []);
angular.module('lumx.ripple', []);
angular.module('lumx.search-filter', []);
angular.module('lumx.select', []);
angular.module('lumx.stepper', []);
angular.module('lumx.switch', []);
angular.module('lumx.tabs', []);
angular.module('lumx.text-field', []);
angular.module('lumx.tooltip', []);
angular.module('lumx', [
'lumx.button',
'lumx.checkbox',
'lumx.data-table',
'lumx.date-picker',
'lumx.dialog',
'lumx.dropdown',
'lumx.fab',
'lumx.file-input',
'lumx.icon',
'lumx.notification',
'lumx.progress',
'lumx.radio-button',
'lumx.ripple',
'lumx.search-filter',
'lumx.select',
'lumx.stepper',
'lumx.switch',
'lumx.tabs',
'lumx.text-field',
'lumx.tooltip',
'lumx.utils'
]);
})();
(function()
{
'use strict';
angular
.module('lumx.utils.depth')
.service('LxDepthService', LxDepthService);
function LxDepthService()
{
var service = this;
var depth = 1000;
service.getDepth = getDepth;
service.register = register;
////////////
function getDepth()
{
return depth;
}
function register()
{
depth++;
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.utils.event-scheduler')
.service('LxEventSchedulerService', LxEventSchedulerService);
LxEventSchedulerService.$inject = ['$document', 'LxUtils'];
function LxEventSchedulerService($document, LxUtils)
{
var service = this;
var handlers = {};
var schedule = {};
service.register = register;
service.unregister = unregister;
////////////
function handle(event)
{
var scheduler = schedule[event.type];
if (angular.isDefined(scheduler))
{
for (var i = 0, length = scheduler.length; i < length; i++)
{
var handler = scheduler[i];
if (angular.isDefined(handler) && angular.isDefined(handler.callback) && angular.isFunction(handler.callback))
{
handler.callback(event);
if (event.isPropagationStopped())
{
break;
}
}
}
}
}
function register(eventName, callback)
{
var handler = {
eventName: eventName,
callback: callback
};
var id = LxUtils.generateUUID();
handlers[id] = handler;
if (angular.isUndefined(schedule[eventName]))
{
schedule[eventName] = [];
$document.on(eventName, handle);
}
schedule[eventName].unshift(handlers[id]);
return id;
}
function unregister(id)
{
var found = false;
var handler = handlers[id];
if (angular.isDefined(handler) && angular.isDefined(schedule[handler.eventName]))
{
var index = schedule[handler.eventName].indexOf(handler);
if (angular.isDefined(index) && index > -1)
{
schedule[handler.eventName].splice(index, 1);
delete handlers[id];
found = true;
}
if (schedule[handler.eventName].length === 0)
{
delete schedule[handler.eventName];
$document.off(handler.eventName, handle);
}
}
return found;
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.utils.transclude-replace')
.directive('ngTranscludeReplace', ngTranscludeReplace);
ngTranscludeReplace.$inject = ['$log'];
function ngTranscludeReplace($log)
{
return {
terminal: true,
restrict: 'EA',
link: link
};
function link(scope, element, attrs, ctrl, transclude)
{
if (!transclude)
{
$log.error('orphan',
'Illegal use of ngTranscludeReplace directive in the template! ' +
'No parent directive that requires a transclusion found. ');
return;
}
transclude(function(clone)
{
if (clone.length)
{
element.replaceWith(clone);
}
else
{
element.remove();
}
});
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.utils.utils')
.service('LxUtils', LxUtils);
function LxUtils()
{
var service = this;
service.debounce = debounce;
service.generateUUID = generateUUID;
service.disableBodyScroll = disableBodyScroll;
////////////
// http://underscorejs.org/#debounce (1.8.3)
function debounce(func, wait, immediate)
{
var timeout, args, context, timestamp, result;
wait = wait || 500;
var later = function()
{
var last = Date.now() - timestamp;
if (last < wait && last >= 0)
{
timeout = setTimeout(later, wait - last);
}
else
{
timeout = null;
if (!immediate)
{
result = func.apply(context, args);
if (!timeout)
{
context = args = null;
}
}
}
};
var debounced = function()
{
context = this;
args = arguments;
timestamp = Date.now();
var callNow = immediate && !timeout;
if (!timeout)
{
timeout = setTimeout(later, wait);
}
if (callNow)
{
result = func.apply(context, args);
context = args = null;
}
return result;
};
debounced.clear = function()
{
clearTimeout(timeout);
timeout = context = args = null;
};
return debounced;
}
function generateUUID()
{
var d = new Date().getTime();
var uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c)
{
var r = (d + Math.random() * 16) % 16 | 0;
d = Math.floor(d / 16);
return (c == 'x' ? r : (r & 0x3 | 0x8))
.toString(16);
});
return uuid.toUpperCase();
}
function disableBodyScroll()
{
var body = document.body;
var documentElement = document.documentElement;
var prevDocumentStyle = documentElement.style.cssText || '';
var prevBodyStyle = body.style.cssText || '';
var viewportTop = window.scrollY || window.pageYOffset || 0;
var clientWidth = body.clientWidth;
var hasVerticalScrollbar = body.scrollHeight > window.innerHeight + 1;
if (hasVerticalScrollbar)
{
angular.element('body').css({
position: 'fixed',
width: '100%',
top: -viewportTop + 'px'
});
}
if (body.clientWidth < clientWidth)
{
body.style.overflow = 'hidden';
}
// This should be applied after the manipulation to the body, because
// adding a scrollbar can potentially resize it, causing the measurement
// to change.
if (hasVerticalScrollbar)
{
documentElement.style.overflowY = 'scroll';
}
return function restoreScroll()
{
// Reset the inline style CSS to the previous.
body.style.cssText = prevBodyStyle;
documentElement.style.cssText = prevDocumentStyle;
// The body loses its scroll position while being fixed.
body.scrollTop = viewportTop;
};
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.button')
.directive('lxButton', lxButton);
function lxButton()
{
var buttonClass;
return {
restrict: 'E',
templateUrl: getTemplateUrl,
compile: compile,
replace: true,
transclude: true
};
function compile(element, attrs)
{
setButtonStyle(element, attrs.lxSize, attrs.lxColor, attrs.lxType);
return function(scope, element, attrs)
{
attrs.$observe('lxSize', function(lxSize)
{
setButtonStyle(element, lxSize, attrs.lxColor, attrs.lxType);
});
attrs.$observe('lxColor', function(lxColor)
{
setButtonStyle(element, attrs.lxSize, lxColor, attrs.lxType);
});
attrs.$observe('lxType', function(lxType)
{
setButtonStyle(element, attrs.lxSize, attrs.lxColor, lxType);
});
element.on('click', function(event)
{
if (attrs.disabled === true)
{
event.preventDefault();
event.stopImmediatePropagation();
}
});
};
}
function getTemplateUrl(element, attrs)
{
return isAnchor(attrs) ? 'link.html' : 'button.html';
}
function isAnchor(attrs)
{
return angular.isDefined(attrs.href) || angular.isDefined(attrs.ngHref) || angular.isDefined(attrs.ngLink) || angular.isDefined(attrs.uiSref);
}
function setButtonStyle(element, size, color, type)
{
var buttonBase = 'btn';
var buttonSize = angular.isDefined(size) ? size : 'm';
var buttonColor = angular.isDefined(color) ? color : 'primary';
var buttonType = angular.isDefined(type) ? type : 'raised';
element.removeClass(buttonClass);
buttonClass = buttonBase + ' btn--' + buttonSize + ' btn--' + buttonColor + ' btn--' + buttonType;
element.addClass(buttonClass);
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.checkbox')
.directive('lxCheckbox', lxCheckbox)
.directive('lxCheckboxLabel', lxCheckboxLabel)
.directive('lxCheckboxHelp', lxCheckboxHelp);
function lxCheckbox()
{
return {
restrict: 'E',
templateUrl: 'checkbox.html',
scope:
{
lxColor: '@?',
name: '@?',
ngChange: '&?',
ngDisabled: '=?',
ngFalseValue: '@?',
ngModel: '=',
ngTrueValue: '@?',
theme: '@?lxTheme'
},
controller: LxCheckboxController,
controllerAs: 'lxCheckbox',
bindToController: true,
transclude: true,
replace: true
};
}
LxCheckboxController.$inject = ['$scope', '$timeout', 'LxUtils'];
function LxCheckboxController($scope, $timeout, LxUtils)
{
var lxCheckbox = this;
var checkboxId;
var checkboxHasChildren;
var timer;
lxCheckbox.getCheckboxId = getCheckboxId;
lxCheckbox.getCheckboxHasChildren = getCheckboxHasChildren;
lxCheckbox.setCheckboxId = setCheckboxId;
lxCheckbox.setCheckboxHasChildren = setCheckboxHasChildren;
lxCheckbox.triggerNgChange = triggerNgChange;
$scope.$on('$destroy', function()
{
$timeout.cancel(timer);
});
init();
////////////
function getCheckboxId()
{
return checkboxId;
}
function getCheckboxHasChildren()
{
return checkboxHasChildren;
}
function init()
{
setCheckboxId(LxUtils.generateUUID());
setCheckboxHasChildren(false);
lxCheckbox.ngTrueValue = angular.isUndefined(lxCheckbox.ngTrueValue) ? true : lxCheckbox.ngTrueValue;
lxCheckbox.ngFalseValue = angular.isUndefined(lxCheckbox.ngFalseValue) ? false : lxCheckbox.ngFalseValue;
lxCheckbox.lxColor = angular.isUndefined(lxCheckbox.lxColor) ? 'accent' : lxCheckbox.lxColor;
}
function setCheckboxId(_checkboxId)
{
checkboxId = _checkboxId;
}
function setCheckboxHasChildren(_checkboxHasChildren)
{
checkboxHasChildren = _checkboxHasChildren;
}
function triggerNgChange()
{
timer = $timeout(lxCheckbox.ngChange);
}
}
function lxCheckboxLabel()
{
return {
restrict: 'AE',
require: ['^lxCheckbox', '^lxCheckboxLabel'],
templateUrl: 'checkbox-label.html',
link: link,
controller: LxCheckboxLabelController,
controllerAs: 'lxCheckboxLabel',
bindToController: true,
transclude: true,
replace: true
};
function link(scope, element, attrs, ctrls)
{
ctrls[0].setCheckboxHasChildren(true);
ctrls[1].setCheckboxId(ctrls[0].getCheckboxId());
}
}
function LxCheckboxLabelController()
{
var lxCheckboxLabel = this;
var checkboxId;
lxCheckboxLabel.getCheckboxId = getCheckboxId;
lxCheckboxLabel.setCheckboxId = setCheckboxId;
////////////
function getCheckboxId()
{
return checkboxId;
}
function setCheckboxId(_checkboxId)
{
checkboxId = _checkboxId;
}
}
function lxCheckboxHelp()
{
return {
restrict: 'AE',
require: '^lxCheckbox',
templateUrl: 'checkbox-help.html',
transclude: true,
replace: true
};
}
})();
(function()
{
'use strict';
angular
.module('lumx.data-table')
.directive('lxDataTable', lxDataTable);
function lxDataTable()
{
return {
restrict: 'E',
templateUrl: 'data-table.html',
scope:
{
border: '=?lxBorder',
selectable: '=?lxSelectable',
thumbnail: '=?lxThumbnail',
tbody: '=lxTbody',
thead: '=lxThead'
},
link: link,
controller: LxDataTableController,
controllerAs: 'lxDataTable',
bindToController: true,
transclude: true,
replace: true
};
function link(scope, element, attrs, ctrl)
{
attrs.$observe('id', function(_newId)
{
ctrl.id = _newId;
});
}
}
LxDataTableController.$inject = ['$rootScope', '$sce', '$scope'];
function LxDataTableController($rootScope, $sce, $scope)
{
var lxDataTable = this;
lxDataTable.areAllRowsSelected = areAllRowsSelected;
lxDataTable.border = angular.isUndefined(lxDataTable.border) ? true : lxDataTable.border;
lxDataTable.sort = sort;
lxDataTable.toggle = toggle;
lxDataTable.toggleAllSelected = toggleAllSelected;
lxDataTable.$sce = $sce;
lxDataTable.allRowsSelected = false;
lxDataTable.selectedRows = [];
$scope.$on('lx-data-table__select', function(event, id, row)
{
if (id === lxDataTable.id && angular.isDefined(row))
{
if (angular.isArray(row) && row.length > 0)
{
row = row[0];
}
_select(row);
}
});
$scope.$on('lx-data-table__select-all', function(event, id)
{
if (id === lxDataTable.id)
{
_selectAll();
}
});
$scope.$on('lx-data-table__unselect', function(event, id, row)
{
if (id === lxDataTable.id && angular.isDefined(row))
{
if (angular.isArray(row) && row.length > 0)
{
row = row[0];
}
_unselect(row);
}
});
$scope.$on('lx-data-table__unselect-all', function(event, id)
{
if (id === lxDataTable.id)
{
_unselectAll();
}
});
////////////
function _selectAll()
{
lxDataTable.selectedRows.length = 0;
for (var i = 0, len = lxDataTable.tbody.length; i < len; i++)
{
if (!lxDataTable.tbody[i].lxDataTableDisabled)
{
lxDataTable.tbody[i].lxDataTableSelected = true;
lxDataTable.selectedRows.push(lxDataTable.tbody[i]);
}
}
lxDataTable.allRowsSelected = true;
$rootScope.$broadcast('lx-data-table__unselected', lxDataTable.id, lxDataTable.selectedRows);
}
function _select(row)
{
toggle(row, true);
}
function _unselectAll()
{
for (var i = 0, len = lxDataTable.tbody.length; i < len; i++)
{
if (!lxDataTable.tbody[i].lxDataTableDisabled)
{
lxDataTable.tbody[i].lxDataTableSelected = false;
}
}
lxDataTable.allRowsSelected = false;
lxDataTable.selectedRows.length = 0;
$rootScope.$broadcast('lx-data-table__selected', lxDataTable.id, lxDataTable.selectedRows);
}
function _unselect(row)
{
toggle(row, false);
}
////////////
function areAllRowsSelected()
{
var displayedRows = 0;
for (var i = 0, len = lxDataTable.tbody.length; i < len; i++)
{
if (!lxDataTable.tbody[i].lxDataTableDisabled)
{
displayedRows++;
}
}
if (displayedRows === lxDataTable.selectedRows.length)
{
lxDataTable.allRowsSelected = true;
}
else
{
lxDataTable.allRowsSelected = false;
}
}
function sort(_column)
{
if (!_column.sortable)
{
return;
}
for (var i = 0, len = lxDataTable.thead.length; i < len; i++)
{
if (lxDataTable.thead[i].sortable && lxDataTable.thead[i].name !== _column.name)
{
lxDataTable.thead[i].sort = undefined;
}
}
if (!_column.sort || _column.sort === 'desc')
{
_column.sort = 'asc';
}
else
{
_column.sort = 'desc';
}
$rootScope.$broadcast('lx-data-table__sorted', lxDataTable.id, _column);
}
function toggle(_row, _newSelectedStatus)
{
if (_row.lxDataTableDisabled || !lxDataTable.selectable)
{
return;
}
_row.lxDataTableSelected = angular.isDefined(_newSelectedStatus) ? _newSelectedStatus : !_row.lxDataTableSelected;
if (_row.lxDataTableSelected)
{
// Make sure it's not already in.
if (lxDataTable.selectedRows.length === 0 || (lxDataTable.selectedRows.length && lxDataTable.selectedRows.indexOf(_row) === -1))
{
lxDataTable.selectedRows.push(_row);
lxDataTable.areAllRowsSelected();
$rootScope.$broadcast('lx-data-table__selected', lxDataTable.id, lxDataTable.selectedRows);
}
}
else
{
if (lxDataTable.selectedRows.length && lxDataTable.selectedRows.indexOf(_row) > -1)
{
lxDataTable.selectedRows.splice(lxDataTable.selectedRows.indexOf(_row), 1);
lxDataTable.allRowsSelected = false;
$rootScope.$broadcast('lx-data-table__unselected', lxDataTable.id, lxDataTable.selectedRows);
}
}
}
function toggleAllSelected()
{
if (lxDataTable.allRowsSelected)
{
_unselectAll();
}
else
{
_selectAll();
}
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.data-table')
.service('LxDataTableService', LxDataTableService);
LxDataTableService.$inject = ['$rootScope'];
function LxDataTableService($rootScope)
{
var service = this;
service.select = select;
service.selectAll = selectAll;
service.unselect = unselect;
service.unselectAll = unselectAll;
////////////
function select(_dataTableId, row)
{
$rootScope.$broadcast('lx-data-table__select', _dataTableId, row);
}
function selectAll(_dataTableId)
{
$rootScope.$broadcast('lx-data-table__select-all', _dataTableId);
}
function unselect(_dataTableId, row)
{
$rootScope.$broadcast('lx-data-table__unselect', _dataTableId, row);
}
function unselectAll(_dataTableId)
{
$rootScope.$broadcast('lx-data-table__unselect-all', _dataTableId);
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.date-picker')
.directive('lxDatePicker', lxDatePicker);
lxDatePicker.$inject = ['LxDatePickerService', 'LxUtils'];
function lxDatePicker(LxDatePickerService, LxUtils)
{
return {
restrict: 'AE',
templateUrl: 'date-picker.html',
scope:
{
autoClose: '=?lxAutoClose',
callback: '&?lxCallback',
color: '@?lxColor',
escapeClose: '=?lxEscapeClose',
inputFormat: '@?lxInputFormat',
maxDate: '=?lxMaxDate',
ngModel: '=',
minDate: '=?lxMinDate',
locale: '@lxLocale'
},
link: link,
controller: LxDatePickerController,
controllerAs: 'lxDatePicker',
bindToController: true,
replace: true,
transclude: true
};
function link(scope, element, attrs)
{
if (angular.isDefined(attrs.id))
{
attrs.$observe('id', function(_newId)
{
scope.lxDatePicker.pickerId = _newId;
LxDatePickerService.registerScope(scope.lxDatePicker.pickerId, scope);
});
}
else
{
scope.lxDatePicker.pickerId = LxUtils.generateUUID();
LxDatePickerService.registerScope(scope.lxDatePicker.pickerId, scope);
}
}
}
LxDatePickerController.$inject = ['$element', '$scope', '$timeout', '$transclude', 'LxDatePickerService', 'LxUtils'];
function LxDatePickerController($element, $scope, $timeout, $transclude, LxDatePickerService, LxUtils)
{
var lxDatePicker = this;
var input;
var modelController;
var timer1;
var timer2;
var watcher1;
var watcher2;
lxDatePicker.closeDatePicker = closeDatePicker;
lxDatePicker.displayYearSelection = displayYearSelection;
lxDatePicker.hideYearSelection = hideYearSelection;
lxDatePicker.getDateFormatted = getDateFormatted;
lxDatePicker.nextMonth = nextMonth;
lxDatePicker.openDatePicker = openDatePicker;
lxDatePicker.previousMonth = previousMonth;
lxDatePicker.select = select;
lxDatePicker.selectYear = selectYear;
lxDatePicker.autoClose = angular.isDefined(lxDatePicker.autoClose) ? lxDatePicker.autoClose : true;
lxDatePicker.color = angular.isDefined(lxDatePicker.color) ? lxDatePicker.color : 'primary';
lxDatePicker.element = $element.find('.lx-date-picker');
lxDatePicker.escapeClose = angular.isDefined(lxDatePicker.escapeClose) ? lxDatePicker.escapeClose : true;
lxDatePicker.isOpen = false;
lxDatePicker.moment = moment;
lxDatePicker.yearSelection = false;
lxDatePicker.uuid = LxUtils.generateUUID();
$transclude(function(clone)
{
if (clone.length)
{
lxDatePicker.hasInput = true;
timer1 = $timeout(function()
{
input = $element.find('.lx-date-input input');
modelController = input.data('$ngModelController');
watcher2 = $scope.$watch(function()
{
return modelController.$viewValue;
}, function(newValue, oldValue)
{
if (angular.isUndefined(newValue))
{
lxDatePicker.ngModel = undefined;
}
});
});
}
});
watcher1 = $scope.$watch(function()
{
return lxDatePicker.ngModel;
}, init);
$scope.$on('$destroy', function()
{
$timeout.cancel(timer1);
$timeout.cancel(timer2);
if (angular.isFunction(watcher1))
{
watcher1();
}
if (angular.isFunction(watcher2))
{
watcher2();
}
});
////////////
function closeDatePicker()
{
LxDatePickerService.close(lxDatePicker.pickerId);
}
function displayYearSelection()
{
lxDatePicker.yearSelection = true;
timer2 = $timeout(function()
{
var yearSelector = angular.element('.lx-date-picker__year-selector');
var activeYear = yearSelector.find('.lx-date-picker__year--is-active');
yearSelector.scrollTop(yearSelector.scrollTop() + activeYear.position().top - yearSelector.height() / 2 + activeYear.height() / 2);
});
}
function hideYearSelection()
{
lxDatePicker.yearSelection = false;
}
function generateCalendar()
{
lxDatePicker.days = [];
var previousDay = angular.copy(lxDatePicker.ngModelMoment).date(0);
var firstDayOfMonth = angular.copy(lxDatePicker.ngModelMoment).date(1);
var lastDayOfMonth = firstDayOfMonth.clone().endOf('month');
var maxDays = lastDayOfMonth.date();
lxDatePicker.emptyFirstDays = [];
for (var i = firstDayOfMonth.day() === 0 ? 6 : firstDayOfMonth.day() - 1; i > 0; i--)
{
lxDatePicker.emptyFirstDays.push(
{});
}
for (var j = 0; j < maxDays; j++)
{
var date = angular.copy(previousDay.add(1, 'days'));
date.selected = angular.isDefined(lxDatePicker.ngModel) && date.isSame(lxDatePicker.ngModel, 'day');
date.today = date.isSame(moment(), 'day');
if (angular.isDefined(lxDatePicker.minDate) && date.toDate() < lxDatePicker.minDate)
{
date.disabled = true;
}
if (angular.isDefined(lxDatePicker.maxDate) && date.toDate() > lxDatePicker.maxDate)
{
date.disabled = true;
}
lxDatePicker.days.push(date);
}
lxDatePicker.emptyLastDays = [];
for (var k = 7 - (lastDayOfMonth.day() === 0 ? 7 : lastDayOfMonth.day()); k > 0; k--)
{
lxDatePicker.emptyLastDays.push(
{});
}
}
function getDateFormatted()
{
var dateFormatted = lxDatePicker.ngModelMoment.format('llll').replace(lxDatePicker.ngModelMoment.format('LT'), '').trim().replace(lxDatePicker.ngModelMoment.format('YYYY'), '').trim();
var dateFormattedLastChar = dateFormatted.slice(-1);
if (dateFormattedLastChar === ',')
{
dateFormatted = dateFormatted.slice(0, -1);
}
return dateFormatted;
}
function init()
{
moment.locale(lxDatePicker.locale);
lxDatePicker.ngModelMoment = angular.isDefined(lxDatePicker.ngModel) ? moment(angular.copy(lxDatePicker.ngModel)) : moment();
lxDatePicker.days = [];
lxDatePicker.daysOfWeek = [moment.weekdaysMin(1), moment.weekdaysMin(2), moment.weekdaysMin(3), moment.weekdaysMin(4), moment.weekdaysMin(5), moment.weekdaysMin(6), moment.weekdaysMin(0)];
lxDatePicker.years = [];
for (var y = moment().year() - 100; y <= moment().year() + 100; y++)
{
lxDatePicker.years.push(y);
}
generateCalendar();
}
function nextMonth()
{
lxDatePicker.ngModelMoment = lxDatePicker.ngModelMoment.add(1, 'month');
generateCalendar();
}
function openDatePicker()
{
LxDatePickerService.open(lxDatePicker.pickerId);
}
function previousMonth()
{
lxDatePicker.ngModelMoment = lxDatePicker.ngModelMoment.subtract(1, 'month');
generateCalendar();
}
function select(_day)
{
if (!_day.disabled)
{
lxDatePicker.ngModel = _day.toDate();
lxDatePicker.ngModelMoment = angular.copy(_day);
if (angular.isDefined(lxDatePicker.callback))
{
lxDatePicker.callback(
{
newDate: lxDatePicker.ngModel
});
}
if (angular.isDefined(modelController) && lxDatePicker.inputFormat)
{
modelController.$setViewValue(angular.copy(_day).format(lxDatePicker.inputFormat));
modelController.$render();
}
generateCalendar();
}
}
function selectYear(_year)
{
lxDatePicker.yearSelection = false;
lxDatePicker.ngModelMoment = lxDatePicker.ngModelMoment.year(_year);
generateCalendar();
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.date-picker')
.service('LxDatePickerService', LxDatePickerService);
LxDatePickerService.$inject = ['$rootScope', '$timeout', 'LxDepthService', 'LxEventSchedulerService'];
function LxDatePickerService($rootScope, $timeout, LxDepthService, LxEventSchedulerService)
{
var service = this;
var activeDatePickerId;
var datePickerFilter;
var idEventScheduler;
var scopeMap = {};
service.close = closeDatePicker;
service.open = openDatePicker;
service.registerScope = registerScope;
////////////
// Close the date picker overlay: drop the escape-key handler, animate the
// backdrop and picker out, then (after the CSS transition) restore the
// element to its original parent and re-enable body scrolling.
function closeDatePicker(_datePickerId)
{
if (angular.isDefined(idEventScheduler))
{
LxEventSchedulerService.unregister(idEventScheduler);
idEventScheduler = undefined;
}
activeDatePickerId = undefined;
$rootScope.$broadcast('lx-date-picker__close-start', _datePickerId);
datePickerFilter.removeClass('lx-date-picker-filter--is-shown');
scopeMap[_datePickerId].element.removeClass('lx-date-picker--is-shown');
// 600ms matches the hide transition before detaching the DOM nodes.
$timeout(function()
{
angular.element('body').removeClass('no-scroll-date-picker-' + scopeMap[_datePickerId].uuid);
datePickerFilter.remove();
scopeMap[_datePickerId].element
.hide()
.appendTo(scopeMap[_datePickerId].elementParent);
scopeMap[_datePickerId].isOpen = false;
$rootScope.$broadcast('lx-date-picker__close-end', _datePickerId);
}, 600);
}
// Escape key closes the active picker (registered via LxEventSchedulerService).
function onKeyUp(_event)
{
if (_event.keyCode == 27 && angular.isDefined(activeDatePickerId))
{
closeDatePicker(activeDatePickerId);
}
_event.stopPropagation();
}
// Open the picker identified by _datePickerId: lock body scrolling, stack a
// backdrop filter plus the picker element on top of everything currently
// shown (LxDepthService), then animate them in.
function openDatePicker(_datePickerId)
{
LxDepthService.register();
activeDatePickerId = _datePickerId;
angular.element('body').addClass('no-scroll-date-picker-' + scopeMap[_datePickerId].uuid);
datePickerFilter = angular.element('<div/>',
{
class: 'lx-date-picker-filter'
});
datePickerFilter
.css('z-index', LxDepthService.getDepth())
.appendTo('body');
// Clicking the backdrop closes the picker when auto-close is enabled.
if (scopeMap[activeDatePickerId].autoClose)
{
datePickerFilter.on('click', function()
{
closeDatePicker(activeDatePickerId);
});
}
if (scopeMap[activeDatePickerId].escapeClose)
{
idEventScheduler = LxEventSchedulerService.register('keyup', onKeyUp);
}
// The picker itself sits one depth level above the backdrop.
scopeMap[activeDatePickerId].element
.css('z-index', LxDepthService.getDepth() + 1)
.appendTo('body')
.show();
// Short delay so initial styles are applied before the transition classes.
$timeout(function()
{
$rootScope.$broadcast('lx-date-picker__open-start', activeDatePickerId);
scopeMap[activeDatePickerId].isOpen = true;
datePickerFilter.addClass('lx-date-picker-filter--is-shown');
scopeMap[activeDatePickerId].element.addClass('lx-date-picker--is-shown');
}, 100);
$timeout(function()
{
$rootScope.$broadcast('lx-date-picker__open-end', activeDatePickerId);
}, 700);
}
// Remember each date picker's controller keyed by id so open/close can reach
// its element and options later.
function registerScope(_datePickerId, _datePickerScope)
{
scopeMap[_datePickerId] = _datePickerScope.lxDatePicker;
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.dialog')
.directive('lxDialog', lxDialog)
.directive('lxDialogHeader', lxDialogHeader)
.directive('lxDialogContent', lxDialogContent)
.directive('lxDialogFooter', lxDialogFooter)
.directive('lxDialogClose', lxDialogClose);
// <lx-dialog>: modal container. Transcluded content is only rendered while
// the dialog is open (ng-if on lxDialog.isOpen inside the template).
function lxDialog()
{
    // Mirror the element's (possibly interpolated) id attribute onto the
    // controller so open/close events can be matched to this instance.
    function keepIdInSync(scope, element, attrs, ctrl)
    {
        attrs.$observe('id', function(_newId)
        {
            ctrl.id = _newId;
        });
    }

    var definition =
    {
        restrict: 'E',
        template: '<div class="dialog" ng-class="{ \'dialog--l\': !lxDialog.size || lxDialog.size === \'l\', \'dialog--s\': lxDialog.size === \'s\', \'dialog--m\': lxDialog.size === \'m\' }"><div ng-if="lxDialog.isOpen" ng-transclude></div></div>',
        scope:
        {
            autoClose: '=?lxAutoClose',
            escapeClose: '=?lxEscapeClose',
            size: '@?lxSize'
        },
        link: keepIdInSync,
        controller: LxDialogController,
        controllerAs: 'lxDialog',
        bindToController: true,
        replace: true,
        transclude: true
    };

    return definition;
}
LxDialogController.$inject = ['$element', '$interval', '$rootScope', '$scope', '$timeout', '$window', 'LxDepthService', 'LxEventSchedulerService', 'LxUtils'];
function LxDialogController($element, $interval, $rootScope, $scope, $timeout, $window, LxDepthService, LxEventSchedulerService, LxUtils)
{
var lxDialog = this;
var dialogFilter = angular.element('<div/>',
{
class: 'dialog-filter'
});
var dialogHeight;
var dialogInterval;
var dialogScrollable;
var elementParent = $element.parent();
var idEventScheduler;
var resizeDebounce;
var windowHeight;
lxDialog.autoClose = angular.isDefined(lxDialog.autoClose) ? lxDialog.autoClose : true;
lxDialog.escapeClose = angular.isDefined(lxDialog.escapeClose) ? lxDialog.escapeClose : true;
lxDialog.isOpen = false;
lxDialog.uuid = LxUtils.generateUUID();
$scope.$on('lx-dialog__open', function(event, id)
{
if (id === lxDialog.id)
{
open();
}
});
$scope.$on('lx-dialog__close', function(event, id)
{
if (id === lxDialog.id)
{
close();
}
});
$scope.$on('$destroy', function()
{
close();
});
////////////
// Measure header + content + footer and toggle the dialog's fixed-height
// mode when it would exceed the viewport. Cheap no-op when nothing changed.
function checkDialogHeight()
{
var dialog = $element;
var dialogHeader = dialog.find('.dialog__header');
var dialogContent = dialog.find('.dialog__content');
var dialogFooter = dialog.find('.dialog__footer');
// Alert/confirm style dialogs use .dialog__actions instead of a footer.
if (!dialogFooter.length)
{
dialogFooter = dialog.find('.dialog__actions');
}
if (angular.isUndefined(dialogHeader))
{
return;
}
// The constant 60 accounts for the dialog's vertical margins — TODO confirm against the stylesheet.
var heightToCheck = 60 + dialogHeader.outerHeight() + dialogContent.outerHeight() + dialogFooter.outerHeight();
// Skip all DOM work when neither the dialog nor the window changed size.
if (dialogHeight === heightToCheck && windowHeight === $window.innerHeight)
{
return;
}
dialogHeight = heightToCheck;
windowHeight = $window.innerHeight;
if (heightToCheck >= $window.innerHeight)
{
// Too tall for the viewport: pin header/footer, scroll only the content.
dialog.addClass('dialog--is-fixed');
dialogScrollable
.css(
{
top: dialogHeader.outerHeight(),
bottom: dialogFooter.outerHeight()
})
.off('scroll', checkScrollEnd)
.on('scroll', checkScrollEnd);
}
else
{
dialog.removeClass('dialog--is-fixed');
dialogScrollable
.removeAttr('style')
.off('scroll', checkScrollEnd);
}
}
// Debounced (200ms) wrapper around checkDialogHeight for window resizes.
function checkDialogHeightOnResize()
{
if (resizeDebounce)
{
$timeout.cancel(resizeDebounce);
}
resizeDebounce = $timeout(function()
{
checkDialogHeight();
}, 200);
}
// Broadcast 'lx-dialog__scroll-end' when the scrollable area reaches the
// bottom, throttled by detaching the handler for 500ms to avoid bursts.
function checkScrollEnd()
{
if (dialogScrollable.scrollTop() + dialogScrollable.innerHeight() >= dialogScrollable[0].scrollHeight)
{
$rootScope.$broadcast('lx-dialog__scroll-end', lxDialog.id);
dialogScrollable.off('scroll', checkScrollEnd);
$timeout(function()
{
dialogScrollable.on('scroll', checkScrollEnd);
}, 500);
}
}
// Escape key closes the dialog (registered through LxEventSchedulerService).
function onKeyUp(_event)
{
if (_event.keyCode == 27)
{
close();
}
_event.stopPropagation();
}
// Open the dialog: move it to <body> above a backdrop filter, disable body
// scrolling, animate it in, and start polling its height.
function open()
{
if (lxDialog.isOpen)
{
return;
}
LxDepthService.register();
angular.element('body').addClass('no-scroll-dialog-' + lxDialog.uuid);
dialogFilter
.css('z-index', LxDepthService.getDepth())
.appendTo('body');
// Clicking the backdrop closes the dialog unless lx-auto-close is false.
if (lxDialog.autoClose)
{
dialogFilter.on('click', function()
{
close();
});
}
if (lxDialog.escapeClose)
{
idEventScheduler = LxEventSchedulerService.register('keyup', onKeyUp);
}
$element
.css('z-index', LxDepthService.getDepth() + 1)
.appendTo('body')
.show();
// Let the browser paint the initial state before adding the "shown" classes.
$timeout(function()
{
$rootScope.$broadcast('lx-dialog__open-start', lxDialog.id);
lxDialog.isOpen = true;
dialogFilter.addClass('dialog-filter--is-shown');
$element.addClass('dialog--is-shown');
}, 100);
// The transcluded content renders after isOpen flips; wrap the content in a
// scrollable container if the template did not already provide one.
$timeout(function()
{
if ($element.find('.dialog__scrollable').length === 0)
{
$element.find('.dialog__content').wrap(angular.element('<div/>',
{
class: 'dialog__scrollable'
}));
}
dialogScrollable = $element.find('.dialog__scrollable');
}, 200);
$timeout(function()
{
$rootScope.$broadcast('lx-dialog__open-end', lxDialog.id);
}, 700);
// Content can change while open, so re-check the height every 500ms.
dialogInterval = $interval(function()
{
checkDialogHeight();
}, 500);
angular.element($window).on('resize', checkDialogHeightOnResize);
}
// Close the dialog: unhook every handler/timer, animate out, then (after the
// 600ms hide transition) restore the element to its original parent.
function close()
{
if (!lxDialog.isOpen)
{
return;
}
if (angular.isDefined(idEventScheduler))
{
LxEventSchedulerService.unregister(idEventScheduler);
idEventScheduler = undefined;
}
angular.element($window).off('resize', checkDialogHeightOnResize);
$element.find('.dialog__scrollable').off('scroll', checkScrollEnd);
$rootScope.$broadcast('lx-dialog__close-start', lxDialog.id);
if (resizeDebounce)
{
$timeout.cancel(resizeDebounce);
}
$interval.cancel(dialogInterval);
dialogFilter.removeClass('dialog-filter--is-shown');
$element.removeClass('dialog--is-shown');
$timeout(function()
{
angular.element('body').removeClass('no-scroll-dialog-' + lxDialog.uuid);
dialogFilter.remove();
$element
.hide()
.removeClass('dialog--is-fixed')
.appendTo(elementParent);
lxDialog.isOpen = false;
// Reset the cached height so the next open re-measures from scratch.
dialogHeight = undefined;
$rootScope.$broadcast('lx-dialog__close-end', lxDialog.id);
}, 600);
}
}
function lxDialogHeader()
{
return {
restrict: 'E',
template: '<div class="dialog__header" ng-transclude></div>',
replace: true,
transclude: true
};
}
function lxDialogContent()
{
return {
restrict: 'E',
template: '<div class="dialog__scrollable"><div class="dialog__content" ng-transclude></div></div>',
replace: true,
transclude: true
};
}
function lxDialogFooter()
{
return {
restrict: 'E',
template: '<div class="dialog__footer" ng-transclude></div>',
replace: true,
transclude: true
};
}
lxDialogClose.$inject = ['LxDialogService'];
function lxDialogClose(LxDialogService)
{
return {
restrict: 'A',
link: function(scope, element)
{
element.on('click', function()
{
LxDialogService.close(element.parents('.dialog').attr('id'));
});
scope.$on('$destroy', function()
{
element.off();
});
}
};
}
})();
(function()
{
'use strict';
angular
.module('lumx.dialog')
.service('LxDialogService', LxDialogService);
LxDialogService.$inject = ['$rootScope'];
function LxDialogService($rootScope)
{
var service = this;
service.open = open;
service.close = close;
////////////
function open(_dialogId)
{
$rootScope.$broadcast('lx-dialog__open', _dialogId);
}
function close(_dialogId)
{
$rootScope.$broadcast('lx-dialog__close', _dialogId);
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.dropdown')
.directive('lxDropdown', lxDropdown)
.directive('lxDropdownToggle', lxDropdownToggle)
.directive('lxDropdownMenu', lxDropdownMenu)
.directive('lxDropdownFilter', lxDropdownFilter);
// <lx-dropdown>: container pairing a toggle with a positioned menu. Bindings
// live on an isolate scope; link() additionally mirrors a few attributes for
// backward compatibility with pre-isolate-scope usage of the component.
function lxDropdown()
{
return {
restrict: 'E',
templateUrl: 'dropdown.html',
scope:
{
depth: '@?lxDepth',
effect: '@?lxEffect',
escapeClose: '=?lxEscapeClose',
hover: '=?lxHover',
hoverDelay: '=?lxHoverDelay',
offset: '@?lxOffset',
overToggle: '=?lxOverToggle',
position: '@?lxPosition',
width: '@?lxWidth'
},
link: link,
controller: LxDropdownController,
controllerAs: 'lxDropdown',
bindToController: true,
transclude: true
};
function link(scope, element, attrs, ctrl)
{
// Legacy attribute names (without the lx- prefix) are re-applied manually.
var backwardOneWay = ['position', 'width'];
var backwardTwoWay = ['escapeClose', 'overToggle'];
angular.forEach(backwardOneWay, function(attribute)
{
if (angular.isDefined(attrs[attribute]))
{
attrs.$observe(attribute, function(newValue)
{
scope.lxDropdown[attribute] = newValue;
});
}
});
angular.forEach(backwardTwoWay, function(attribute)
{
if (angular.isDefined(attrs[attribute]))
{
// Evaluate the expression against the parent scope and copy changes in.
scope.$watch(function()
{
return scope.$parent.$eval(attrs[attribute]);
}, function(newValue)
{
scope.lxDropdown[attribute] = newValue;
});
}
});
// The id attribute doubles as the dropdown's uuid for LxDropdownService.
attrs.$observe('id', function(_newId)
{
ctrl.uuid = _newId;
});
// Tear down cleanly if the scope dies while the menu is detached to <body>.
scope.$on('$destroy', function()
{
if (ctrl.isOpen)
{
ctrl.closeDropdownMenu();
}
});
}
}
LxDropdownController.$inject = ['$element', '$interval', '$scope', '$timeout', '$window', 'LxDepthService',
'LxDropdownService', 'LxEventSchedulerService', 'LxUtils'
];
function LxDropdownController($element, $interval, $scope, $timeout, $window, LxDepthService,
LxDropdownService, LxEventSchedulerService, LxUtils)
{
var lxDropdown = this;
var dropdownInterval;
var dropdownMenu;
var dropdownToggle;
var idEventScheduler;
var openTimeout;
var positionTarget;
var scrollMask = angular.element('<div/>',
{
class: 'scroll-mask'
});
var enableBodyScroll;
lxDropdown.closeDropdownMenu = closeDropdownMenu;
lxDropdown.openDropdownMenu = openDropdownMenu;
lxDropdown.registerDropdownMenu = registerDropdownMenu;
lxDropdown.registerDropdownToggle = registerDropdownToggle;
lxDropdown.toggle = toggle;
lxDropdown.uuid = LxUtils.generateUUID();
lxDropdown.effect = angular.isDefined(lxDropdown.effect) ? lxDropdown.effect : 'expand';
lxDropdown.escapeClose = angular.isDefined(lxDropdown.escapeClose) ? lxDropdown.escapeClose : true;
lxDropdown.hasToggle = false;
lxDropdown.isOpen = false;
lxDropdown.overToggle = angular.isDefined(lxDropdown.overToggle) ? lxDropdown.overToggle : false;
lxDropdown.position = angular.isDefined(lxDropdown.position) ? lxDropdown.position : 'left';
$scope.$on('lx-dropdown__open', function(_event, _params)
{
if (_params.uuid === lxDropdown.uuid && !lxDropdown.isOpen)
{
LxDropdownService.closeActiveDropdown();
LxDropdownService.registerActiveDropdownUuid(lxDropdown.uuid);
positionTarget = _params.target;
registerDropdownToggle(angular.element(positionTarget));
openDropdownMenu();
}
});
$scope.$on('lx-dropdown__close', function(_event, _params)
{
if (_params.uuid === lxDropdown.uuid && lxDropdown.isOpen)
{
closeDropdownMenu();
}
});
$scope.$on('$destroy', function()
{
$timeout.cancel(openTimeout);
});
////////////
// Close the dropdown menu with the configured effect ('expand' | 'fade' |
// 'none'), re-attach it under the directive element, and release the scroll
// lock plus the escape-key handler.
function closeDropdownMenu()
{
$interval.cancel(dropdownInterval);
LxDropdownService.resetActiveDropdownUuid();
var velocityProperties;
var velocityEasing;
scrollMask.remove();
// Restore body scrolling; enableBodyScroll is the undo function returned
// by LxUtils.disableBodyScroll() when the menu was opened.
if (angular.isFunction(enableBodyScroll)) {
enableBodyScroll();
}
enableBodyScroll = undefined;
if (lxDropdown.hasToggle)
{
dropdownToggle
.off('wheel')
.css('z-index', '');
}
dropdownMenu
.off('wheel')
.css(
{
overflow: 'hidden'
});
if (lxDropdown.effect === 'expand')
{
velocityProperties = {
width: 0,
height: 0
};
velocityEasing = 'easeOutQuint';
}
else if (lxDropdown.effect === 'fade')
{
velocityProperties = {
opacity: 0
};
velocityEasing = 'linear';
}
if (lxDropdown.effect === 'expand' || lxDropdown.effect === 'fade')
{
dropdownMenu.velocity(velocityProperties,
{
duration: 200,
easing: velocityEasing,
complete: function()
{
dropdownMenu
.removeAttr('style')
.removeClass('dropdown-menu--is-open')
.appendTo($element.find('.dropdown'));
// The velocity callback runs outside Angular; $apply the state change.
$scope.$apply(function()
{
lxDropdown.isOpen = false;
if (lxDropdown.escapeClose)
{
LxEventSchedulerService.unregister(idEventScheduler);
idEventScheduler = undefined;
}
});
}
});
}
else if (lxDropdown.effect === 'none')
{
dropdownMenu
.removeAttr('style')
.removeClass('dropdown-menu--is-open')
.appendTo($element.find('.dropdown'));
lxDropdown.isOpen = false;
if (lxDropdown.escapeClose)
{
LxEventSchedulerService.unregister(idEventScheduler);
idEventScheduler = undefined;
}
}
}
// Viewport space (px) above and below the toggle, plus which side has more
// room. With lx-over-toggle the menu may cover the toggle, so the toggle's
// own height counts as available space on each side.
function getAvailableHeight()
{
var availableHeightOnTop;
var availableHeightOnBottom;
var direction;
var dropdownToggleHeight = dropdownToggle.outerHeight();
var dropdownToggleTop = dropdownToggle.offset().top - angular.element($window).scrollTop();
var windowHeight = $window.innerHeight;
if (lxDropdown.overToggle)
{
availableHeightOnTop = dropdownToggleTop + dropdownToggleHeight;
availableHeightOnBottom = windowHeight - dropdownToggleTop;
}
else
{
availableHeightOnTop = dropdownToggleTop;
availableHeightOnBottom = windowHeight - (dropdownToggleTop + dropdownToggleHeight);
}
if (availableHeightOnTop > availableHeightOnBottom)
{
direction = 'top';
}
else
{
direction = 'bottom';
}
return {
top: availableHeightOnTop,
bottom: availableHeightOnBottom,
direction: direction
};
}
// Compute and apply the menu's horizontal placement (lx-position left/right/
// center; lx-width absolute or 'NN%' of the toggle width) and vertical
// placement (whichever side has more room, shifted by lx-offset).
// Returns the available height on the chosen side.
function initDropdownPosition()
{
var availableHeight = getAvailableHeight();
var dropdownMenuWidth;
var dropdownMenuLeft;
var dropdownMenuRight;
var dropdownToggleWidth = dropdownToggle.outerWidth();
var dropdownToggleHeight = dropdownToggle.outerHeight();
var dropdownToggleTop = dropdownToggle.offset().top - angular.element($window).scrollTop();
var windowWidth = $window.innerWidth;
var windowHeight = $window.innerHeight;
if (angular.isDefined(lxDropdown.width))
{
// 'NN%' sizes the menu relative to the toggle's width.
if (lxDropdown.width.indexOf('%') > -1)
{
dropdownMenuWidth = dropdownToggleWidth * (lxDropdown.width.slice(0, -1) / 100);
}
else
{
dropdownMenuWidth = lxDropdown.width;
}
}
else
{
dropdownMenuWidth = 'auto';
}
if (lxDropdown.position === 'left')
{
dropdownMenuLeft = dropdownToggle.offset().left;
dropdownMenuRight = 'auto';
}
else if (lxDropdown.position === 'right')
{
dropdownMenuLeft = 'auto';
dropdownMenuRight = windowWidth - dropdownToggle.offset().left - dropdownToggleWidth;
}
else if (lxDropdown.position === 'center')
{
// NOTE(review): with the default width 'auto' this arithmetic yields NaN;
// 'center' appears to assume lx-width is set — confirm with callers.
dropdownMenuLeft = (dropdownToggle.offset().left + (dropdownToggleWidth / 2)) - (dropdownMenuWidth / 2);
dropdownMenuRight = 'auto';
}
dropdownMenu.css(
{
left: dropdownMenuLeft,
right: dropdownMenuRight,
width: dropdownMenuWidth
});
// ~~ coerces the lx-offset attribute string to an integer (0 when unset).
if (availableHeight.direction === 'top')
{
dropdownMenu.css(
{
bottom: lxDropdown.overToggle ? (windowHeight - dropdownToggleTop - dropdownToggleHeight) : (windowHeight - dropdownToggleTop + ~~lxDropdown.offset)
});
return availableHeight.top;
}
else if (availableHeight.direction === 'bottom')
{
dropdownMenu.css(
{
top: lxDropdown.overToggle ? dropdownToggleTop : (dropdownToggleTop + dropdownToggleHeight + ~~lxDropdown.offset)
});
return availableHeight.bottom;
}
}
// Open the menu: raise it above a scroll mask, lock body scrolling, position
// it relative to the toggle, then animate with the configured effect.
function openDropdownMenu()
{
lxDropdown.isOpen = true;
LxDepthService.register();
scrollMask
.css('z-index', LxDepthService.getDepth())
.appendTo('body');
// Block wheel scrolling everywhere except inside the menu itself.
scrollMask.on('wheel', function preventDefault(e) {
e.preventDefault();
});
enableBodyScroll = LxUtils.disableBodyScroll();
if (lxDropdown.hasToggle)
{
dropdownToggle
.css('z-index', LxDepthService.getDepth() + 1)
.on('wheel', function preventDefault(e) {
e.preventDefault();
});
}
dropdownMenu
.addClass('dropdown-menu--is-open')
.css('z-index', LxDepthService.getDepth() + 1)
.appendTo('body');
// Let the menu scroll internally, but stop the wheel at its edges so the
// page behind does not scroll.
dropdownMenu.on('wheel', function preventDefault(e) {
var d = e.originalEvent.deltaY;
if (d < 0 && dropdownMenu.scrollTop() === 0) {
e.preventDefault();
}
else {
if (d > 0 && (dropdownMenu.scrollTop() == dropdownMenu.get(0).scrollHeight - dropdownMenu.innerHeight())) {
e.preventDefault();
}
}
});
if (lxDropdown.escapeClose)
{
idEventScheduler = LxEventSchedulerService.register('keyup', onKeyUp);
}
// Deferred so the menu is attached and measurable before positioning it.
openTimeout = $timeout(function()
{
var availableHeight = initDropdownPosition() - ~~lxDropdown.offset;
var dropdownMenuHeight = dropdownMenu.outerHeight();
var dropdownMenuWidth = dropdownMenu.outerWidth();
// NOTE(review): enoughHeight is computed but never read.
var enoughHeight = true;
if (availableHeight < dropdownMenuHeight)
{
enoughHeight = false;
// Clamp the menu to the space available in its opening direction.
dropdownMenuHeight = availableHeight;
}
if (lxDropdown.effect === 'expand')
{
// Grow from 0x0 while the inner content keeps its final size.
dropdownMenu.css(
{
width: 0,
height: 0,
opacity: 1,
overflow: 'hidden'
});
dropdownMenu.find('.dropdown-menu__content').css(
{
width: dropdownMenuWidth,
height: dropdownMenuHeight
});
dropdownMenu.velocity(
{
width: dropdownMenuWidth
},
{
duration: 200,
easing: 'easeOutQuint',
queue: false
});
dropdownMenu.velocity(
{
height: dropdownMenuHeight
},
{
duration: 500,
easing: 'easeOutQuint',
queue: false,
complete: function()
{
dropdownMenu.css(
{
overflow: 'auto'
});
if (angular.isUndefined(lxDropdown.width))
{
dropdownMenu.css(
{
width: 'auto'
});
}
$timeout(updateDropdownMenuHeight);
dropdownMenu.find('.dropdown-menu__content').removeAttr('style');
// Content may change while open; re-check the fit twice a second.
dropdownInterval = $interval(updateDropdownMenuHeight, 500);
}
});
}
else if (lxDropdown.effect === 'fade')
{
dropdownMenu.css(
{
height: dropdownMenuHeight
});
dropdownMenu.velocity(
{
opacity: 1,
},
{
duration: 200,
easing: 'linear',
queue: false,
complete: function()
{
$timeout(updateDropdownMenuHeight);
dropdownInterval = $interval(updateDropdownMenuHeight, 500);
}
});
}
else if (lxDropdown.effect === 'none')
{
dropdownMenu.css(
{
opacity: 1
});
$timeout(updateDropdownMenuHeight);
dropdownInterval = $interval(updateDropdownMenuHeight, 500);
}
});
}
// Escape key closes the menu (registered via LxEventSchedulerService).
function onKeyUp(_event)
{
if (_event.keyCode == 27)
{
closeDropdownMenu();
}
_event.stopPropagation();
}
// Called by the lx-dropdown-menu directive to hand over its element.
function registerDropdownMenu(_dropdownMenu)
{
dropdownMenu = _dropdownMenu;
}
// Called by lx-dropdown-toggle, or with an external target element when the
// dropdown is opened through LxDropdownService. hasToggle stays false for
// external targets so their z-index/wheel handling is left untouched.
function registerDropdownToggle(_dropdownToggle)
{
if (!positionTarget)
{
lxDropdown.hasToggle = true;
}
dropdownToggle = _dropdownToggle;
}
// Open the menu if closed, close it if open.
function toggle()
{
if (!lxDropdown.isOpen)
{
openDropdownMenu();
}
else
{
closeDropdownMenu();
}
}
// Re-measure the menu content and pin the menu to the viewport edge when it
// would overflow in its opening direction; otherwise let it size naturally.
function updateDropdownMenuHeight()
{
    // When opened via LxDropdownService, re-resolve the external target in
    // case it moved or was replaced.
    if (positionTarget)
    {
        registerDropdownToggle(angular.element(positionTarget));
    }

    var availableHeight = getAvailableHeight();
    var contentHeight = dropdownMenu.find('.dropdown-menu__content').outerHeight();

    dropdownMenu.css(
    {
        height: 'auto'
    });

    // ~~ coerces the lx-offset attribute string to an integer (0 when unset).
    var overflows = (availableHeight[availableHeight.direction] - ~~lxDropdown.offset) < contentHeight;
    var edgeValue = overflows ? 0 : 'auto';

    if (availableHeight.direction === 'top')
    {
        dropdownMenu.css(
        {
            top: edgeValue
        });
    }
    else if (availableHeight.direction === 'bottom')
    {
        dropdownMenu.css(
        {
            bottom: edgeValue
        });
    }
}
}
lxDropdownToggle.$inject = ['$timeout', 'LxDropdownService'];
// <lx-dropdown-toggle>: the element that opens/closes the parent dropdown.
// In hover mode the menu opens on mouseenter after lx-hover-delay and closes
// on mouseleave unless the pointer has moved onto the menu in the meantime.
function lxDropdownToggle($timeout, LxDropdownService)
{
return {
restrict: 'AE',
templateUrl: 'dropdown-toggle.html',
require: '^lxDropdown',
scope: true,
link: link,
replace: true,
transclude: true
};
function link(scope, element, attrs, ctrl)
{
// hoverTimeout[0] = delayed open timer, hoverTimeout[1] = delayed close timer.
var hoverTimeout = [];
var mouseEvent = ctrl.hover ? 'mouseenter' : 'click';
ctrl.registerDropdownToggle(element);
element.on(mouseEvent, function(_event)
{
// Touch devices fire a synthetic mouseenter; ignore it in hover mode.
if (mouseEvent === 'mouseenter' && 'ontouchstart' in window) {
return;
}
// In click mode, keep the click from reaching the document-level
// handler that closes the active dropdown.
if (!ctrl.hover)
{
_event.stopPropagation();
}
LxDropdownService.closeActiveDropdown();
LxDropdownService.registerActiveDropdownUuid(ctrl.uuid);
if (ctrl.hover)
{
ctrl.mouseOnToggle = true;
if (!ctrl.isOpen)
{
hoverTimeout[0] = $timeout(function()
{
scope.$apply(function()
{
ctrl.openDropdownMenu();
});
}, ctrl.hoverDelay);
}
}
else
{
scope.$apply(function()
{
ctrl.toggle();
});
}
});
if (ctrl.hover)
{
element.on('mouseleave', function()
{
ctrl.mouseOnToggle = false;
$timeout.cancel(hoverTimeout[0]);
// Grace period: stay open if the pointer reaches the menu in time.
hoverTimeout[1] = $timeout(function()
{
if (!ctrl.mouseOnMenu)
{
scope.$apply(function()
{
ctrl.closeDropdownMenu();
});
}
}, ctrl.hoverDelay);
});
}
scope.$on('$destroy', function()
{
element.off();
if (ctrl.hover)
{
$timeout.cancel(hoverTimeout[0]);
$timeout.cancel(hoverTimeout[1]);
}
});
}
}
lxDropdownMenu.$inject = ['$timeout'];
// <lx-dropdown-menu>: registers its element with the parent lx-dropdown and,
// in hover mode, keeps the menu open while the pointer is over it.
function lxDropdownMenu($timeout)
{
return {
restrict: 'E',
templateUrl: 'dropdown-menu.html',
require: ['lxDropdownMenu', '^lxDropdown'],
scope: true,
link: link,
controller: LxDropdownMenuController,
controllerAs: 'lxDropdownMenu',
bindToController: true,
replace: true,
transclude: true
};
function link(scope, element, attrs, ctrls)
{
var hoverTimeout;
// ctrls[0] = this directive's controller, ctrls[1] = parent lx-dropdown.
ctrls[1].registerDropdownMenu(element);
ctrls[0].setParentController(ctrls[1]);
if (ctrls[1].hover)
{
element.on('mouseenter', function()
{
ctrls[1].mouseOnMenu = true;
});
element.on('mouseleave', function()
{
ctrls[1].mouseOnMenu = false;
// Close after the hover delay unless the pointer is back on the toggle.
hoverTimeout = $timeout(function()
{
if (!ctrls[1].mouseOnToggle)
{
scope.$apply(function()
{
ctrls[1].closeDropdownMenu();
});
}
}, ctrls[1].hoverDelay);
});
}
scope.$on('$destroy', function()
{
if (ctrls[1].hover)
{
element.off();
$timeout.cancel(hoverTimeout);
}
});
}
}
LxDropdownMenuController.$inject = ['$element'];
function LxDropdownMenuController($element)
{
var lxDropdownMenu = this;
lxDropdownMenu.setParentController = setParentController;
////////////
function addDropdownDepth()
{
if (lxDropdownMenu.parentCtrl.depth)
{
$element.addClass('dropdown-menu--depth-' + lxDropdownMenu.parentCtrl.depth);
}
else
{
$element.addClass('dropdown-menu--depth-1');
}
}
function setParentController(_parentCtrl)
{
lxDropdownMenu.parentCtrl = _parentCtrl;
addDropdownDepth();
}
}
lxDropdownFilter.$inject = ['$timeout'];
// lx-dropdown-filter attribute: keeps clicks inside the filter row from
// closing the menu and focuses its text input shortly after it appears.
function lxDropdownFilter($timeout)
{
return {
restrict: 'A',
link: link
};
function link(scope, element)
{
var focusTimeout;
// Without this, the click would bubble to the document-level handler
// that closes the active dropdown.
element.on('click', function(_event)
{
_event.stopPropagation();
});
// Focus once the menu's open animation has had time to run.
focusTimeout = $timeout(function()
{
element.find('input').focus();
}, 200);
scope.$on('$destroy', function()
{
$timeout.cancel(focusTimeout);
element.off();
});
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.dropdown')
.service('LxDropdownService', LxDropdownService);
LxDropdownService.$inject = ['$document', '$rootScope', '$timeout'];
function LxDropdownService($document, $rootScope, $timeout)
{
var service = this;
var activeDropdownUuid;
service.close = close;
service.closeActiveDropdown = closeActiveDropdown;
service.open = open;
service.isOpen = isOpen;
service.registerActiveDropdownUuid = registerActiveDropdownUuid;
service.resetActiveDropdownUuid = resetActiveDropdownUuid;
$document.on('click', closeActiveDropdown);
////////////
function close(_uuid)
{
$rootScope.$broadcast('lx-dropdown__close',
{
uuid: _uuid
});
}
function closeActiveDropdown()
{
$rootScope.$broadcast('lx-dropdown__close',
{
uuid: activeDropdownUuid
});
}
function open(_uuid, _target)
{
$rootScope.$broadcast('lx-dropdown__open',
{
uuid: _uuid,
target: _target
});
}
function isOpen(_uuid)
{
return activeDropdownUuid === _uuid;
}
function registerActiveDropdownUuid(_uuid)
{
activeDropdownUuid = _uuid;
}
function resetActiveDropdownUuid()
{
activeDropdownUuid = undefined;
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.fab')
.directive('lxFab', lxFab)
.directive('lxFabTrigger', lxFabTrigger)
.directive('lxFabActions', lxFabActions);
// <lx-fab>: floating action button container; keeps the controller's
// direction in sync with the lx-direction attribute.
function lxFab()
{
    // lx-direction may be interpolated, so observe it rather than read once.
    function observeDirection(scope, element, attrs, ctrl)
    {
        attrs.$observe('lxDirection', function(newDirection)
        {
            ctrl.setFabDirection(newDirection);
        });
    }

    return {
        restrict: 'E',
        templateUrl: 'fab.html',
        scope: true,
        link: observeDirection,
        controller: LxFabController,
        controllerAs: 'lxFab',
        bindToController: true,
        transclude: true,
        replace: true
    };
}
function LxFabController()
{
var lxFab = this;
lxFab.setFabDirection = setFabDirection;
////////////
function setFabDirection(_direction)
{
lxFab.lxDirection = _direction;
}
}
function lxFabTrigger()
{
return {
restrict: 'E',
require: '^lxFab',
templateUrl: 'fab-trigger.html',
transclude: true,
replace: true
};
}
function lxFabActions()
{
return {
restrict: 'E',
require: '^lxFab',
templateUrl: 'fab-actions.html',
link: link,
transclude: true,
replace: true
};
function link(scope, element, attrs, ctrl)
{
scope.parentCtrl = ctrl;
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.file-input')
.directive('lxFileInput', lxFileInput);
// <lx-file-input>: styled file picker. lx-label supplies the field label and
// lx-callback receives the selected File object.
function lxFileInput()
{
return {
restrict: 'E',
templateUrl: 'file-input.html',
scope:
{
label: '@lxLabel',
callback: '&?lxCallback'
},
link: link,
controller: LxFileInputController,
controllerAs: 'lxFileInput',
bindToController: true,
replace: true
};
function link(scope, element, attrs, ctrl)
{
var input = element.find('input');
// Forward file selection to the controller; drop the focus styling on blur.
input
.on('change', ctrl.updateModel)
.on('blur', function()
{
element.removeClass('input-file--is-focus');
});
scope.$on('$destroy', function()
{
input.off();
});
}
}
LxFileInputController.$inject = ['$element', '$scope', '$timeout'];
// Controller for <lx-file-input>: reflects the chosen file name in the
// component's state classes and forwards the File object to lx-callback.
function LxFileInputController($element, $scope, $timeout)
{
var lxFileInput = this;
var input = $element.find('input');
var timer;
lxFileInput.updateModel = updateModel;
$scope.$on('$destroy', function()
{
$timeout.cancel(timer);
});
////////////
// Derive the display name from the input value and toggle the active/focus
// classes. Browsers prefix the value with a fake path, which is stripped.
function setFileName()
{
if (input.val())
{
lxFileInput.fileName = input.val().replace(/C:\\fakepath\\/i, '');
$element.addClass('input-file--is-focus');
$element.addClass('input-file--is-active');
}
else
{
lxFileInput.fileName = undefined;
$element.removeClass('input-file--is-active');
}
// Clear the input so selecting the same file again re-fires 'change'.
input.val(undefined);
}
// 'change' handler: report the selected File through lx-callback, then
// update the label inside a $timeout so the change applies within a digest.
function updateModel()
{
if (angular.isDefined(lxFileInput.callback))
{
lxFileInput.callback(
{
newFile: input[0].files[0]
});
}
timer = $timeout(setFileName);
}
}
})();
(function()
{
'use strict';
angular
.module('lumx.icon')
.directive('lxIcon', lxIcon);
// <lx-icon>: Material Design Icons wrapper. The rendered CSS classes are
// computed by LxIconController.getClass() from the bound attributes.
function lxIcon()
{
    var definition =
    {
        restrict: 'E',
        templateUrl: 'icon.html',
        scope:
        {
            color: '@?lxColor',
            id: '@lxId',
            size: '@?lxSize',
            type: '@?lxType'
        },
        controller: LxIconController,
        controllerAs: 'lxIcon',
        bindToController: true,
        replace: true
    };

    return definition;
}
// Controller for <lx-icon>: derives the element's CSS class list from the
// bound attributes.
function LxIconController()
{
    var lxIcon = this;

    lxIcon.getClass = getClass;

    ////////////

    // Returns ['mdi-<id>'] plus an 'icon--<modifier>' entry for each of
    // size, color and type that was provided — in that order.
    function getClass()
    {
        var iconClass = ['mdi-' + lxIcon.id];
        var modifiers = [lxIcon.size, lxIcon.color, lxIcon.type];

        for (var i = 0; i < modifiers.length; i++)
        {
            if (typeof modifiers[i] !== 'undefined')
            {
                iconClass.push('icon--' + modifiers[i]);
            }
        }

        return iconClass;
    }
}
})();
(function()
{
'use strict';
angular
.module('lumx.notification')
.service('LxNotificationService', LxNotificationService);
LxNotificationService.$inject = ['$injector', '$rootScope', '$timeout', 'LxDepthService', 'LxEventSchedulerService'];
function LxNotificationService($injector, $rootScope, $timeout, LxDepthService, LxEventSchedulerService)
{
var service = this;
var dialogFilter;
var dialog;
var idEventScheduler;
var notificationList = [];
var actionClicked = false;
service.alert = showAlertDialog;
service.confirm = showConfirmDialog;
service.error = notifyError;
service.info = notifyInfo;
service.notify = notify;
service.success = notifySuccess;
service.warning = notifyWarning;
////////////
//
// NOTIFICATION
//
// Hide and remove a toast notification, sliding the ones stacked above it
// back down by the freed height.
// _notification: entry from notificationList ({ elem, margin, height }).
// _callback: invoked after removal with whether the action button was clicked.
function deleteNotification(_notification, _callback)
{
_callback = (!angular.isFunction(_callback)) ? angular.noop : _callback;
var notifIndex = notificationList.indexOf(_notification);
// Space freed by this notification: its height plus the 24px stacking gap.
var dnOffset = angular.isDefined(notificationList[notifIndex]) ? 24 + notificationList[notifIndex].height : 24;
for (var idx = 0; idx < notifIndex; idx++)
{
if (notificationList.length > 1)
{
notificationList[idx].margin -= dnOffset;
notificationList[idx].elem.css('marginBottom', notificationList[idx].margin + 'px');
}
}
_notification.elem.removeClass('notification--is-shown');
// Wait for the hide transition before detaching the element.
$timeout(function()
{
_notification.elem.remove();
// Find index again because notificationList may have changed
notifIndex = notificationList.indexOf(_notification);
if (notifIndex != -1)
{
notificationList.splice(notifIndex, 1);
}
_callback(actionClicked);
// Fix: statement previously relied on automatic semicolon insertion.
actionClicked = false;
}, 400);
}
// Computed height in px of a raw DOM element.
function getElementHeight(_elem)
{
return parseFloat(window.getComputedStyle(_elem, null).height);
}
// After a new notification is appended, push every older notification up by
// the new one's height plus the 24px gap (stacking is done via bottom margins).
function moveNotificationUp()
{
var newNotifIndex = notificationList.length - 1;
// Measure the freshly attached element and cache its height for deletion.
notificationList[newNotifIndex].height = getElementHeight(notificationList[newNotifIndex].elem[0]);
var upOffset = 0;
for (var idx = newNotifIndex; idx >= 0; idx--)
{
if (notificationList.length > 1 && idx !== newNotifIndex)
{
upOffset = 24 + notificationList[newNotifIndex].height;
notificationList[idx].margin += upOffset;
notificationList[idx].elem.css('marginBottom', notificationList[idx].margin + 'px');
}
}
}
// Show a toast notification.
// _text: HTML content; _icon: mdi icon name; _sticky: never auto-dismiss;
// _color: theme suffix (red/green/...); _action: label for an action button;
// _callback(actionClicked): fired once the toast has been removed;
// _delay: auto-dismiss time in ms (defaults to 6000).
function notify(_text, _icon, _sticky, _color, _action, _callback, _delay)
{
// $compile is fetched lazily via $injector — presumably to avoid a circular
// dependency at service construction; confirm before changing.
var $compile = $injector.get('$compile');
LxDepthService.register();
var notification = angular.element('<div/>',
{
class: 'notification'
});
var notificationText = angular.element('<span/>',
{
class: 'notification__content',
html: _text
});
var notificationTimeout;
var notificationDelay = _delay || 6000;
if (angular.isDefined(_icon))
{
var notificationIcon = angular.element('<i/>',
{
class: 'notification__icon mdi mdi-' + _icon
});
notification
.addClass('notification--has-icon')
.append(notificationIcon);
}
if (angular.isDefined(_color))
{
notification.addClass('notification--' + _color);
}
notification.append(notificationText);
if (angular.isDefined(_action))
{
var notificationAction = angular.element('<button/>',
{
class: 'notification__action btn btn--m btn--flat',
html: _action
});
if (angular.isDefined(_color))
{
notificationAction.addClass('btn--' + _color);
}
else
{
notificationAction.addClass('btn--white');
}
// Compile so the lx-ripple directive is linked on the dynamic button.
notificationAction.attr('lx-ripple', '');
$compile(notificationAction)($rootScope);
notificationAction.bind('click', function()
{
actionClicked = true;
});
notification
.addClass('notification--has-action')
.append(notificationAction);
}
notification
.css('z-index', LxDepthService.getDepth())
.appendTo('body');
// Short delay so the show transition runs after the element is attached.
$timeout(function()
{
notification.addClass('notification--is-shown');
}, 100);
var data = {
elem: notification,
margin: 0
};
notificationList.push(data);
moveNotificationUp();
// Clicking anywhere on the toast dismisses it immediately.
notification.bind('click', function()
{
deleteNotification(data, _callback);
if (angular.isDefined(notificationTimeout))
{
$timeout.cancel(notificationTimeout);
}
});
if (angular.isUndefined(_sticky) || !_sticky)
{
notificationTimeout = $timeout(function()
{
deleteNotification(data, _callback);
}, notificationDelay);
}
}
// Themed convenience wrappers around notify() with a fixed icon/color pair.
// Kept as function declarations: they are assigned to the service surface
// (service.error = notifyError, ...) above their definitions via hoisting.
function notifyError(_text, _sticky)
{
notify(_text, 'alert-circle', _sticky, 'red');
}
function notifyInfo(_text, _sticky)
{
notify(_text, 'information-outline', _sticky, 'blue');
}
function notifySuccess(_text, _sticky)
{
notify(_text, 'check', _sticky, 'green');
}
function notifyWarning(_text, _sticky)
{
notify(_text, 'alert', _sticky, 'orange');
}
//
// ALERT & CONFIRM
//
function buildDialogActions(_buttons, _callback, _unbind)
{
var $compile = $injector.get('$compile');
var dialogActions = angular.element('<div/>',
{
class: 'dialog__actions'
});
var dialogLastBtn = angular.element('<button/>',
{
class: 'btn btn--m btn--blue btn--flat',
text: _buttons.ok
});
if (angular.isDefined(_buttons.cancel))
{
var dialogFirstBtn = angular.element('<button/>',
{
class: 'btn btn--m btn--red btn--flat',
text: _buttons.cancel
});
dialogFirstBtn.attr('lx-ripple', '');
$compile(dialogFirstBtn)($rootScope);
dialogActions.append(dialogFirstBtn);
dialogFirstBtn.bind('click', function()
{
_callback(false);
closeDialog();
});
}
dialogLastBtn.attr('lx-ripple', '');
$compile(dialogLastBtn)($rootScope);
dialogActions.append(dialogLastBtn);
dialogLastBtn.bind('click', function()
{
_callback(true);
closeDialog();
});
if (!_unbind)
{
idEventScheduler = LxEventSchedulerService.register('keyup', function(event)
{
if (event.keyCode == 13)
{
_callback(true);
closeDialog();
}
else if (event.keyCode == 27)
{
_callback(angular.isUndefined(_buttons.cancel));
closeDialog();
}
event.stopPropagation();
});
}
return dialogActions;
}
function buildDialogContent(_text)
{
    // Body paragraph of an alert/confirm dialog.
    var content = angular.element('<div/>',
    {
        class: 'dialog__content p++ pt0 tc-black-2',
        text: _text
    });
    return content;
}
function buildDialogHeader(_title)
{
    // Title bar of an alert/confirm dialog.
    var header = angular.element('<div/>',
    {
        class: 'dialog__header p++ fs-title',
        text: _title
    });
    return header;
}
/*
 * Hides, then removes, the currently open dialog and its backdrop.
 * Also unregisters the global keyup handler installed by
 * buildDialogActions (tracked in the closure-level `idEventScheduler`).
 */
function closeDialog()
{
    if (angular.isDefined(idEventScheduler))
    {
        // Deferred by 1ms so the keyup that triggered the close is not
        // re-dispatched to whatever handler becomes active next.
        $timeout(function()
        {
            LxEventSchedulerService.unregister(idEventScheduler);
            idEventScheduler = undefined;
        }, 1);
    }
    dialogFilter.removeClass('dialog-filter--is-shown');
    dialog.removeClass('dialog--is-shown');
    // 600ms matches the CSS hide transition; remove the nodes afterwards.
    $timeout(function()
    {
        dialogFilter.remove();
        dialog.remove();
    }, 600);
}
/*
 * Displays a single-button alert dialog.
 *
 * _title/_text - header and body copy.
 * _button      - label of the single (ok) button.
 * _callback    - invoked with true when the dialog is acknowledged.
 * _unbind      - when falsy, Enter/Escape are handled globally
 *                (see buildDialogActions).
 *
 * Stores the created nodes in the closure-level `dialog`/`dialogFilter`
 * so closeDialog() can find them.
 */
function showAlertDialog(_title, _text, _button, _callback, _unbind)
{
    // Reserve a z-index layer above everything currently shown.
    LxDepthService.register();
    dialogFilter = angular.element('<div/>',
    {
        class: 'dialog-filter'
    });
    dialog = angular.element('<div/>',
    {
        class: 'dialog dialog--alert'
    });
    var dialogHeader = buildDialogHeader(_title);
    var dialogContent = buildDialogContent(_text);
    var dialogActions = buildDialogActions(
    {
        ok: _button
    }, _callback, _unbind);
    dialogFilter
        .css('z-index', LxDepthService.getDepth())
        .appendTo('body');
    dialog
        .append(dialogHeader)
        .append(dialogContent)
        .append(dialogActions)
        .css('z-index', LxDepthService.getDepth() + 1)
        .appendTo('body')
        .show()
        .focus();
    // Let the browser paint the initial state, then animate in (100ms delay);
    // blur whatever had focus so keyboard events go to the dialog handler.
    $timeout(function()
    {
        angular.element(document.activeElement).blur();
        dialogFilter.addClass('dialog-filter--is-shown');
        dialog.addClass('dialog--is-shown');
    }, 100);
}
/*
 * Displays a two-button confirm dialog.
 *
 * _buttons  - { ok: label, cancel: label } (cancel is optional; without it
 *             this behaves like an alert).
 * _callback - invoked with true (ok) or false (cancel).
 *
 * NOTE(review): the dialog node reuses the 'dialog--alert' CSS class here,
 * same as showAlertDialog — presumably intentional styling reuse; confirm
 * against the stylesheet.
 */
function showConfirmDialog(_title, _text, _buttons, _callback, _unbind)
{
    LxDepthService.register();
    dialogFilter = angular.element('<div/>',
    {
        class: 'dialog-filter'
    });
    dialog = angular.element('<div/>',
    {
        class: 'dialog dialog--alert'
    });
    var dialogHeader = buildDialogHeader(_title);
    var dialogContent = buildDialogContent(_text);
    var dialogActions = buildDialogActions(_buttons, _callback, _unbind);
    dialogFilter
        .css('z-index', LxDepthService.getDepth())
        .appendTo('body');
    dialog
        .append(dialogHeader)
        .append(dialogContent)
        .append(dialogActions)
        .css('z-index', LxDepthService.getDepth() + 1)
        .appendTo('body')
        .show()
        .focus();
    // Same show sequence as showAlertDialog: paint, then animate in.
    $timeout(function()
    {
        angular.element(document.activeElement).blur();
        dialogFilter.addClass('dialog-filter--is-shown');
        dialog.addClass('dialog--is-shown');
    }, 100);
}
}
})();
(function()
{
    'use strict';

    angular
        .module('lumx.progress')
        .directive('lxProgress', lxProgress);

    // <lx-progress> — linear or circular progress indicator.
    function lxProgress()
    {
        return {
            bindToController: true,
            controller: LxProgressController,
            controllerAs: 'lxProgress',
            replace: true,
            restrict: 'E',
            scope:
            {
                lxColor: '@?',
                lxDiameter: '@?',
                lxType: '@',
                lxValue: '@'
            },
            templateUrl: 'progress.html'
        };
    }

    function LxProgressController()
    {
        var vm = this;

        vm.getCircularProgressValue = getCircularProgressValue;
        vm.getLinearProgressValue = getLinearProgressValue;
        vm.getProgressDiameter = getProgressDiameter;

        init();

        ////////////

        // Stroke dash pattern driving the circular gauge (value * 1.26 ≈ arc length).
        function getCircularProgressValue()
        {
            if (angular.isUndefined(vm.lxValue))
            {
                return undefined;
            }

            return {
                'stroke-dasharray': vm.lxValue * 1.26 + ',200'
            };
        }

        // Horizontal scale driving the determinate linear bar.
        function getLinearProgressValue()
        {
            if (angular.isUndefined(vm.lxValue))
            {
                return undefined;
            }

            return {
                'transform': 'scale(' + vm.lxValue / 100 + ', 1)'
            };
        }

        // Overall scale of the circular variant; lxDiameter is a percentage of the base size.
        function getProgressDiameter()
        {
            if (vm.lxType !== 'circular')
            {
                return undefined;
            }

            return {
                'transform': 'scale(' + parseInt(vm.lxDiameter) / 100 + ')'
            };
        }

        // Defaults for the optional attributes.
        function init()
        {
            if (angular.isUndefined(vm.lxDiameter))
            {
                vm.lxDiameter = 100;
            }

            if (angular.isUndefined(vm.lxColor))
            {
                vm.lxColor = 'primary';
            }
        }
    }
})();
(function()
{
    'use strict';

    // Module wiring for the lumx.radio-button component family.
    angular
        .module('lumx.radio-button')
        .directive('lxRadioGroup', lxRadioGroup)
        .directive('lxRadioButton', lxRadioButton)
        .directive('lxRadioButtonLabel', lxRadioButtonLabel)
        .directive('lxRadioButtonHelp', lxRadioButtonHelp);
function lxRadioGroup()
{
    // <lx-radio-group> — presentational wrapper around a set of radio buttons.
    var directive = {
        replace: true,
        restrict: 'E',
        templateUrl: 'radio-group.html',
        transclude: true
    };
    return directive;
}
function lxRadioButton()
{
    // <lx-radio-button> — a single radio input bound through ngModel.
    var directive = {
        bindToController: true,
        controller: LxRadioButtonController,
        controllerAs: 'lxRadioButton',
        replace: true,
        restrict: 'E',
        scope:
        {
            lxColor: '@?',
            name: '@',
            ngChange: '&?',
            ngDisabled: '=?',
            ngModel: '=',
            ngValue: '=?',
            value: '@?'
        },
        templateUrl: 'radio-button.html',
        transclude: true
    };
    return directive;
}
LxRadioButtonController.$inject = ['$scope', '$timeout', 'LxUtils'];

/*
 * Controller backing <lx-radio-button>.
 * Generates a unique id for the underlying <input> (shared with a nested
 * lx-radio-button-label via the directives' link functions), tracks whether
 * the button has transcluded children, and defers the ng-change callback
 * with $timeout so it fires after the model write.
 */
function LxRadioButtonController($scope, $timeout, LxUtils)
{
    var lxRadioButton = this;
    // Closure state exposed only through the getters/setters below.
    var radioButtonId;
    var radioButtonHasChildren;
    var timer;
    lxRadioButton.getRadioButtonId = getRadioButtonId;
    lxRadioButton.getRadioButtonHasChildren = getRadioButtonHasChildren;
    lxRadioButton.setRadioButtonId = setRadioButtonId;
    lxRadioButton.setRadioButtonHasChildren = setRadioButtonHasChildren;
    lxRadioButton.triggerNgChange = triggerNgChange;
    // Cancel a still-pending ng-change timeout when the scope dies.
    $scope.$on('$destroy', function()
    {
        $timeout.cancel(timer);
    });
    init();
    ////////////
    function getRadioButtonId()
    {
        return radioButtonId;
    }
    function getRadioButtonHasChildren()
    {
        return radioButtonHasChildren;
    }
    function init()
    {
        setRadioButtonId(LxUtils.generateUUID());
        setRadioButtonHasChildren(false);
        // Legacy support: a plain `value` attribute feeds ngValue when the
        // latter is not bound.
        if (angular.isDefined(lxRadioButton.value) && angular.isUndefined(lxRadioButton.ngValue))
        {
            lxRadioButton.ngValue = lxRadioButton.value;
        }
        lxRadioButton.lxColor = angular.isUndefined(lxRadioButton.lxColor) ? 'accent' : lxRadioButton.lxColor;
    }
    function setRadioButtonId(_radioButtonId)
    {
        radioButtonId = _radioButtonId;
    }
    function setRadioButtonHasChildren(_radioButtonHasChildren)
    {
        radioButtonHasChildren = _radioButtonHasChildren;
    }
    // Defer so ng-change observes the already-updated model value.
    function triggerNgChange()
    {
        timer = $timeout(lxRadioButton.ngChange);
    }
}
/*
 * <lx-radio-button-label> — label nested inside an lx-radio-button.
 * Requires both its own controller and the parent button's controller:
 * the link function marks the parent as "has children" and copies the
 * parent's generated input id onto the label controller so the template
 * can point its `for` attribute at the right input.
 */
function lxRadioButtonLabel()
{
    return {
        restrict: 'AE',
        require: ['^lxRadioButton', '^lxRadioButtonLabel'],
        templateUrl: 'radio-button-label.html',
        link: link,
        controller: LxRadioButtonLabelController,
        controllerAs: 'lxRadioButtonLabel',
        bindToController: true,
        transclude: true,
        replace: true
    };
    // ctrls[0] = parent LxRadioButtonController, ctrls[1] = own controller.
    function link(scope, element, attrs, ctrls)
    {
        ctrls[0].setRadioButtonHasChildren(true);
        ctrls[1].setRadioButtonId(ctrls[0].getRadioButtonId());
    }
}
function LxRadioButtonLabelController()
{
    // Holds the id of the radio input this label targets; the id is pushed
    // in by the lxRadioButtonLabel link function.
    var vm = this;
    var ownerInputId;

    vm.getRadioButtonId = getRadioButtonId;
    vm.setRadioButtonId = setRadioButtonId;

    ////////////

    function getRadioButtonId()
    {
        return ownerInputId;
    }

    function setRadioButtonId(_radioButtonId)
    {
        ownerInputId = _radioButtonId;
    }
}
function lxRadioButtonHelp()
{
    // <lx-radio-button-help> — transcluded helper text under a radio button.
    var directive = {
        replace: true,
        require: '^lxRadioButton',
        restrict: 'AE',
        templateUrl: 'radio-button-help.html',
        transclude: true
    };
    return directive;
}
})();
(function()
{
    'use strict';

    angular
        .module('lumx.ripple')
        .directive('lxRipple', lxRipple);

    lxRipple.$inject = ['$timeout'];

    /*
     * lx-ripple (attribute) — material "ripple" click effect on the host
     * element. The optional attribute value ('bgc-<value>') picks the
     * ripple color. A single .ripple <span> is lazily created on first
     * mousedown and reused afterwards.
     */
    function lxRipple($timeout)
    {
        return {
            restrict: 'A',
            link: link
        };

        function link(scope, element, attrs)
        {
            // Pending removal of the animation class; at most one at a time.
            var timer;

            element
                .css(
                {
                    position: 'relative',
                    overflow: 'hidden'
                })
                .on('mousedown', function(e)
                {
                    var ripple;

                    if (element.find('.ripple').length === 0)
                    {
                        ripple = angular.element('<span/>',
                        {
                            class: 'ripple'
                        });

                        if (attrs.lxRipple)
                        {
                            ripple.addClass('bgc-' + attrs.lxRipple);
                        }

                        element.prepend(ripple);
                    }
                    else
                    {
                        ripple = element.find('.ripple');
                    }

                    // Restart the CSS animation from scratch.
                    ripple.removeClass('ripple--is-animated');

                    // Size the ripple once, to the element's larger dimension.
                    if (!ripple.height() && !ripple.width())
                    {
                        var diameter = Math.max(element.outerWidth(), element.outerHeight());
                        ripple.css(
                        {
                            height: diameter,
                            width: diameter
                        });
                    }

                    // Center the ripple on the click position.
                    var x = e.pageX - element.offset().left - ripple.width() / 2;
                    var y = e.pageY - element.offset().top - ripple.height() / 2;

                    ripple.css(
                    {
                        top: y + 'px',
                        left: x + 'px'
                    }).addClass('ripple--is-animated');

                    // Bug fix: cancel any removal still pending from a previous
                    // click, otherwise that earlier timeout would strip the
                    // animation class in the middle of this ripple's animation.
                    $timeout.cancel(timer);

                    // 651ms ≈ the CSS animation duration.
                    timer = $timeout(function()
                    {
                        ripple.removeClass('ripple--is-animated');
                    }, 651);
                });

            scope.$on('$destroy', function()
            {
                $timeout.cancel(timer);
                element.off();
            });
        }
    }
})();
(function()
{
    'use strict';

    // Module wiring for the lumx.search-filter component.
    angular
        .module('lumx.search-filter')
        .filter('lxSearchHighlight', lxSearchHighlight)
        .directive('lxSearchFilter', lxSearchFilter);
lxSearchHighlight.$inject = ['$sce'];

/*
 * lxSearchHighlight filter — wraps every (case-insensitive) occurrence of
 * `query` inside `matchItem` in <strong> tags, optionally prefixed with an
 * mdi icon, and returns the result as trusted HTML.
 */
function lxSearchHighlight($sce)
{
    // Escape regex metacharacters so the query is matched literally.
    var escapeRegexp = function(queryToEscape)
    {
        return queryToEscape.replace(/([.?*+^$[\]\\(){}|-])/g, '\\$1');
    };

    return function(matchItem, query, icon)
    {
        var html = icon ? '<i class="mdi mdi-' + icon + '"></i>' : '';

        if (query && matchItem)
        {
            // '$&' keeps the original casing of each match.
            html += matchItem.replace(new RegExp(escapeRegexp(query), 'gi'), '<strong>$&</strong>');
        }
        else
        {
            html += matchItem;
        }

        return $sce.trustAsHtml(html);
    };
}
/*
 * <lx-search-filter> — search input with optional closed ("magnifier only")
 * mode and optional autocomplete dropdown. The actual <input> is supplied
 * by the caller through transclusion; the link function captures it and
 * hands it (plus its ngModelController) to the controller.
 */
function lxSearchFilter()
{
    return {
        restrict: 'E',
        templateUrl: 'search-filter.html',
        scope:
        {
            autocomplete: '&?lxAutocomplete',
            closed: '=?lxClosed',
            color: '@?lxColor',
            icon: '@?lxIcon',
            onSelect: '=?lxOnSelect',
            searchOnFocus: '=?lxSearchOnFocus',
            theme: '@?lxTheme',
            width: '@?lxWidth'
        },
        link: link,
        controller: LxSearchFilterController,
        controllerAs: 'lxSearchFilter',
        bindToController: true,
        replace: true,
        transclude: true
    };
    function link(scope, element, attrs, ctrl, transclude)
    {
        var input;
        // In closed mode the container width is driven by the lx-width attribute.
        attrs.$observe('lxWidth', function(newWidth)
        {
            if (angular.isDefined(scope.lxSearchFilter.closed) && scope.lxSearchFilter.closed)
            {
                element.find('.search-filter__container').css('width', newWidth);
            }
        });
        // Capture the transcluded <input> and wire its events to the controller.
        transclude(function()
        {
            input = element.find('input');
            ctrl.setInput(input);
            ctrl.setModel(input.data('$ngModelController'));
            input.on('focus', ctrl.focusInput);
            input.on('blur', ctrl.blurInput);
            input.on('keydown', ctrl.keyEvent);
        });
        // NOTE(review): assumes the transclude fn ran before $destroy, so
        // `input` is defined here — appears to always hold for this directive.
        scope.$on('$destroy', function()
        {
            input.off();
        });
    }
}
LxSearchFilterController.$inject = ['$element', '$scope', 'LxDropdownService', 'LxNotificationService', 'LxUtils'];

/*
 * Controller backing <lx-search-filter>.
 *
 * Manages: the open/close width animation of closed mode, the optional
 * autocomplete dropdown (opened/closed through LxDropdownService), and
 * keyboard navigation (Enter/Up/Down) inside the suggestion list.
 *
 * Closure state:
 *   input                 - the transcluded <input> (set by the link fn),
 *   debouncedAutocomplete - debounced lookup, set only when an autocomplete
 *                           callback is bound,
 *   itemSelected          - suppresses a new lookup right after a choice
 *                           has been picked.
 */
function LxSearchFilterController($element, $scope, LxDropdownService, LxNotificationService, LxUtils)
{
    var lxSearchFilter = this;
    var debouncedAutocomplete;
    var input;
    var itemSelected = false;
    lxSearchFilter.blurInput = blurInput;
    lxSearchFilter.clearInput = clearInput;
    lxSearchFilter.focusInput = focusInput;
    lxSearchFilter.getClass = getClass;
    lxSearchFilter.keyEvent = keyEvent;
    lxSearchFilter.openInput = openInput;
    lxSearchFilter.selectItem = selectItem;
    lxSearchFilter.setInput = setInput;
    lxSearchFilter.setModel = setModel;
    lxSearchFilter.activeChoiceIndex = -1;
    lxSearchFilter.color = angular.isDefined(lxSearchFilter.color) ? lxSearchFilter.color : 'black';
    lxSearchFilter.dropdownId = LxUtils.generateUUID();
    lxSearchFilter.theme = angular.isDefined(lxSearchFilter.theme) ? lxSearchFilter.theme : 'light';
    ////////////
    // Collapse back to the 40px "magnifier only" state when leaving an
    // empty input in closed mode; clear the view value on empty input.
    function blurInput()
    {
        if (angular.isDefined(lxSearchFilter.closed) && lxSearchFilter.closed && !input.val())
        {
            $element.velocity(
            {
                width: 40
            },
            {
                duration: 400,
                easing: 'easeOutQuint',
                queue: false
            });
        }
        if (!input.val())
        {
            lxSearchFilter.modelController.$setViewValue(undefined);
        }
    }
    // Empty the model and refocus the input without re-triggering a lookup.
    function clearInput()
    {
        lxSearchFilter.modelController.$setViewValue(undefined);
        lxSearchFilter.modelController.$render();
        // Temporarily disabling search on focus since we never want to trigger it when clearing the input.
        var searchOnFocus = lxSearchFilter.searchOnFocus;
        lxSearchFilter.searchOnFocus = false;
        input.focus();
        lxSearchFilter.searchOnFocus = searchOnFocus;
    }
    // NOTE(review): when lx-search-on-focus is set without an autocomplete
    // callback, this reaches updateAutocomplete with debouncedAutocomplete
    // undefined — confirm that combination is unsupported by design.
    function focusInput()
    {
        if (!lxSearchFilter.searchOnFocus)
        {
            return;
        }
        updateAutocomplete(lxSearchFilter.modelController.$viewValue, true);
    }
    // CSS modifier classes reflecting the current widget state.
    function getClass()
    {
        var searchFilterClass = [];
        if (angular.isUndefined(lxSearchFilter.closed) || !lxSearchFilter.closed)
        {
            searchFilterClass.push('search-filter--opened-mode');
        }
        if (angular.isDefined(lxSearchFilter.closed) && lxSearchFilter.closed)
        {
            searchFilterClass.push('search-filter--closed-mode');
        }
        if (input.val())
        {
            searchFilterClass.push('search-filter--has-clear-button');
        }
        if (angular.isDefined(lxSearchFilter.color))
        {
            searchFilterClass.push('search-filter--' + lxSearchFilter.color);
        }
        if (angular.isDefined(lxSearchFilter.theme))
        {
            searchFilterClass.push('search-filter--theme-' + lxSearchFilter.theme);
        }
        if (angular.isFunction(lxSearchFilter.autocomplete))
        {
            searchFilterClass.push('search-filter--autocomplete');
        }
        if (LxDropdownService.isOpen(lxSearchFilter.dropdownId))
        {
            searchFilterClass.push('search-filter--is-open');
        }
        return searchFilterClass;
    }
    // Raw keydown handler (bound by the link fn): Enter selects, arrows
    // move the active suggestion. Runs outside Angular, hence $apply below.
    function keyEvent(_event)
    {
        if (!angular.isFunction(lxSearchFilter.autocomplete))
        {
            return;
        }
        if (!LxDropdownService.isOpen(lxSearchFilter.dropdownId))
        {
            lxSearchFilter.activeChoiceIndex = -1;
        }
        switch (_event.keyCode) {
            case 13:
                keySelect();
                if (lxSearchFilter.activeChoiceIndex > -1)
                {
                    _event.preventDefault();
                }
                break;
            case 38:
                keyUp();
                _event.preventDefault();
                break;
            case 40:
                keyDown();
                _event.preventDefault();
                break;
        }
        $scope.$apply();
    }
    // Move the highlight down, wrapping to the top.
    function keyDown()
    {
        if (lxSearchFilter.autocompleteList.length)
        {
            lxSearchFilter.activeChoiceIndex += 1;
            if (lxSearchFilter.activeChoiceIndex >= lxSearchFilter.autocompleteList.length)
            {
                lxSearchFilter.activeChoiceIndex = 0;
            }
        }
    }
    // Pick the currently highlighted suggestion, if any.
    function keySelect()
    {
        if (!lxSearchFilter.autocompleteList || lxSearchFilter.activeChoiceIndex === -1)
        {
            return;
        }
        selectItem(lxSearchFilter.autocompleteList[lxSearchFilter.activeChoiceIndex]);
    }
    // Move the highlight up, wrapping to the bottom.
    function keyUp()
    {
        if (lxSearchFilter.autocompleteList.length)
        {
            lxSearchFilter.activeChoiceIndex -= 1;
            if (lxSearchFilter.activeChoiceIndex < 0)
            {
                lxSearchFilter.activeChoiceIndex = lxSearchFilter.autocompleteList.length - 1;
            }
        }
    }
    // Success path of the autocomplete callback: show or hide the dropdown
    // depending on whether any suggestions came back.
    function onAutocompleteSuccess(autocompleteList)
    {
        lxSearchFilter.autocompleteList = autocompleteList;
        if (lxSearchFilter.autocompleteList.length)
        {
            LxDropdownService.open(lxSearchFilter.dropdownId, $element);
        }
        else
        {
            LxDropdownService.close(lxSearchFilter.dropdownId);
        }
        lxSearchFilter.isLoading = false;
    }
    // Failure path: surface the error as a LumX notification.
    function onAutocompleteError(error)
    {
        LxNotificationService.error(error);
        lxSearchFilter.isLoading = false;
    }
    // Expand the closed-mode widget (animated), then focus the input.
    function openInput()
    {
        if (angular.isDefined(lxSearchFilter.closed) && lxSearchFilter.closed)
        {
            $element.velocity(
            {
                width: angular.isDefined(lxSearchFilter.width) ? parseInt(lxSearchFilter.width) : 240
            },
            {
                duration: 400,
                easing: 'easeOutQuint',
                queue: false,
                complete: function()
                {
                    input.focus();
                }
            });
        }
        else
        {
            input.focus();
        }
    }
    // Commit a suggestion into the model and notify lx-on-select.
    function selectItem(_item)
    {
        itemSelected = true;
        LxDropdownService.close(lxSearchFilter.dropdownId);
        lxSearchFilter.modelController.$setViewValue(_item);
        lxSearchFilter.modelController.$render();
        if (angular.isFunction(lxSearchFilter.onSelect))
        {
            lxSearchFilter.onSelect(_item);
        }
    }
    function setInput(_input)
    {
        input = _input;
    }
    // Stores the ngModelController; when autocomplete is bound, installs a
    // 500ms-debounced lookup as a model $parser.
    function setModel(_modelController)
    {
        lxSearchFilter.modelController = _modelController;
        if (angular.isFunction(lxSearchFilter.autocomplete) && angular.isFunction(lxSearchFilter.autocomplete()))
        {
            debouncedAutocomplete = LxUtils.debounce(function()
            {
                lxSearchFilter.isLoading = true;
                (lxSearchFilter.autocomplete()).apply(this, arguments);
            }, 500);
            lxSearchFilter.modelController.$parsers.push(updateAutocomplete);
        }
    }
    // $parser: launch (debounced or immediate) lookups as the user types;
    // close the dropdown when the input empties or an item was just picked.
    function updateAutocomplete(_newValue, _immediate)
    {
        if ((_newValue || (angular.isUndefined(_newValue) && lxSearchFilter.searchOnFocus)) && !itemSelected)
        {
            if (_immediate)
            {
                lxSearchFilter.isLoading = true;
                (lxSearchFilter.autocomplete())(_newValue, onAutocompleteSuccess, onAutocompleteError);
            }
            else
            {
                debouncedAutocomplete(_newValue, onAutocompleteSuccess, onAutocompleteError);
            }
        }
        else
        {
            debouncedAutocomplete.clear();
            LxDropdownService.close(lxSearchFilter.dropdownId);
        }
        itemSelected = false;
        return _newValue;
    }
}
})();
(function()
{
    'use strict';

    // Module wiring for the lumx.select component family.
    angular
        .module('lumx.select')
        .filter('filterChoices', filterChoices)
        .directive('lxSelect', lxSelect)
        .directive('lxSelectSelected', lxSelectSelected)
        .directive('lxSelectChoices', lxSelectChoices);
filterChoices.$inject = ['$filter'];

/*
 * filterChoices filter — text-filters the select's choice list.
 * When an external filter callback is in charge, the choices pass through
 * untouched; otherwise grouped choice maps ({ subheader: [choices] }) are
 * flattened before applying Angular's standard 'filter'.
 */
function filterChoices($filter)
{
    return function(choices, externalFilter, textFilter)
    {
        if (externalFilter)
        {
            return choices;
        }

        var flat = [];

        if (angular.isArray(choices))
        {
            flat = choices;
        }
        else if (angular.isObject(choices))
        {
            // Flatten only the array-valued entries of a grouped choice map.
            for (var key in choices)
            {
                if (angular.isArray(choices[key]))
                {
                    flat = flat.concat(choices[key]);
                }
            }
        }

        return $filter('filter')(flat, textFilter);
    };
}
/*
 * <lx-select> — dropdown select with optional multiple selection,
 * autocomplete, free-text entry and model<->selection conversion hooks.
 * The link function keeps backward compatibility with the old un-prefixed
 * attribute names (placeholder, change, filter, ...) by observing them and
 * copying/evaluating their values onto the controller.
 */
function lxSelect()
{
    return {
        restrict: 'E',
        templateUrl: 'select.html',
        scope:
        {
            allowClear: '=?lxAllowClear',
            allowNewValue: '=?lxAllowNewValue',
            autocomplete: '=?lxAutocomplete',
            newValueTransform: '=?lxNewValueTransform',
            choices: '=?lxChoices',
            choicesCustomStyle: '=?lxChoicesCustomStyle',
            customStyle: '=?lxCustomStyle',
            displayFilter: '=?lxDisplayFilter',
            error: '=?lxError',
            filter: '&?lxFilter',
            fixedLabel: '=?lxFixedLabel',
            helper: '=?lxHelper',
            helperMessage: '@?lxHelperMessage',
            label: '@?lxLabel',
            loading: '=?lxLoading',
            modelToSelection: '&?lxModelToSelection',
            multiple: '=?lxMultiple',
            ngChange: '&?',
            ngDisabled: '=?',
            ngModel: '=',
            selectionToModel: '&?lxSelectionToModel',
            theme: '@?lxTheme',
            valid: '=?lxValid',
            viewMode: '@?lxViewMode'
        },
        link: link,
        controller: LxSelectController,
        controllerAs: 'lxSelect',
        bindToController: true,
        replace: true,
        transclude: true
    };
    function link(scope, element, attrs)
    {
        // Legacy (un-prefixed) attributes still honored for backward compat:
        // one-way ones are just observed, two-way ones are $watch'ed against
        // the parent scope.
        var backwardOneWay = ['customStyle'];
        var backwardTwoWay = ['allowClear', 'choices', 'error', 'loading', 'multiple', 'valid'];
        angular.forEach(backwardOneWay, function(attribute)
        {
            if (angular.isDefined(attrs[attribute]))
            {
                attrs.$observe(attribute, function(newValue)
                {
                    scope.lxSelect[attribute] = newValue;
                });
            }
        });
        angular.forEach(backwardTwoWay, function(attribute)
        {
            if (angular.isDefined(attrs[attribute]))
            {
                scope.$watch(function()
                {
                    return scope.$parent.$eval(attrs[attribute]);
                }, function(newValue)
                {
                    // A bare `multiple` attribute (no expression) means true.
                    if (attribute === 'multiple' && angular.isUndefined(newValue))
                    {
                        scope.lxSelect[attribute] = true;
                    }
                    else
                    {
                        scope.lxSelect[attribute] = newValue;
                    }
                });
            }
        });
        attrs.$observe('placeholder', function(newValue)
        {
            scope.lxSelect.label = newValue;
        });
        attrs.$observe('change', function(newValue)
        {
            scope.lxSelect.ngChange = function(data)
            {
                return scope.$parent.$eval(newValue, data);
            };
        });
        attrs.$observe('filter', function(newValue)
        {
            scope.lxSelect.filter = function(data)
            {
                return scope.$parent.$eval(newValue, data);
            };
            scope.lxSelect.displayFilter = true;
        });
        attrs.$observe('modelToSelection', function(newValue)
        {
            scope.lxSelect.modelToSelection = function(data)
            {
                return scope.$parent.$eval(newValue, data);
            };
        });
        attrs.$observe('selectionToModel', function(newValue)
        {
            scope.lxSelect.selectionToModel = function(data)
            {
                return scope.$parent.$eval(newValue, data);
            };
        });
    }
}
LxSelectController.$inject = ['$interpolate', '$element', '$filter', '$sce', 'LxDropdownService', 'LxUtils'];

/*
 * Controller backing <lx-select>.
 *
 * Renders choices and the selected value by interpolating the HTML
 * templates captured by the lx-select-choices / lx-select-selected child
 * directives. When lx-selection-to-model / lx-model-to-selection converters
 * are bound, `ngModel` holds converted values while `unconvertedModel`
 * holds what is displayed.
 */
function LxSelectController($interpolate, $element, $filter, $sce, LxDropdownService, LxUtils)
{
    var lxSelect = this;
    // Raw HTML templates registered by the child directives.
    var choiceTemplate;
    var selectedTemplate;
    lxSelect.displayChoice = displayChoice;
    lxSelect.displaySelected = displaySelected;
    lxSelect.displaySubheader = displaySubheader;
    lxSelect.getFilteredChoices = getFilteredChoices;
    lxSelect.getSelectedModel = getSelectedModel;
    lxSelect.isSelected = isSelected;
    lxSelect.keyEvent = keyEvent;
    lxSelect.registerChoiceTemplate = registerChoiceTemplate;
    lxSelect.registerSelectedTemplate = registerSelectedTemplate;
    lxSelect.select = select;
    lxSelect.toggleChoice = toggleChoice;
    lxSelect.unselect = unselect;
    lxSelect.updateFilter = updateFilter;
    lxSelect.helperDisplayable = helperDisplayable;
    lxSelect.activeChoiceIndex = -1;
    lxSelect.activeSelectedIndex = -1;
    lxSelect.uuid = LxUtils.generateUUID();
    lxSelect.filterModel = undefined;
    lxSelect.ngModel = angular.isUndefined(lxSelect.ngModel) && lxSelect.multiple ? [] : lxSelect.ngModel;
    lxSelect.unconvertedModel = lxSelect.multiple ? [] : undefined;
    // NOTE(review): any *defined* lx-view-mode value is coerced to 'chips';
    // only an absent attribute yields 'field'. An explicit
    // lx-view-mode="field" therefore still becomes 'chips' — confirm this
    // is the intended contract before relying on it.
    lxSelect.viewMode = angular.isUndefined(lxSelect.viewMode) ? 'field' : 'chips';
    ////////////
    // indexOf by deep (angular.equals) comparison.
    function arrayObjectIndexOf(arr, obj)
    {
        for (var i = 0; i < arr.length; i++)
        {
            if (angular.equals(arr[i], obj))
            {
                return i;
            }
        }
        return -1;
    }
    // Interpolate one choice into the registered choice template.
    function displayChoice(_choice)
    {
        var choiceScope = {
            $choice: _choice
        };
        return $sce.trustAsHtml($interpolate(choiceTemplate)(choiceScope));
    }
    // Interpolate the selected value (or the whole selected model) into the
    // registered selected template; for grouped choices, also expose the
    // subheader the value belongs to.
    function displaySelected(_selected)
    {
        var selectedScope = {};
        if (!angular.isArray(lxSelect.choices))
        {
            var found = false;
            for (var header in lxSelect.choices)
            {
                if (found)
                {
                    break;
                }
                if (lxSelect.choices.hasOwnProperty(header))
                {
                    for (var idx = 0, len = lxSelect.choices[header].length; idx < len; idx++)
                    {
                        if (angular.equals(_selected, lxSelect.choices[header][idx]))
                        {
                            selectedScope.$selectedSubheader = header;
                            found = true;
                            break;
                        }
                    }
                }
            }
        }
        if (angular.isDefined(_selected))
        {
            selectedScope.$selected = _selected;
        }
        else
        {
            selectedScope.$selected = getSelectedModel();
        }
        return $sce.trustAsHtml($interpolate(selectedTemplate)(selectedScope));
    }
    function displaySubheader(_subheader)
    {
        return $sce.trustAsHtml(_subheader);
    }
    function getFilteredChoices()
    {
        return $filter('filterChoices')(lxSelect.choices, lxSelect.filter, lxSelect.filterModel);
    }
    // The model as displayed: the unconverted one when converters are bound.
    function getSelectedModel()
    {
        if (angular.isDefined(lxSelect.modelToSelection) || angular.isDefined(lxSelect.selectionToModel))
        {
            return lxSelect.unconvertedModel;
        }
        else
        {
            return lxSelect.ngModel;
        }
    }
    // Deep-compare membership test (array membership in multiple mode).
    function isSelected(_choice)
    {
        if (lxSelect.multiple && angular.isDefined(getSelectedModel()))
        {
            return arrayObjectIndexOf(getSelectedModel(), _choice) !== -1;
        }
        else if (angular.isDefined(getSelectedModel()))
        {
            return angular.equals(getSelectedModel(), _choice);
        }
    }
    // Keyboard handling: Backspace removes chips, Enter selects,
    // Up/Down move the active choice.
    function keyEvent(_event)
    {
        if (_event.keyCode !== 8)
        {
            lxSelect.activeSelectedIndex = -1;
        }
        if (!LxDropdownService.isOpen('dropdown-' + lxSelect.uuid))
        {
            lxSelect.activeChoiceIndex = -1;
        }
        switch (_event.keyCode) {
            case 8:
                keyRemove();
                break;
            case 13:
                keySelect();
                _event.preventDefault();
                break;
            case 38:
                keyUp();
                _event.preventDefault();
                break;
            case 40:
                keyDown();
                _event.preventDefault();
                break;
        }
    }
    // Move the highlight down (wrapping); in autocomplete mode also make
    // sure the dropdown is open.
    function keyDown()
    {
        var filteredChoices = $filter('filterChoices')(lxSelect.choices, lxSelect.filter, lxSelect.filterModel);
        if (filteredChoices.length)
        {
            lxSelect.activeChoiceIndex += 1;
            if (lxSelect.activeChoiceIndex >= filteredChoices.length)
            {
                lxSelect.activeChoiceIndex = 0;
            }
        }
        if (lxSelect.autocomplete)
        {
            LxDropdownService.open('dropdown-' + lxSelect.uuid, '#lx-select-selected-wrapper-' + lxSelect.uuid);
        }
    }
    // Backspace with an empty filter: first press highlights the last chip,
    // second press removes it.
    // NOTE(review): assumes the selected model is an array (multiple mode) —
    // confirm this path is unreachable in single mode.
    function keyRemove()
    {
        if (lxSelect.filterModel || !lxSelect.getSelectedModel().length)
        {
            return;
        }
        if (lxSelect.activeSelectedIndex === -1)
        {
            lxSelect.activeSelectedIndex = lxSelect.getSelectedModel().length - 1;
        }
        else
        {
            unselect(lxSelect.getSelectedModel()[lxSelect.activeSelectedIndex]);
        }
    }
    // Enter: toggle the highlighted choice, or — with lx-allow-new-value —
    // push the typed text (optionally transformed) as a new entry, skipping
    // deep-equal duplicates.
    function keySelect()
    {
        var filteredChoices = $filter('filterChoices')(lxSelect.choices, lxSelect.filter, lxSelect.filterModel);
        if (filteredChoices.length && filteredChoices[lxSelect.activeChoiceIndex])
        {
            toggleChoice(filteredChoices[lxSelect.activeChoiceIndex]);
        }
        else if (lxSelect.filterModel && lxSelect.allowNewValue)
        {
            if (angular.isArray(getSelectedModel()))
            {
                var value = angular.isFunction(lxSelect.newValueTransform) ? lxSelect.newValueTransform(lxSelect.filterModel) : lxSelect.filterModel;
                var identical = getSelectedModel().some(function (item) {
                    return angular.equals(item, value);
                });
                if (!identical)
                {
                    getSelectedModel().push(value);
                }
            }
            lxSelect.filterModel = undefined;
            LxDropdownService.close('dropdown-' + lxSelect.uuid);
        }
    }
    // Move the highlight up (wrapping).
    function keyUp()
    {
        var filteredChoices = $filter('filterChoices')(lxSelect.choices, lxSelect.filter, lxSelect.filterModel);
        if (filteredChoices.length)
        {
            lxSelect.activeChoiceIndex -= 1;
            if (lxSelect.activeChoiceIndex < 0)
            {
                lxSelect.activeChoiceIndex = filteredChoices.length - 1;
            }
        }
        if (lxSelect.autocomplete)
        {
            LxDropdownService.open('dropdown-' + lxSelect.uuid, '#lx-select-selected-wrapper-' + lxSelect.uuid);
        }
    }
    function registerChoiceTemplate(_choiceTemplate)
    {
        choiceTemplate = _choiceTemplate;
    }
    function registerSelectedTemplate(_selectedTemplate)
    {
        selectedTemplate = _selectedTemplate;
    }
    // Add a choice to the model, routing through the selection-to-model
    // converter when one is bound (the converter's callback delivers the
    // value to store).
    function select(_choice)
    {
        if (lxSelect.multiple && angular.isUndefined(lxSelect.ngModel))
        {
            lxSelect.ngModel = [];
        }
        if (angular.isDefined(lxSelect.selectionToModel))
        {
            lxSelect.selectionToModel(
            {
                data: _choice,
                callback: function(resp)
                {
                    if (lxSelect.multiple)
                    {
                        lxSelect.ngModel.push(resp);
                    }
                    else
                    {
                        lxSelect.ngModel = resp;
                    }
                    if (lxSelect.autocomplete)
                    {
                        $element.find('.lx-select-selected__filter').focus();
                    }
                }
            });
        }
        else
        {
            if (lxSelect.multiple)
            {
                lxSelect.ngModel.push(_choice);
            }
            else
            {
                lxSelect.ngModel = _choice;
            }
            if (lxSelect.autocomplete)
            {
                $element.find('.lx-select-selected__filter').focus();
            }
        }
    }
    // Click/Enter on a choice: select or unselect it; in multiple mode the
    // click must not bubble (it would close the dropdown).
    function toggleChoice(_choice, _event)
    {
        if (lxSelect.multiple && !lxSelect.autocomplete)
        {
            _event.stopPropagation();
        }
        if (lxSelect.multiple && isSelected(_choice))
        {
            unselect(_choice);
        }
        else
        {
            select(_choice);
        }
        if (lxSelect.autocomplete)
        {
            lxSelect.activeChoiceIndex = -1;
            lxSelect.filterModel = undefined;
            LxDropdownService.close('dropdown-' + lxSelect.uuid);
        }
    }
    // Remove a choice, converting back through selection-to-model when bound.
    function unselect(_choice)
    {
        if (angular.isDefined(lxSelect.selectionToModel))
        {
            lxSelect.selectionToModel(
            {
                data: _choice,
                callback: function(resp)
                {
                    removeElement(lxSelect.ngModel, resp);
                    if (lxSelect.autocomplete)
                    {
                        $element.find('.lx-select-selected__filter').focus();
                        lxSelect.activeSelectedIndex = -1;
                    }
                }
            });
            removeElement(lxSelect.unconvertedModel, _choice);
        }
        else
        {
            removeElement(lxSelect.ngModel, _choice);
            if (lxSelect.autocomplete)
            {
                $element.find('.lx-select-selected__filter').focus();
                lxSelect.activeSelectedIndex = -1;
            }
        }
    }
    // Notify the external lx-filter callback and, in autocomplete mode,
    // open/close the dropdown to follow the typed text.
    function updateFilter()
    {
        if (angular.isDefined(lxSelect.filter))
        {
            lxSelect.filter(
            {
                newValue: lxSelect.filterModel
            });
        }
        if (lxSelect.autocomplete)
        {
            lxSelect.activeChoiceIndex = -1;
            if (lxSelect.filterModel)
            {
                LxDropdownService.open('dropdown-' + lxSelect.uuid, '#lx-select-selected-wrapper-' + lxSelect.uuid);
            }
            else
            {
                LxDropdownService.close('dropdown-' + lxSelect.uuid);
            }
        }
    }
    // Whether the helper message should be shown.
    function helperDisplayable() {
        // If helper message is not defined, message is not displayed...
        if (angular.isUndefined(lxSelect.helperMessage))
        {
            return false;
        }
        // If helper is defined return it's state.
        if (angular.isDefined(lxSelect.helper))
        {
            return lxSelect.helper;
        }
        // Else check if there's choices.
        var choices = lxSelect.getFilteredChoices();
        if (angular.isArray(choices))
        {
            return !choices.length;
        }
        else if (angular.isObject(choices))
        {
            return !Object.keys(choices).length;
        }
        return true;
    }
    // Remove the first deep-equal occurrence of `element` from `model`.
    function removeElement(model, element)
    {
        var index = -1;
        for (var i = 0, len = model.length; i < len; i++)
        {
            if (angular.equals(element, model[i]))
            {
                index = i;
                break;
            }
        }
        if (index > -1)
        {
            model.splice(index, 1);
        }
    }
}
/*
 * <lx-select-selected> — captures the transcluded markup as a raw HTML
 * string and registers it on the parent lx-select controller, which later
 * interpolates it against { $selected, $selectedSubheader }.
 */
function lxSelectSelected()
{
    return {
        restrict: 'E',
        require: ['lxSelectSelected', '^lxSelect'],
        templateUrl: 'select-selected.html',
        link: link,
        controller: LxSelectSelectedController,
        controllerAs: 'lxSelectSelected',
        bindToController: true,
        transclude: true
    };
    function link(scope, element, attrs, ctrls, transclude)
    {
        // ctrls[0] = own controller, ctrls[1] = parent LxSelectController.
        ctrls[0].setParentController(ctrls[1]);
        transclude(scope, function(clone)
        {
            // Serialize the transcluded nodes (text nodes keep their .data,
            // elements their outerHTML) into a single template string.
            var template = '';
            for (var i = 0; i < clone.length; i++)
            {
                template += clone[i].data || clone[i].outerHTML || '';
            }
            ctrls[1].registerSelectedTemplate(template);
        });
    }
}
function LxSelectSelectedController()
{
    // Bridges the "selected value" template to its parent lx-select controller.
    var vm = this;

    vm.clearModel = clearModel;
    vm.removeSelected = removeSelected;
    vm.setParentController = setParentController;

    ////////////

    // Empties both the public ngModel and the converted display model.
    function clearModel(_event)
    {
        _event.stopPropagation();
        vm.parentCtrl.ngModel = undefined;
        vm.parentCtrl.unconvertedModel = undefined;
    }

    function setParentController(_parentCtrl)
    {
        vm.parentCtrl = _parentCtrl;
    }

    // Unselects a single entry through the parent controller.
    function removeSelected(_selected, _event)
    {
        _event.stopPropagation();
        vm.parentCtrl.unselect(_selected);
    }
}
/*
 * <lx-select-choices> — captures the transcluded markup as a raw HTML
 * string and registers it on the parent lx-select controller, which later
 * interpolates it per choice against { $choice }.
 */
function lxSelectChoices()
{
    return {
        restrict: 'E',
        require: ['lxSelectChoices', '^lxSelect'],
        templateUrl: 'select-choices.html',
        link: link,
        controller: LxSelectChoicesController,
        controllerAs: 'lxSelectChoices',
        bindToController: true,
        transclude: true
    };
    function link(scope, element, attrs, ctrls, transclude)
    {
        // ctrls[0] = own controller, ctrls[1] = parent LxSelectController.
        ctrls[0].setParentController(ctrls[1]);
        transclude(scope, function(clone)
        {
            // Serialize the transcluded nodes into a single template string.
            var template = '';
            for (var i = 0; i < clone.length; i++)
            {
                template += clone[i].data || clone[i].outerHTML || '';
            }
            ctrls[1].registerChoiceTemplate(template);
        });
    }
}
LxSelectChoicesController.$inject = ['$scope', '$timeout'];

/*
 * Controller backing <lx-select-choices>.
 * Deep-watches the parent select's ngModel to (a) fire ng-change after the
 * digest settles and (b) re-run the model-to-selection conversion that
 * fills the parent's `unconvertedModel`.
 */
function LxSelectChoicesController($scope, $timeout)
{
    var lxSelectChoices = this;
    var timer;
    lxSelectChoices.isArray = isArray;
    lxSelectChoices.setParentController = setParentController;
    // Cancel a still-pending model-change timeout on teardown.
    $scope.$on('$destroy', function()
    {
        $timeout.cancel(timer);
    });
    ////////////
    // True when choices are a flat array (vs a grouped { subheader: [...] } map).
    function isArray()
    {
        return angular.isArray(lxSelectChoices.parentCtrl.choices);
    }
    function setParentController(_parentCtrl)
    {
        lxSelectChoices.parentCtrl = _parentCtrl;
        // Deep watch (3rd arg true) so in-place array mutations are caught.
        $scope.$watch(function()
        {
            return lxSelectChoices.parentCtrl.ngModel;
        }, function(newModel, oldModel)
        {
            // $timeout defers the work until after the current digest.
            timer = $timeout(function()
            {
                if (newModel !== oldModel && angular.isDefined(lxSelectChoices.parentCtrl.ngChange))
                {
                    lxSelectChoices.parentCtrl.ngChange(
                    {
                        newValue: newModel,
                        oldValue: oldModel
                    });
                }
                if (angular.isDefined(lxSelectChoices.parentCtrl.modelToSelection) || angular.isDefined(lxSelectChoices.parentCtrl.selectionToModel))
                {
                    toSelection();
                }
            });
        }, true);
    }
    // Rebuild parentCtrl.unconvertedModel by running every model entry
    // through the model-to-selection converter (single value or array).
    function toSelection()
    {
        if (lxSelectChoices.parentCtrl.multiple)
        {
            lxSelectChoices.parentCtrl.unconvertedModel = [];
            angular.forEach(lxSelectChoices.parentCtrl.ngModel, function(item)
            {
                lxSelectChoices.parentCtrl.modelToSelection(
                {
                    data: item,
                    callback: function(resp)
                    {
                        lxSelectChoices.parentCtrl.unconvertedModel.push(resp);
                    }
                });
            });
        }
        else
        {
            lxSelectChoices.parentCtrl.modelToSelection(
            {
                data: lxSelectChoices.parentCtrl.ngModel,
                callback: function(resp)
                {
                    lxSelectChoices.parentCtrl.unconvertedModel = resp;
                }
            });
        }
    }
}
})();
(function()
{
    'use strict';

    // Module wiring for the lumx.stepper component family.
    angular
        .module('lumx.stepper')
        .directive('lxStepper', lxStepper)
        .directive('lxStep', lxStep)
        .directive('lxStepNav', lxStepNav);

    /* Stepper */
function lxStepper()
{
    // <lx-stepper> — container orchestrating a linear or free-order
    // sequence of lx-step panels.
    var directive = {
        bindToController: true,
        controller: LxStepperController,
        controllerAs: 'lxStepper',
        restrict: 'E',
        scope: {
            cancel: '&?lxCancel',
            complete: '&lxComplete',
            isLinear: '=?lxIsLinear',
            labels: '=?lxLabels',
            layout: '@?lxLayout'
        },
        templateUrl: 'stepper.html',
        transclude: true
    };
    return directive;
}
/*
 * Controller backing <lx-stepper>.
 * Keeps the ordered list of registered steps, the active index, and the
 * navigation rules (in linear mode a step is reachable only when the
 * previous mandatory step is valid).
 */
function LxStepperController()
{
    var lxStepper = this;
    // Reused array instance so the template's ng-class binding stays stable.
    var _classes = [];
    var _defaultValues = {
        isLinear: true,
        labels: {
            'back': 'Back',
            'cancel': 'Cancel',
            'continue': 'Continue',
            'optional': 'Optional'
        },
        layout: 'horizontal'
    };
    lxStepper.addStep = addStep;
    lxStepper.getClasses = getClasses;
    lxStepper.goToStep = goToStep;
    lxStepper.isComplete = isComplete;
    lxStepper.updateStep = updateStep;
    lxStepper.activeIndex = 0;
    lxStepper.isLinear = angular.isDefined(lxStepper.isLinear) ? lxStepper.isLinear : _defaultValues.isLinear;
    lxStepper.labels = angular.isDefined(lxStepper.labels) ? lxStepper.labels : _defaultValues.labels;
    lxStepper.layout = angular.isDefined(lxStepper.layout) ? lxStepper.layout : _defaultValues.layout;
    lxStepper.steps = [];
    ////////////
    // Called by each child lx-step during its own init.
    function addStep(step)
    {
        lxStepper.steps.push(step);
    }
    // CSS modifier classes for the current layout and active-step state.
    function getClasses()
    {
        _classes.length = 0;
        _classes.push('lx-stepper--layout-' + lxStepper.layout);
        if (lxStepper.isLinear)
        {
            _classes.push('lx-stepper--is-linear');
        }
        if (lxStepper.steps[lxStepper.activeIndex].feedback)
        {
            _classes.push('lx-stepper--step-has-feedback');
        }
        if (lxStepper.steps[lxStepper.activeIndex].isLoading)
        {
            _classes.push('lx-stepper--step-is-loading');
        }
        return _classes;
    }
    // Activate step `index`; `bypass` skips the linear-mode validity checks.
    function goToStep(index, bypass)
    {
        // Check if the the wanted step previous steps are optionals. If so, check if the step before the last optional step is valid to allow going to the wanted step from the nav (only if linear stepper).
        var stepBeforeLastOptionalStep;
        if (!bypass && lxStepper.isLinear)
        {
            for (var i = index - 1; i >= 0; i--)
            {
                if (angular.isDefined(lxStepper.steps[i]) && !lxStepper.steps[i].isOptional)
                {
                    stepBeforeLastOptionalStep = lxStepper.steps[i];
                    break;
                }
            }
            if (angular.isDefined(stepBeforeLastOptionalStep) && stepBeforeLastOptionalStep.isValid === true)
            {
                bypass = true;
            }
        }
        // Check if the wanted step previous step is not valid to disallow going to the wanted step from the nav (only if linear stepper).
        if (!bypass && lxStepper.isLinear && angular.isDefined(lxStepper.steps[index - 1]) && (angular.isUndefined(lxStepper.steps[index - 1].isValid) || lxStepper.steps[index - 1].isValid === false))
        {
            return;
        }
        if (index < lxStepper.steps.length)
        {
            lxStepper.activeIndex = parseInt(index);
        }
    }
    // Returns true when every mandatory step is valid.
    // NOTE(review): despite the predicate-style name, this also *fires* the
    // lx-complete callback as a side effect when everything is valid.
    function isComplete()
    {
        var countMandatory = 0;
        var countValid = 0;
        for (var i = 0, len = lxStepper.steps.length; i < len; i++)
        {
            if (!lxStepper.steps[i].isOptional)
            {
                countMandatory++;
                if (lxStepper.steps[i].isValid === true) {
                    countValid++;
                }
            }
        }
        if (countValid === countMandatory)
        {
            lxStepper.complete();
            return true;
        }
    }
    // Sync index/label of an already-registered step (matched by uuid).
    function updateStep(step)
    {
        for (var i = 0, len = lxStepper.steps.length; i < len; i++)
        {
            if (lxStepper.steps[i].uuid === step.uuid)
            {
                lxStepper.steps[i].index = step.index;
                lxStepper.steps[i].label = step.label;
                return;
            }
        }
    }
}
/* Step */
function lxStep()
{
    // Directive factory for <lx-step>: a single step hosted inside a parent
    // <lx-stepper>. Registers itself with the stepper and mirrors its
    // interpolated attributes onto the step model.
    return {
        restrict: 'E',
        require: ['lxStep', '^lxStepper'],
        templateUrl: 'step.html',
        scope: {
            feedback: '@?lxFeedback',
            isEditable: '=?lxIsEditable',
            isOptional: '=?lxIsOptional',
            label: '@lxLabel',
            submit: '&?lxSubmit',
            validate: '&?lxValidate'
        },
        link: link,
        controller: LxStepController,
        controllerAs: 'lxStep',
        bindToController: true,
        replace: true,
        transclude: true
    };
    function link(scope, element, attrs, ctrls)
    {
        // ctrls[0] = this step's controller, ctrls[1] = parent stepper.
        // element.index() (DOM position among siblings) is used as the
        // step's index within the stepper.
        ctrls[0].init(ctrls[1], element.index());
        // Keep the step model in sync with interpolated attribute values.
        attrs.$observe('lxFeedback', function(feedback)
        {
            ctrls[0].setFeedback(feedback);
        });
        attrs.$observe('lxLabel', function(label)
        {
            ctrls[0].setLabel(label);
        });
        attrs.$observe('lxIsEditable', function(isEditable)
        {
            ctrls[0].setIsEditable(isEditable);
        });
        attrs.$observe('lxIsOptional', function(isOptional)
        {
            ctrls[0].setIsOptional(isOptional);
        });
    }
}
LxStepController.$inject = ['$q', 'LxNotificationService', 'LxUtils'];
function LxStepController($q, LxNotificationService, LxUtils)
{
    // Controller for an individual <lx-step>. Owns the step model shared with
    // the parent stepper and drives validation/submission of the step.
    var lxStep = this;
    // Stable array instance returned by getClasses() on every call.
    var _classes = [];
    // Index to activate after a successful submit (may be set from the
    // submit promise's resolution value).
    var _nextStepIndex;
    lxStep.getClasses = getClasses;
    lxStep.init = init;
    lxStep.previousStep = previousStep;
    lxStep.setFeedback = setFeedback;
    lxStep.setLabel = setLabel;
    lxStep.setIsEditable = setIsEditable;
    lxStep.setIsOptional = setIsOptional;
    lxStep.submitStep = submitStep;
    // The step model registered with the parent stepper.
    lxStep.step = {
        errorMessage: undefined,
        feedback: undefined,
        index: undefined,
        isEditable: false,
        isLoading: false,
        isOptional: false,
        isValid: undefined,
        label: undefined,
        uuid: LxUtils.generateUUID()
    };
    ////////////
    // CSS classes for the step (active state only).
    function getClasses()
    {
        _classes.length = 0;
        if (lxStep.step.index === lxStep.parent.activeIndex)
        {
            _classes.push('lx-step--is-active');
        }
        return _classes;
    }
    // Called from the directive's link fn: wires up the parent stepper and
    // registers this step with it.
    function init(parent, index)
    {
        lxStep.parent = parent;
        lxStep.step.index = index;
        lxStep.parent.addStep(lxStep.step);
    }
    // Navigate back one step (no-op on the first step).
    function previousStep()
    {
        if (lxStep.step.index > 0)
        {
            lxStep.parent.goToStep(lxStep.step.index - 1);
        }
    }
    function setFeedback(feedback)
    {
        lxStep.step.feedback = feedback;
        updateParentStep();
    }
    function setLabel(label)
    {
        lxStep.step.label = label;
        updateParentStep();
    }
    function setIsEditable(isEditable)
    {
        lxStep.step.isEditable = isEditable;
        updateParentStep();
    }
    function setIsOptional(isOptional)
    {
        lxStep.step.isOptional = isOptional;
        updateParentStep();
    }
    // Validate and submit this step, then advance the stepper.
    // - `lx-validate` (if provided) must return `true` to proceed; any other
    //   value is stored as the step's error message.
    // - `lx-submit` (if provided) must return a promise; its resolution value
    //   may be a step index to jump to. NOTE(review): the promise is assumed
    //   to expose .catch/.finally ($q or native) — confirm for custom callers.
    function submitStep()
    {
        // Already-valid, non-editable steps skip straight to the next step.
        if (lxStep.step.isValid === true && !lxStep.step.isEditable)
        {
            lxStep.parent.goToStep(_nextStepIndex, true);
            return;
        }
        var validateFunction = lxStep.validate;
        var validity = true;
        if (angular.isFunction(validateFunction))
        {
            validity = validateFunction();
        }
        if (validity === true)
        {
            lxStep.step.isLoading = true;
            updateParentStep();
            // Default submit: an immediately-resolved promise.
            var submitFunction = lxStep.submit;
            if (!angular.isFunction(submitFunction))
            {
                submitFunction = function()
                {
                    return $q(function(resolve)
                    {
                        resolve();
                    });
                };
            }
            var promise = submitFunction();
            promise.then(function(nextStepIndex)
            {
                lxStep.step.isValid = true;
                updateParentStep();
                var isComplete = lxStep.parent.isComplete();
                if (!isComplete)
                {
                    // Allow a forward jump only past optional steps (or when
                    // the stepper is not linear); otherwise go to index + 1.
                    _nextStepIndex = angular.isDefined(nextStepIndex) && nextStepIndex > lxStep.parent.activeIndex && (!lxStep.parent.isLinear || (lxStep.parent.isLinear && lxStep.parent.steps[nextStepIndex - 1].isOptional)) ? nextStepIndex : lxStep.step.index + 1;
                    lxStep.parent.goToStep(_nextStepIndex, true);
                }
            }).catch(function(error)
            {
                LxNotificationService.error(error);
            }).finally(function()
            {
                lxStep.step.isLoading = false;
                updateParentStep();
            });
        }
        else
        {
            // Validation failed: `validity` holds the error message.
            lxStep.step.isValid = false;
            lxStep.step.errorMessage = validity;
            updateParentStep();
        }
    }
    // Push local step-model changes back to the parent stepper.
    function updateParentStep()
    {
        lxStep.parent.updateStep(lxStep.step);
    }
}
/* Step nav */
function lxStepNav()
{
    // Directive factory for <lx-step-nav>: one clickable entry in the
    // stepper's navigation header, reflecting the state of a single step.
    return {
        restrict: 'E',
        require: ['lxStepNav', '^lxStepper'],
        templateUrl: 'step-nav.html',
        scope: {
            activeIndex: '@lxActiveIndex',
            step: '=lxStep'
        },
        link: link,
        controller: LxStepNavController,
        controllerAs: 'lxStepNav',
        bindToController: true,
        replace: true,
        transclude: false
    };
    function link(scope, element, attrs, ctrls)
    {
        // Hand the parent stepper controller to this nav item's controller.
        ctrls[0].init(ctrls[1]);
    }
}
function LxStepNavController()
{
    // Controller for a stepper navigation item: derives the modifier CSS
    // classes describing the bound step's state.
    var ctrl = this;
    // Stable array instance handed back by getClasses() on every call.
    var stateClasses = [];
    ctrl.getClasses = getClasses;
    ctrl.init = init;
    ////////////
    /**
     * Compute the nav item's modifier classes (active / valid / error /
     * editable / optional) from the bound step model.
     */
    function getClasses()
    {
        stateClasses.length = 0;
        var isActive = parseInt(ctrl.step.index) === parseInt(ctrl.activeIndex);
        if (isActive)
        {
            stateClasses.push('lx-step-nav--is-active');
        }
        // Validity is tri-state: true, false, or undefined (untouched).
        switch (ctrl.step.isValid)
        {
            case true:
                stateClasses.push('lx-step-nav--is-valid');
                break;
            case false:
                stateClasses.push('lx-step-nav--has-error');
                break;
        }
        if (ctrl.step.isEditable)
        {
            stateClasses.push('lx-step-nav--is-editable');
        }
        if (ctrl.step.isOptional)
        {
            stateClasses.push('lx-step-nav--is-optional');
        }
        return stateClasses;
    }
    /**
     * Store a reference to the parent stepper controller.
     */
    function init(parent, index)
    {
        ctrl.parent = parent;
    }
}
})();
(function()
{
    'use strict';
    // lumx.switch module wiring: the switch control plus its label and
    // help-text companion directives.
    angular
        .module('lumx.switch')
        .directive('lxSwitch', lxSwitch)
        .directive('lxSwitchLabel', lxSwitchLabel)
        .directive('lxSwitchHelp', lxSwitchHelp);
function lxSwitch()
{
    // Directive factory for <lx-switch>: a toggle control bound to ngModel
    // with configurable true/false values, color and label position.
    var ddo = {
        restrict: 'E',
        templateUrl: 'switch.html',
        scope:
        {
            ngModel: '=',
            name: '@?',
            ngTrueValue: '@?',
            ngFalseValue: '@?',
            ngChange: '&?',
            ngDisabled: '=?',
            lxColor: '@?',
            lxPosition: '@?'
        },
        controller: LxSwitchController,
        controllerAs: 'lxSwitch',
        bindToController: true,
        replace: true,
        transclude: true
    };
    return ddo;
}
LxSwitchController.$inject = ['$scope', '$timeout', 'LxUtils'];
function LxSwitchController($scope, $timeout, LxUtils)
{
    // Controller for <lx-switch>. Generates a unique id linking the input to
    // its label, applies binding defaults, and defers ngChange to a $timeout
    // so the model is updated before the handler runs.
    var lxSwitch = this;
    var switchId;
    // Whether a <lx-switch-label> child registered itself.
    var switchHasChildren;
    var timer;
    lxSwitch.getSwitchId = getSwitchId;
    lxSwitch.getSwitchHasChildren = getSwitchHasChildren;
    lxSwitch.setSwitchId = setSwitchId;
    lxSwitch.setSwitchHasChildren = setSwitchHasChildren;
    lxSwitch.triggerNgChange = triggerNgChange;
    $scope.$on('$destroy', function()
    {
        // Cancel the deferred ngChange call if the scope dies first.
        $timeout.cancel(timer);
    });
    init();
    ////////////
    function getSwitchId()
    {
        return switchId;
    }
    function getSwitchHasChildren()
    {
        return switchHasChildren;
    }
    // Apply defaults for the optional attribute bindings.
    function init()
    {
        setSwitchId(LxUtils.generateUUID());
        setSwitchHasChildren(false);
        lxSwitch.ngTrueValue = angular.isUndefined(lxSwitch.ngTrueValue) ? true : lxSwitch.ngTrueValue;
        lxSwitch.ngFalseValue = angular.isUndefined(lxSwitch.ngFalseValue) ? false : lxSwitch.ngFalseValue;
        lxSwitch.lxColor = angular.isUndefined(lxSwitch.lxColor) ? 'accent' : lxSwitch.lxColor;
        lxSwitch.lxPosition = angular.isUndefined(lxSwitch.lxPosition) ? 'left' : lxSwitch.lxPosition;
    }
    function setSwitchId(_switchId)
    {
        switchId = _switchId;
    }
    function setSwitchHasChildren(_switchHasChildren)
    {
        switchHasChildren = _switchHasChildren;
    }
    // Run the ngChange expression after the current digest has applied the
    // new model value.
    function triggerNgChange()
    {
        timer = $timeout(lxSwitch.ngChange);
    }
}
function lxSwitchLabel()
{
    // Directive factory for <lx-switch-label>: transcluded label text wired
    // to the owning switch via its generated id.
    return {
        restrict: 'AE',
        require: ['^lxSwitch', '^lxSwitchLabel'],
        templateUrl: 'switch-label.html',
        link: link,
        controller: LxSwitchLabelController,
        controllerAs: 'lxSwitchLabel',
        bindToController: true,
        transclude: true,
        replace: true
    };
    function link(scope, element, attrs, ctrls)
    {
        // ctrls[0] = parent switch, ctrls[1] = this label's controller.
        ctrls[0].setSwitchHasChildren(true);
        ctrls[1].setSwitchId(ctrls[0].getSwitchId());
    }
}
function LxSwitchLabelController()
{
    // Controller for <lx-switch-label>: stores the owning switch's id so the
    // label template can target its input.
    var ctrl = this;
    var ownerSwitchId;
    ctrl.getSwitchId = getSwitchId;
    ctrl.setSwitchId = setSwitchId;
    ////////////
    function getSwitchId()
    {
        return ownerSwitchId;
    }
    function setSwitchId(newId)
    {
        ownerSwitchId = newId;
    }
}
function lxSwitchHelp()
{
    // Directive factory for <lx-switch-help>: transcluded helper text shown
    // under the owning switch. Template-only; no controller needed.
    var ddo = {
        restrict: 'AE',
        require: '^lxSwitch',
        templateUrl: 'switch-help.html',
        replace: true,
        transclude: true
    };
    return ddo;
}
})();
(function()
{
    'use strict';
    // lumx.tabs module wiring: the tab bar, individual tabs, and the
    // standalone pane containers used in "separate" view mode.
    angular
        .module('lumx.tabs')
        .directive('lxTabs', lxTabs)
        .directive('lxTab', lxTab)
        .directive('lxTabsPanes', lxTabsPanes)
        .directive('lxTabPane', lxTabPane);
function lxTabs()
{
    // Directive factory for <lx-tabs>: the tab bar. Panes are either
    // transcluded inline ("gather" mode) or rendered elsewhere and targeted
    // through lx-panes-id / lx-links ("separate" mode).
    var ddo = {
        restrict: 'E',
        templateUrl: 'tabs.html',
        scope:
        {
            layout: '@?lxLayout',
            theme: '@?lxTheme',
            color: '@?lxColor',
            indicator: '@?lxIndicator',
            activeTab: '=?lxActiveTab',
            panesId: '@?lxPanesId',
            links: '=?lxLinks'
        },
        controller: LxTabsController,
        controllerAs: 'lxTabs',
        bindToController: true,
        transclude: true,
        replace: true
    };
    return ddo;
}
LxTabsController.$inject = ['LxUtils', '$element', '$scope', '$timeout'];
function LxTabsController(LxUtils, $element, $scope, $timeout)
{
    // Controller for <lx-tabs>. Maintains the tab collection, the active
    // index, and animates the underline indicator (via jQuery + velocity).
    var lxTabs = this;
    // Snapshot of the initial tab count, taken one tick after construction.
    var tabsLength;
    var timer1;
    var timer2;
    var timer3;
    var timer4;
    lxTabs.removeTab = removeTab;
    lxTabs.setActiveTab = setActiveTab;
    lxTabs.setViewMode = setViewMode;
    lxTabs.tabIsActive = tabIsActive;
    lxTabs.updateTabs = updateTabs;
    // Binding defaults.
    lxTabs.activeTab = angular.isDefined(lxTabs.activeTab) ? lxTabs.activeTab : 0;
    lxTabs.color = angular.isDefined(lxTabs.color) ? lxTabs.color : 'primary';
    lxTabs.indicator = angular.isDefined(lxTabs.indicator) ? lxTabs.indicator : 'accent';
    lxTabs.layout = angular.isDefined(lxTabs.layout) ? lxTabs.layout : 'full';
    lxTabs.tabs = [];
    lxTabs.theme = angular.isDefined(lxTabs.theme) ? lxTabs.theme : 'light';
    // 'separate' = links + external panes; 'gather' = transcluded panes.
    lxTabs.viewMode = angular.isDefined(lxTabs.links) ? 'separate' : 'gather';
    // Move the indicator (and, in separate mode, swap the visible external
    // pane) whenever the active tab changes.
    $scope.$watch(function()
    {
        return lxTabs.activeTab;
    }, function(_newActiveTab, _oldActiveTab)
    {
        timer1 = $timeout(function()
        {
            setIndicatorPosition(_oldActiveTab);
            if (lxTabs.viewMode === 'separate')
            {
                angular.element('#' + lxTabs.panesId).find('.tabs__pane').hide();
                angular.element('#' + lxTabs.panesId).find('.tabs__pane').eq(lxTabs.activeTab).show();
            }
        });
    });
    // Rebuild the tab models whenever the lx-links binding changes.
    $scope.$watch(function()
    {
        return lxTabs.links;
    }, function(_newLinks)
    {
        lxTabs.viewMode = angular.isDefined(_newLinks) ? 'separate' : 'gather';
        angular.forEach(_newLinks, function(link, index)
        {
            var tab = {
                uuid: (angular.isUndefined(link.uuid) || link.uuid.length === 0) ? LxUtils.generateUUID() : link.uuid,
                index: index,
                label: link.label,
                icon: link.icon,
                disabled: link.disabled
            };
            updateTabs(tab);
        });
    });
    // Record the tab count once the initial registration pass has finished.
    timer2 = $timeout(function()
    {
        tabsLength = lxTabs.tabs.length;
    });
    $scope.$on('$destroy', function()
    {
        $timeout.cancel(timer1);
        $timeout.cancel(timer2);
        $timeout.cancel(timer3);
        $timeout.cancel(timer4);
    });
    ////////////
    // Remove a tab and re-index the remainder.
    // NOTE(review): when activeTab !== 0 this resets the selection to the
    // first tab regardless of which tab was removed — confirm intended.
    function removeTab(_tab)
    {
        lxTabs.tabs.splice(_tab.index, 1);
        angular.forEach(lxTabs.tabs, function(tab, index)
        {
            tab.index = index;
        });
        if (lxTabs.activeTab === 0)
        {
            timer3 = $timeout(function()
            {
                setIndicatorPosition();
            });
        }
        else
        {
            setActiveTab(lxTabs.tabs[0]);
        }
    }
    function setActiveTab(_tab)
    {
        if (!_tab.disabled)
        {
            lxTabs.activeTab = _tab.index;
        }
    }
    // Position/animate the underline indicator under the active tab link.
    // Without a previous tab the indicator is placed instantly; otherwise it
    // slides via two staggered velocity animations (leading edge first).
    function setIndicatorPosition(_previousActiveTab)
    {
        var direction = lxTabs.activeTab > _previousActiveTab ? 'right' : 'left';
        var indicator = $element.find('.tabs__indicator');
        var activeTab = $element.find('.tabs__link').eq(lxTabs.activeTab);
        var indicatorLeft = activeTab.position().left;
        var indicatorRight = $element.outerWidth() - (indicatorLeft + activeTab.outerWidth());
        if (angular.isUndefined(_previousActiveTab))
        {
            indicator.css(
            {
                left: indicatorLeft,
                right: indicatorRight
            });
        }
        else
        {
            var animationProperties = {
                duration: 200,
                easing: 'easeOutQuint'
            };
            if (direction === 'left')
            {
                indicator.velocity(
                {
                    left: indicatorLeft
                }, animationProperties);
                indicator.velocity(
                {
                    right: indicatorRight
                }, animationProperties);
            }
            else
            {
                indicator.velocity(
                {
                    right: indicatorRight
                }, animationProperties);
                indicator.velocity(
                {
                    left: indicatorLeft
                }, animationProperties);
            }
        }
    }
    function setViewMode(_viewMode)
    {
        lxTabs.viewMode = _viewMode;
    }
    function tabIsActive(_index)
    {
        return lxTabs.activeTab === _index;
    }
    // Register a new tab or refresh an existing one (matched by index).
    function updateTabs(_tab)
    {
        var newTab = true;
        angular.forEach(lxTabs.tabs, function(tab)
        {
            if (tab.index === _tab.index)
            {
                newTab = false;
                tab.uuid = _tab.uuid;
                tab.icon = _tab.icon;
                tab.label = _tab.label;
            }
        });
        if (newTab)
        {
            lxTabs.tabs.push(_tab);
            // Re-anchor the indicator when tabs are added after initial load.
            if (angular.isDefined(tabsLength))
            {
                timer4 = $timeout(function()
                {
                    setIndicatorPosition();
                });
            }
        }
    }
}
function lxTab()
{
    // Directive factory for <lx-tab>: a single transcluded pane registered
    // with the parent <lx-tabs>. Label and icon arrive as interpolated
    // attributes (lx-label / lx-icon) observed in the link function.
    return {
        restrict: 'E',
        require: ['lxTab', '^lxTabs'],
        templateUrl: 'tab.html',
        scope:
        {
            ngDisabled: '=?'
        },
        link: link,
        controller: LxTabController,
        controllerAs: 'lxTab',
        bindToController: true,
        replace: true,
        transclude: true
    };
    function link(scope, element, attrs, ctrls)
    {
        // ctrls[0] = this tab, ctrls[1] = parent tab bar; DOM position is
        // the tab's index.
        ctrls[0].init(ctrls[1], element.index());
        attrs.$observe('lxLabel', function(_newLabel)
        {
            ctrls[0].setLabel(_newLabel);
        });
        attrs.$observe('lxIcon', function(_newIcon)
        {
            ctrls[0].setIcon(_newIcon);
        });
    }
}
LxTabController.$inject = ['$scope', 'LxUtils'];
function LxTabController($scope, LxUtils)
{
    // Controller for <lx-tab>. Owns one tab model and keeps the parent
    // <lx-tabs> controller informed of every change to it.
    var lxTab = this;
    var parentCtrl;
    // The tab model shared with the parent tab bar.
    var tab = {
        uuid: LxUtils.generateUUID(),
        index: undefined,
        label: undefined,
        icon: undefined,
        disabled: false
    };
    lxTab.init = init;
    lxTab.setIcon = setIcon;
    lxTab.setLabel = setLabel;
    lxTab.tabIsActive = tabIsActive;
    // Mirror the ngDisabled binding onto the tab model.
    $scope.$watch(function()
    {
        return lxTab.ngDisabled;
    }, function(_isDisabled)
    {
        if (_isDisabled)
        {
            tab.disabled = true;
        }
        else
        {
            tab.disabled = false;
        }
        parentCtrl.updateTabs(tab);
    });
    // Deregister from the parent when the tab's scope is destroyed.
    $scope.$on('$destroy', function()
    {
        parentCtrl.removeTab(tab);
    });
    ////////////
    // Called from the link fn with the parent controller and DOM index.
    function init(_parentCtrl, _index)
    {
        parentCtrl = _parentCtrl;
        tab.index = _index;
        parentCtrl.updateTabs(tab);
    }
    function setIcon(_icon)
    {
        tab.icon = _icon;
        parentCtrl.updateTabs(tab);
    }
    function setLabel(_label)
    {
        tab.label = _label;
        parentCtrl.updateTabs(tab);
    }
    // Whether this tab is the currently active one (used to lazily render
    // the pane content via ng-if in the template).
    function tabIsActive()
    {
        return parentCtrl.tabIsActive(tab.index);
    }
}
function lxTabsPanes()
{
    // Directive factory for <lx-tabs-panes>: standalone pane container used
    // when links and panes are rendered separately ("separate" view mode).
    var ddo = {
        restrict: 'E',
        templateUrl: 'tabs-panes.html',
        scope: true,
        transclude: true,
        replace: true
    };
    return ddo;
}
function lxTabPane()
{
    // Directive factory for <lx-tab-pane>: one pane inside <lx-tabs-panes>,
    // shown/hidden by the tab bar in "separate" view mode.
    var ddo = {
        restrict: 'E',
        templateUrl: 'tab-pane.html',
        scope: true,
        transclude: true,
        replace: true
    };
    return ddo;
}
})();
(function()
{
    'use strict';
    // lumx.text-field module wiring.
    angular
        .module('lumx.text-field')
        .directive('lxTextField', lxTextField);
    lxTextField.$inject = ['$timeout'];
function lxTextField($timeout)
{
    // Directive factory for <lx-text-field>: decorates a transcluded
    // input/textarea with a floating label, icon, clear button and
    // focus/active state classes. Textareas auto-grow with their content.
    return {
        restrict: 'E',
        templateUrl: 'text-field.html',
        scope:
        {
            allowClear: '=?lxAllowClear',
            error: '=?lxError',
            fixedLabel: '=?lxFixedLabel',
            focus: '=?lxFocus',
            icon: '@?lxIcon',
            label: '@lxLabel',
            ngDisabled: '=?',
            theme: '@?lxTheme',
            valid: '=?lxValid'
        },
        link: link,
        controller: LxTextFieldController,
        controllerAs: 'lxTextField',
        bindToController: true,
        replace: true,
        transclude: true
    };
    function link(scope, element, attrs, ctrl, transclude)
    {
        // Attribute values mirrored onto the controller for backward
        // compatibility with pre-bindToController usage.
        var backwardOneWay = ['icon', 'label', 'theme'];
        var backwardTwoWay = ['error', 'fixedLabel', 'valid'];
        var input;
        var timer;
        angular.forEach(backwardOneWay, function(attribute)
        {
            if (angular.isDefined(attrs[attribute]))
            {
                attrs.$observe(attribute, function(newValue)
                {
                    scope.lxTextField[attribute] = newValue;
                });
            }
        });
        angular.forEach(backwardTwoWay, function(attribute)
        {
            if (angular.isDefined(attrs[attribute]))
            {
                scope.$watch(function()
                {
                    return scope.$parent.$eval(attrs[attribute]);
                }, function(newValue)
                {
                    scope.lxTextField[attribute] = newValue;
                });
            }
        });
        transclude(function()
        {
            // Prefer a transcluded <textarea>; fall back to <input>.
            input = element.find('textarea');
            if (input[0])
            {
                // Re-compute the textarea height on any content mutation.
                input.on('cut paste drop keydown', function()
                {
                    timer = $timeout(ctrl.updateTextareaHeight);
                });
            }
            else
            {
                input = element.find('input');
            }
            input.addClass('text-field__input');
            ctrl.setInput(input);
            ctrl.setModel(input.data('$ngModelController'));
            input.on('focus', function()
            {
                // Only wrap in $apply when not already inside a digest.
                var phase = scope.$root.$$phase;
                if (phase === '$apply' || phase === '$digest')
                {
                    ctrl.focusInput();
                }
                else
                {
                    scope.$apply(ctrl.focusInput);
                }
            });
            input.on('blur', ctrl.blurInput);
        });
        scope.$on('$destroy', function()
        {
            // Fix: cancel the pending height-update $timeout as well, so its
            // callback cannot fire against a destroyed scope/element (every
            // other component in this file cancels its timers on $destroy).
            $timeout.cancel(timer);
            input.off();
        });
    }
}
LxTextFieldController.$inject = ['$scope', '$timeout'];
function LxTextFieldController($scope, $timeout)
{
    // Controller for <lx-text-field>. Tracks active/focus state from the
    // ngModel view value, supports programmatic focus and input clearing,
    // and auto-sizes transcluded textareas.
    var lxTextField = this;
    // jQuery-wrapped transcluded input/textarea, set by the link fn.
    var input;
    // The input's NgModelController, set by the link fn.
    var modelController;
    var timer1;
    var timer2;
    lxTextField.blurInput = blurInput;
    lxTextField.clearInput = clearInput;
    lxTextField.focusInput = focusInput;
    lxTextField.hasValue = hasValue;
    lxTextField.setInput = setInput;
    lxTextField.setModel = setModel;
    lxTextField.updateTextareaHeight = updateTextareaHeight;
    // Keep the floating-label "active" state in sync with the model value.
    $scope.$watch(function()
    {
        return modelController.$viewValue;
    }, function(newValue, oldValue)
    {
        if (angular.isDefined(newValue) && newValue)
        {
            lxTextField.isActive = true;
        }
        else
        {
            lxTextField.isActive = false;
        }
    });
    // Programmatic focus: focusing resets lx-focus so it can be re-used.
    $scope.$watch(function()
    {
        return lxTextField.focus;
    }, function(newValue, oldValue)
    {
        if (angular.isDefined(newValue) && newValue)
        {
            $timeout(function()
            {
                input.focus();
                // Reset the value so we can re-focus the field later on if we want to.
                lxTextField.focus = false;
            });
        }
    });
    $scope.$on('$destroy', function()
    {
        $timeout.cancel(timer1);
        $timeout.cancel(timer2);
    });
    ////////////
    // DOM blur handler (registered in the link fn, outside a digest).
    function blurInput()
    {
        if (!hasValue())
        {
            $scope.$apply(function()
            {
                lxTextField.isActive = false;
            });
        }
        $scope.$apply(function()
        {
            lxTextField.isFocus = false;
        });
    }
    // Clear-button click handler: empties the model and re-renders.
    function clearInput(_event)
    {
        _event.stopPropagation();
        modelController.$setViewValue(undefined);
        modelController.$render();
    }
    function focusInput()
    {
        lxTextField.isActive = true;
        lxTextField.isFocus = true;
    }
    function hasValue()
    {
        return angular.isDefined(input.val()) && input.val().length > 0;
    }
    // Deferred initial state, run once the input has been registered.
    function init()
    {
        lxTextField.isActive = hasValue();
        lxTextField.focus = angular.isDefined(lxTextField.focus) ? lxTextField.focus : false;
        lxTextField.isFocus = lxTextField.focus;
    }
    function setInput(_input)
    {
        input = _input;
        timer1 = $timeout(init);
        // NOTE(review): jQuery's `.selector` property was removed in jQuery 3,
        // so this textarea check silently never matches there — confirm the
        // bundled jQuery version, or switch to input.is('textarea').
        if (input.selector === 'textarea')
        {
            timer2 = $timeout(updateTextareaHeight);
        }
    }
    function setModel(_modelControler)
    {
        modelController = _modelControler;
    }
    // Grow the textarea to fit its content by measuring an off-screen clone.
    function updateTextareaHeight()
    {
        var tmpTextArea = angular.element('<textarea class="text-field__input" style="width: ' + input.width() + 'px;">' + input.val() + '</textarea>');
        tmpTextArea.appendTo('body');
        input.css(
        {
            height: tmpTextArea[0].scrollHeight + 'px'
        });
        tmpTextArea.remove();
    }
}
})();
(function()
{
    'use strict';
    // lumx.tooltip module wiring.
    angular
        .module('lumx.tooltip')
        .directive('lxTooltip', lxTooltip);
function lxTooltip()
{
    // Attribute directive: shows a positioned tooltip (built in the
    // controller) while hovering the host element.
    return {
        restrict: 'A',
        scope:
        {
            tooltip: '@lxTooltip',
            position: '@?lxTooltipPosition'
        },
        link: link,
        controller: LxTooltipController,
        controllerAs: 'lxTooltip',
        bindToController: true
    };
    function link(scope, element, attrs, ctrl)
    {
        // Track live attribute changes so an already-open tooltip updates.
        if (angular.isDefined(attrs.lxTooltip))
        {
            attrs.$observe('lxTooltip', function(newValue)
            {
                ctrl.updateTooltipText(newValue);
            });
        }
        if (angular.isDefined(attrs.lxTooltipPosition))
        {
            attrs.$observe('lxTooltipPosition', function(newValue)
            {
                scope.lxTooltip.position = newValue;
            });
        }
        element.on('mouseenter', ctrl.showTooltip);
        element.on('mouseleave', ctrl.hideTooltip);
        scope.$on('$destroy', function()
        {
            // Remove all handlers bound on the host element.
            element.off();
        });
    }
}
LxTooltipController.$inject = ['$element', '$scope', '$timeout', 'LxDepthService'];
function LxTooltipController($element, $scope, $timeout, LxDepthService)
{
    // Controller for lx-tooltip. Lazily builds the tooltip DOM on first
    // hover, appends it to <body>, positions it relative to the host
    // element, and tears it down after a fade-out delay.
    var lxTooltip = this;
    var timer1;
    var timer2;
    // jQuery elements; undefined while the tooltip is not shown.
    var tooltip;
    var tooltipBackground;
    var tooltipLabel;
    lxTooltip.hideTooltip = hideTooltip;
    lxTooltip.showTooltip = showTooltip;
    lxTooltip.updateTooltipText = updateTooltipText;
    // Default placement above the host element.
    lxTooltip.position = angular.isDefined(lxTooltip.position) ? lxTooltip.position : 'top';
    $scope.$on('$destroy', function()
    {
        if (angular.isDefined(tooltip))
        {
            tooltip.remove();
            tooltip = undefined;
        }
        $timeout.cancel(timer1);
        $timeout.cancel(timer2);
    });
    ////////////
    // Fade out, then remove the tooltip from the DOM after the 200ms
    // CSS transition has finished.
    function hideTooltip()
    {
        if (angular.isDefined(tooltip))
        {
            tooltip.removeClass('tooltip--is-active');
            timer1 = $timeout(function()
            {
                if (angular.isDefined(tooltip))
                {
                    tooltip.remove();
                    tooltip = undefined;
                }
            }, 200);
        }
    }
    // Position the tooltip around the host element. The tooltip is appended
    // to <body> here first so outerWidth/outerHeight can be measured;
    // showTooltip() re-appends the same nodes afterwards (jQuery append of
    // attached nodes just moves them, so this is not a duplication).
    function setTooltipPosition()
    {
        var width = $element.outerWidth(),
            height = $element.outerHeight(),
            top = $element.offset().top,
            left = $element.offset().left;
        tooltip
            .append(tooltipBackground)
            .append(tooltipLabel)
            .appendTo('body');
        if (lxTooltip.position === 'top')
        {
            tooltip.css(
            {
                left: left - (tooltip.outerWidth() / 2) + (width / 2),
                top: top - tooltip.outerHeight()
            });
        }
        else if (lxTooltip.position === 'bottom')
        {
            tooltip.css(
            {
                left: left - (tooltip.outerWidth() / 2) + (width / 2),
                top: top + height
            });
        }
        else if (lxTooltip.position === 'left')
        {
            tooltip.css(
            {
                left: left - tooltip.outerWidth(),
                top: top + (height / 2) - (tooltip.outerHeight() / 2)
            });
        }
        else if (lxTooltip.position === 'right')
        {
            tooltip.css(
            {
                left: left + width,
                top: top + (height / 2) - (tooltip.outerHeight() / 2)
            });
        }
    }
    // Build and display the tooltip (no-op while one is already visible).
    function showTooltip()
    {
        if (angular.isUndefined(tooltip))
        {
            // Reserve a fresh z-index slot above other LumX overlays.
            LxDepthService.register();
            tooltip = angular.element('<div/>',
            {
                class: 'tooltip tooltip--' + lxTooltip.position
            });
            tooltipBackground = angular.element('<div/>',
            {
                class: 'tooltip__background'
            });
            tooltipLabel = angular.element('<span/>',
            {
                class: 'tooltip__label',
                text: lxTooltip.tooltip
            });
            setTooltipPosition();
            tooltip
                .append(tooltipBackground)
                .append(tooltipLabel)
                .css('z-index', LxDepthService.getDepth())
                .appendTo('body');
            // Add the active class on the next tick so the CSS transition runs.
            timer2 = $timeout(function()
            {
                tooltip.addClass('tooltip--is-active');
            });
        }
    }
    // Live-update the label when the lx-tooltip attribute changes.
    function updateTooltipText(_newValue)
    {
        if (angular.isDefined(tooltipLabel))
        {
            tooltipLabel.text(_newValue);
        }
    }
}
})();
// Pre-compiled $templateCache entries for the lumx.dropdown directives.
angular.module("lumx.dropdown").run(['$templateCache', function(a) { a.put('dropdown.html', '<div class="dropdown"\n' +
'     ng-class="{ \'dropdown--has-toggle\': lxDropdown.hasToggle,\n' +
'                 \'dropdown--is-open\': lxDropdown.isOpen }"\n' +
'     ng-transclude></div>\n' +
'');
a.put('dropdown-toggle.html', '<div class="dropdown-toggle" ng-transclude></div>\n' +
'');
a.put('dropdown-menu.html', '<div class="dropdown-menu">\n' +
'    <div class="dropdown-menu__content" ng-transclude ng-if="lxDropdownMenu.parentCtrl.isOpen"></div>\n' +
'</div>\n' +
'');
}]);
// Pre-compiled $templateCache entry for the lumx.file-input directive.
angular.module("lumx.file-input").run(['$templateCache', function(a) { a.put('file-input.html', '<div class="input-file">\n' +
'    <span class="input-file__label">{{ lxFileInput.label }}</span>\n' +
'    <span class="input-file__filename">{{ lxFileInput.fileName }}</span>\n' +
'    <input type="file" class="input-file__input">\n' +
'</div>\n' +
'');
}]);
// Pre-compiled $templateCache entry for the lumx.text-field directive.
angular.module("lumx.text-field").run(['$templateCache', function(a) { a.put('text-field.html', '<div class="text-field"\n' +
'     ng-class="{ \'text-field--error\': lxTextField.error,\n' +
'                 \'text-field--fixed-label\': lxTextField.fixedLabel,\n' +
'                 \'text-field--has-icon\': lxTextField.icon,\n' +
'                 \'text-field--has-value\': lxTextField.hasValue(),\n' +
'                 \'text-field--is-active\': lxTextField.isActive,\n' +
'                 \'text-field--is-disabled\': lxTextField.ngDisabled,\n' +
'                 \'text-field--is-focus\': lxTextField.isFocus,\n' +
'                 \'text-field--theme-light\': !lxTextField.theme || lxTextField.theme === \'light\',\n' +
'                 \'text-field--theme-dark\': lxTextField.theme === \'dark\',\n' +
'                 \'text-field--valid\': lxTextField.valid }">\n' +
'    <div class="text-field__icon" ng-if="lxTextField.icon">\n' +
'        <i class="mdi mdi-{{ lxTextField.icon }}"></i>\n' +
'    </div>\n' +
'\n' +
'    <label class="text-field__label">\n' +
'        {{ lxTextField.label }}\n' +
'    </label>\n' +
'\n' +
'    <div ng-transclude></div>\n' +
'\n' +
'    <span class="text-field__clear" ng-click="lxTextField.clearInput($event)" ng-if="lxTextField.allowClear">\n' +
'        <i class="mdi mdi-close-circle"></i>\n' +
'    </span>\n' +
'</div>\n' +
'');
}]);
// Pre-compiled $templateCache entry for the lumx.search-filter directive.
angular.module("lumx.search-filter").run(['$templateCache', function(a) { a.put('search-filter.html', '<div class="search-filter" ng-class="lxSearchFilter.getClass()">\n' +
'    <div class="search-filter__container">\n' +
'        <div class="search-filter__button">\n' +
'            <lx-button type="submit" lx-size="l" lx-color="{{ lxSearchFilter.color }}" lx-type="icon" ng-click="lxSearchFilter.openInput()">\n' +
'                <i class="mdi mdi-magnify"></i>\n' +
'            </lx-button>\n' +
'        </div>\n' +
'\n' +
'        <div class="search-filter__input" ng-transclude></div>\n' +
'\n' +
'        <div class="search-filter__clear">\n' +
'            <lx-button type="button" lx-size="l" lx-color="{{ lxSearchFilter.color }}" lx-type="icon" ng-click="lxSearchFilter.clearInput()">\n' +
'                <i class="mdi mdi-close"></i>\n' +
'            </lx-button>\n' +
'        </div>\n' +
'    </div>\n' +
'\n' +
'    <div class="search-filter__loader" ng-if="lxSearchFilter.isLoading">\n' +
'        <lx-progress lx-type="linear"></lx-progress>\n' +
'    </div>\n' +
'\n' +
'    <lx-dropdown id="{{ lxSearchFilter.dropdownId }}" lx-effect="none" lx-width="100%" ng-if="lxSearchFilter.autocomplete">\n' +
'        <lx-dropdown-menu class="search-filter__autocomplete-list">\n' +
'            <ul>\n' +
'                <li ng-repeat="item in lxSearchFilter.autocompleteList track by $index">\n' +
'                    <a class="search-filter__autocomplete-item"\n' +
'                       ng-class="{ \'search-filter__autocomplete-item--is-active\': lxSearchFilter.activeChoiceIndex === $index }"\n' +
'                       ng-click="lxSearchFilter.selectItem(item)"\n' +
'                       ng-bind-html="item | lxSearchHighlight:lxSearchFilter.modelController.$viewValue:lxSearchFilter.icon"></a>\n' +
'                </li>\n' +
'            </ul>\n' +
'        </lx-dropdown-menu>\n' +
'    </lx-dropdown>\n' +
'</div>');
}]);
// Pre-compiled $templateCache entries for the lumx.select directives.
angular.module("lumx.select").run(['$templateCache', function(a) { a.put('select.html', '<div class="lx-select"\n' +
'     ng-class="{ \'lx-select--error\': lxSelect.error,\n' +
'                 \'lx-select--fixed-label\': lxSelect.fixedLabel && lxSelect.viewMode === \'field\',\n' +
'                 \'lx-select--is-active\': (!lxSelect.multiple && lxSelect.getSelectedModel()) || (lxSelect.multiple && lxSelect.getSelectedModel().length),\n' +
'                 \'lx-select--is-disabled\': lxSelect.ngDisabled,\n' +
'                 \'lx-select--is-multiple\': lxSelect.multiple,\n' +
'                 \'lx-select--is-unique\': !lxSelect.multiple,\n' +
'                 \'lx-select--theme-light\': !lxSelect.theme || lxSelect.theme === \'light\',\n' +
'                 \'lx-select--theme-dark\': lxSelect.theme === \'dark\',\n' +
'                 \'lx-select--valid\': lxSelect.valid,\n' +
'                 \'lx-select--custom-style\': lxSelect.customStyle,\n' +
'                 \'lx-select--default-style\': !lxSelect.customStyle,\n' +
'                 \'lx-select--view-mode-field\': !lxSelect.multiple || (lxSelect.multiple && lxSelect.viewMode === \'field\'),\n' +
'                 \'lx-select--view-mode-chips\': lxSelect.multiple && lxSelect.viewMode === \'chips\',\n' +
'                 \'lx-select--autocomplete\': lxSelect.autocomplete }">\n' +
'    <span class="lx-select-label" ng-if="!lxSelect.autocomplete">\n' +
'        {{ ::lxSelect.label }}\n' +
'    </span>\n' +
'\n' +
'    <lx-dropdown id="dropdown-{{ lxSelect.uuid }}" lx-width="100%" lx-effect="{{ lxSelect.autocomplete ? \'none\' : \'expand\' }}">\n' +
'        <ng-transclude></ng-transclude>\n' +
'    </lx-dropdown>\n' +
'</div>\n' +
'');
a.put('select-selected.html', '<div>\n' +
'    <lx-dropdown-toggle ng-if="::!lxSelectSelected.parentCtrl.autocomplete">\n' +
'        <ng-include src="\'select-selected-content.html\'"></ng-include>\n' +
'    </lx-dropdown-toggle>\n' +
'\n' +
'    <ng-include src="\'select-selected-content.html\'" ng-if="::lxSelectSelected.parentCtrl.autocomplete"></ng-include>\n' +
'</div>\n' +
'');
a.put('select-selected-content.html', '<div class="lx-select-selected-wrapper" id="lx-select-selected-wrapper-{{ lxSelectSelected.parentCtrl.uuid }}">\n' +
'    <div class="lx-select-selected" ng-if="!lxSelectSelected.parentCtrl.multiple && lxSelectSelected.parentCtrl.getSelectedModel()">\n' +
'        <span class="lx-select-selected__value"\n' +
'              ng-bind-html="lxSelectSelected.parentCtrl.displaySelected()"></span>\n' +
'\n' +
'        <a class="lx-select-selected__clear"\n' +
'           ng-click="lxSelectSelected.clearModel($event)"\n' +
'           ng-if="::lxSelectSelected.parentCtrl.allowClear">\n' +
'            <i class="mdi mdi-close-circle"></i>\n' +
'        </a>\n' +
'    </div>\n' +
'\n' +
'    <div class="lx-select-selected" ng-if="lxSelectSelected.parentCtrl.multiple">\n' +
'        <span class="lx-select-selected__tag"\n' +
'              ng-class="{ \'lx-select-selected__tag--is-active\': lxSelectSelected.parentCtrl.activeSelectedIndex === $index }"\n' +
'              ng-click="lxSelectSelected.removeSelected(selected, $event)"\n' +
'              ng-repeat="selected in lxSelectSelected.parentCtrl.getSelectedModel()"\n' +
'              ng-bind-html="lxSelectSelected.parentCtrl.displaySelected(selected)"></span>\n' +
'\n' +
'        <input type="text"\n' +
'               placeholder="{{ ::lxSelectSelected.parentCtrl.label }}"\n' +
'               class="lx-select-selected__filter"\n' +
'               ng-model="lxSelectSelected.parentCtrl.filterModel"\n' +
'               ng-change="lxSelectSelected.parentCtrl.updateFilter()"\n' +
'               ng-keydown="lxSelectSelected.parentCtrl.keyEvent($event)"\n' +
'               ng-if="::lxSelectSelected.parentCtrl.autocomplete && !lxSelectSelected.parentCtrl.ngDisabled">\n' +
'    </div>\n' +
'</div>');
a.put('select-choices.html', '<lx-dropdown-menu class="lx-select-choices"\n' +
'                  ng-class="{ \'lx-select-choices--custom-style\': lxSelectChoices.parentCtrl.choicesCustomStyle,\n' +
'                              \'lx-select-choices--default-style\': !lxSelectChoices.parentCtrl.choicesCustomStyle,\n' +
'                              \'lx-select-choices--is-multiple\': lxSelectChoices.parentCtrl.multiple,\n' +
'                              \'lx-select-choices--is-unique\': !lxSelectChoices.parentCtrl.multiple, }">\n' +
'    <ul>\n' +
'        <li class="lx-select-choices__filter" ng-if="::lxSelectChoices.parentCtrl.displayFilter && !lxSelectChoices.parentCtrl.autocomplete">\n' +
'            <lx-search-filter lx-dropdown-filter>\n' +
'                <input type="text" ng-model="lxSelectChoices.parentCtrl.filterModel" ng-change="lxSelectChoices.parentCtrl.updateFilter()">\n' +
'            </lx-search-filter>\n' +
'        </li>\n' +
'        \n' +
'        <div ng-if="::lxSelectChoices.isArray()">\n' +
'            <li class="lx-select-choices__choice"\n' +
'                ng-class="{ \'lx-select-choices__choice--is-selected\': lxSelectChoices.parentCtrl.isSelected(choice),\n' +
'                            \'lx-select-choices__choice--is-focus\': lxSelectChoices.parentCtrl.activeChoiceIndex === $index }"\n' +
'                ng-repeat="choice in lxSelectChoices.parentCtrl.choices | filterChoices:lxSelectChoices.parentCtrl.filter:lxSelectChoices.parentCtrl.filterModel"\n' +
'                ng-bind-html="::lxSelectChoices.parentCtrl.displayChoice(choice)"\n' +
'                ng-click="lxSelectChoices.parentCtrl.toggleChoice(choice, $event)"></li>\n' +
'        </div>\n' +
'\n' +
'        <div ng-if="::!lxSelectChoices.isArray()">\n' +
'            <li class="lx-select-choices__subheader"\n' +
'                ng-repeat-start="(subheader, children) in lxSelectChoices.parentCtrl.choices"\n' +
'                ng-bind-html="::lxSelectChoices.parentCtrl.displaySubheader(subheader)"></li>\n' +
'\n' +
'            <li class="lx-select-choices__choice"\n' +
'                ng-class="{ \'lx-select-choices__choice--is-selected\': lxSelectChoices.parentCtrl.isSelected(choice),\n' +
'                            \'lx-select-choices__choice--is-focus\': lxSelectChoices.parentCtrl.activeChoiceIndex === $index }"\n' +
'                ng-repeat-end\n' +
'                ng-repeat="choice in children | filterChoices:lxSelectChoices.parentCtrl.filter:lxSelectChoices.parentCtrl.filterModel"\n' +
'                ng-bind-html="::lxSelectChoices.parentCtrl.displayChoice(choice)"\n' +
'                ng-click="lxSelectChoices.parentCtrl.toggleChoice(choice, $event)"></li>\n' +
'        </div>\n' +
'\n' +
'        <li class="lx-select-choices__subheader" ng-if="lxSelectChoices.parentCtrl.helperDisplayable()">\n' +
'            {{ lxSelectChoices.parentCtrl.helperMessage }}\n' +
'        </li>\n' +
'\n' +
'        <li class="lx-select-choices__loader" ng-if="lxSelectChoices.parentCtrl.loading">\n' +
'            <lx-progress lx-type="circular" lx-color="primary" lx-diameter="20"></lx-progress>\n' +
'        </li>\n' +
'    </ul>\n' +
'</lx-dropdown-menu>\n' +
'');
}]);
// Pre-compiled $templateCache entries for the lumx.tabs directives.
angular.module("lumx.tabs").run(['$templateCache', function(a) { a.put('tabs.html', '<div class="tabs tabs--layout-{{ lxTabs.layout }} tabs--theme-{{ lxTabs.theme }} tabs--color-{{ lxTabs.color }} tabs--indicator-{{ lxTabs.indicator }}">\n' +
'    <div class="tabs__links">\n' +
'        <a class="tabs__link"\n' +
'           ng-class="{ \'tabs__link--is-active\': lxTabs.tabIsActive(tab.index),\n' +
'                       \'tabs__link--is-disabled\': tab.disabled }"\n' +
'           ng-repeat="tab in lxTabs.tabs"\n' +
'           ng-click="lxTabs.setActiveTab(tab)"\n' +
'           lx-ripple>\n' +
'            <i class="mdi mdi-{{ tab.icon }}" ng-if="tab.icon"></i>\n' +
'            <span ng-if="tab.label">{{ tab.label }}</span>\n' +
'        </a>\n' +
'    </div>\n' +
'    \n' +
'    <div class="tabs__panes" ng-if="lxTabs.viewMode === \'gather\'" ng-transclude></div>\n' +
'    <div class="tabs__indicator"></div>\n' +
'</div>\n' +
'');
a.put('tabs-panes.html', '<div class="tabs">\n' +
'    <div class="tabs__panes" ng-transclude></div>\n' +
'</div>');
a.put('tab.html', '<div class="tabs__pane" ng-class="{ \'tabs__pane--is-disabled\': lxTab.ngDisabled }">\n' +
'    <div ng-if="lxTab.tabIsActive()" ng-transclude></div>\n' +
'</div>\n' +
'');
a.put('tab-pane.html', '<div class="tabs__pane" ng-transclude></div>\n' +
'');
}]);
angular.module("lumx.date-picker").run(['$templateCache', function(a) { a.put('date-picker.html', '<div class="lx-date">\n' +
' <!-- Date picker input -->\n' +
' <div class="lx-date-input" ng-click="lxDatePicker.openDatePicker()" ng-if="lxDatePicker.hasInput">\n' +
' <ng-transclude></ng-transclude>\n' +
' </div>\n' +
' \n' +
' <!-- Date picker -->\n' +
' <div class="lx-date-picker lx-date-picker--{{ lxDatePicker.color }}">\n' +
' <div ng-if="lxDatePicker.isOpen">\n' +
' <!-- Date picker: header -->\n' +
' <div class="lx-date-picker__header">\n' +
' <a class="lx-date-picker__current-year"\n' +
' ng-class="{ \'lx-date-picker__current-year--is-active\': lxDatePicker.yearSelection }"\n' +
' ng-click="lxDatePicker.displayYearSelection()">\n' +
' {{ lxDatePicker.moment(lxDatePicker.ngModel).format(\'YYYY\') }}\n' +
' </a>\n' +
'\n' +
' <a class="lx-date-picker__current-date"\n' +
' ng-class="{ \'lx-date-picker__current-date--is-active\': !lxDatePicker.yearSelection }"\n' +
' ng-click="lxDatePicker.hideYearSelection()">\n' +
' {{ lxDatePicker.getDateFormatted() }}\n' +
' </a>\n' +
' </div>\n' +
' \n' +
' <!-- Date picker: content -->\n' +
' <div class="lx-date-picker__content">\n' +
' <!-- Calendar -->\n' +
' <div class="lx-date-picker__calendar" ng-if="!lxDatePicker.yearSelection">\n' +
' <div class="lx-date-picker__nav">\n' +
' <lx-button lx-size="l" lx-color="black" lx-type="icon" ng-click="lxDatePicker.previousMonth()">\n' +
' <i class="mdi mdi-chevron-left"></i>\n' +
' </lx-button>\n' +
'\n' +
' <span>{{ lxDatePicker.ngModelMoment.format(\'MMMM YYYY\') }}</span>\n' +
' \n' +
' <lx-button lx-size="l" lx-color="black" lx-type="icon" ng-click="lxDatePicker.nextMonth()">\n' +
' <i class="mdi mdi-chevron-right"></i>\n' +
' </lx-button>\n' +
' </div>\n' +
'\n' +
' <div class="lx-date-picker__days-of-week">\n' +
' <span ng-repeat="day in lxDatePicker.daysOfWeek">{{ day }}</span>\n' +
' </div>\n' +
'\n' +
' <div class="lx-date-picker__days">\n' +
' <span class="lx-date-picker__day lx-date-picker__day--is-empty"\n' +
' ng-repeat="x in lxDatePicker.emptyFirstDays"> </span>\n' +
'\n' +
' <div class="lx-date-picker__day"\n' +
' ng-class="{ \'lx-date-picker__day--is-selected\': day.selected,\n' +
' \'lx-date-picker__day--is-today\': day.today && !day.selected,\n' +
' \'lx-date-picker__day--is-disabled\': day.disabled }"\n' +
' ng-repeat="day in lxDatePicker.days">\n' +
' <a ng-click="lxDatePicker.select(day)">{{ day ? day.format(\'D\') : \'\' }}</a>\n' +
' </div>\n' +
'\n' +
' <span class="lx-date-picker__day lx-date-picker__day--is-empty"\n' +
' ng-repeat="x in lxDatePicker.emptyLastDays"> </span>\n' +
' </div>\n' +
' </div>\n' +
'\n' +
' <!-- Year selection -->\n' +
' <div class="lx-date-picker__year-selector" ng-if="lxDatePicker.yearSelection">\n' +
' <a class="lx-date-picker__year"\n' +
' ng-class="{ \'lx-date-picker__year--is-active\': year == lxDatePicker.moment(lxDatePicker.ngModel).format(\'YYYY\') }"\n' +
' ng-repeat="year in lxDatePicker.years"\n' +
' ng-click="lxDatePicker.selectYear(year)"\n' +
' ng-if="lxDatePicker.yearSelection">\n' +
' {{ year }}\n' +
' </a>\n' +
' </div>\n' +
' </div>\n' +
'\n' +
' <!-- Actions -->\n' +
' <div class="lx-date-picker__actions">\n' +
' <lx-button lx-color="{{ lxDatePicker.color }}" lx-type="flat" ng-click="lxDatePicker.closeDatePicker()">\n' +
' Ok\n' +
' </lx-button>\n' +
' </div>\n' +
' </div>\n' +
' </div>\n' +
'</div>');
}]);
angular.module("lumx.progress").run(['$templateCache', function(a) { a.put('progress.html', '<div class="progress-container progress-container--{{ lxProgress.lxType }} progress-container--{{ lxProgress.lxColor }}"\n' +
' ng-class="{ \'progress-container--determinate\': lxProgress.lxValue,\n' +
' \'progress-container--indeterminate\': !lxProgress.lxValue }">\n' +
' <div class="progress-circular"\n' +
' ng-if="lxProgress.lxType === \'circular\'"\n' +
' ng-style="lxProgress.getProgressDiameter()">\n' +
' <svg class="progress-circular__svg">\n' +
' <circle class="progress-circular__path" cx="50" cy="50" r="20" fill="none" stroke-width="4" stroke-miterlimit="10" ng-style="lxProgress.getCircularProgressValue()">\n' +
' </svg>\n' +
' </div>\n' +
'\n' +
' <div class="progress-linear" ng-if="lxProgress.lxType === \'linear\'">\n' +
' <div class="progress-linear__background"></div>\n' +
' <div class="progress-linear__bar progress-linear__bar--first" ng-style="lxProgress.getLinearProgressValue()"></div>\n' +
' <div class="progress-linear__bar progress-linear__bar--second"></div>\n' +
' </div>\n' +
'</div>\n' +
'');
}]);
angular.module("lumx.button").run(['$templateCache', function(a) { a.put('link.html', '<a ng-transclude lx-ripple></a>\n' +
'');
a.put('button.html', '<button ng-transclude lx-ripple></button>\n' +
'');
}]);
angular.module("lumx.checkbox").run(['$templateCache', function(a) { a.put('checkbox.html', '<div class="checkbox checkbox--{{ lxCheckbox.lxColor }}"\n' +
' ng-class="{ \'checkbox--theme-light\': !lxCheckbox.theme || lxCheckbox.theme === \'light\',\n' +
' \'checkbox--theme-dark\': lxCheckbox.theme === \'dark\' }" >\n' +
' <input id="{{ lxCheckbox.getCheckboxId() }}"\n' +
' type="checkbox"\n' +
' class="checkbox__input"\n' +
' name="{{ lxCheckbox.name }}"\n' +
' ng-model="lxCheckbox.ngModel"\n' +
' ng-true-value="{{ lxCheckbox.ngTrueValue }}"\n' +
' ng-false-value="{{ lxCheckbox.ngFalseValue }}"\n' +
' ng-change="lxCheckbox.triggerNgChange()"\n' +
' ng-disabled="lxCheckbox.ngDisabled">\n' +
'\n' +
' <label for="{{ lxCheckbox.getCheckboxId() }}" class="checkbox__label" ng-transclude ng-if="!lxCheckbox.getCheckboxHasChildren()"></label>\n' +
' <ng-transclude-replace ng-if="lxCheckbox.getCheckboxHasChildren()"></ng-transclude-replace>\n' +
'</div>\n' +
'');
a.put('checkbox-label.html', '<label for="{{ lxCheckboxLabel.getCheckboxId() }}" class="checkbox__label" ng-transclude></label>\n' +
'');
a.put('checkbox-help.html', '<span class="checkbox__help" ng-transclude></span>\n' +
'');
}]);
angular.module("lumx.radio-button").run(['$templateCache', function(a) { a.put('radio-group.html', '<div class="radio-group" ng-transclude></div>\n' +
'');
a.put('radio-button.html', '<div class="radio-button radio-button--{{ lxRadioButton.lxColor }}">\n' +
' <input id="{{ lxRadioButton.getRadioButtonId() }}"\n' +
' type="radio"\n' +
' class="radio-button__input"\n' +
' name="{{ lxRadioButton.name }}"\n' +
' ng-model="lxRadioButton.ngModel"\n' +
' ng-value="lxRadioButton.ngValue"\n' +
' ng-change="lxRadioButton.triggerNgChange()"\n' +
' ng-disabled="lxRadioButton.ngDisabled">\n' +
'\n' +
' <label for="{{ lxRadioButton.getRadioButtonId() }}" class="radio-button__label" ng-transclude ng-if="!lxRadioButton.getRadioButtonHasChildren()"></label>\n' +
' <ng-transclude-replace ng-if="lxRadioButton.getRadioButtonHasChildren()"></ng-transclude-replace>\n' +
'</div>\n' +
'');
a.put('radio-button-label.html', '<label for="{{ lxRadioButtonLabel.getRadioButtonId() }}" class="radio-button__label" ng-transclude></label>\n' +
'');
a.put('radio-button-help.html', '<span class="radio-button__help" ng-transclude></span>\n' +
'');
}]);
angular.module("lumx.stepper").run(['$templateCache', function(a) { a.put('stepper.html', '<div class="lx-stepper" ng-class="lxStepper.getClasses()">\n' +
' <div class="lx-stepper__header" ng-if="lxStepper.layout === \'horizontal\'">\n' +
' <div class="lx-stepper__nav">\n' +
' <lx-step-nav lx-active-index="{{ lxStepper.activeIndex }}" lx-step="step" ng-repeat="step in lxStepper.steps"></lx-step-nav>\n' +
' </div>\n' +
'\n' +
' <div class="lx-stepper__feedback" ng-if="lxStepper.steps[lxStepper.activeIndex].feedback">\n' +
' <span>{{ lxStepper.steps[lxStepper.activeIndex].feedback }}</span>\n' +
' </div>\n' +
' </div>\n' +
'\n' +
' <div class="lx-stepper__steps" ng-transclude></div>\n' +
'</div>');
a.put('step.html', '<div class="lx-step" ng-class="lxStep.getClasses()">\n' +
' <div class="lx-step__nav" ng-if="lxStep.parent.layout === \'vertical\'">\n' +
' <lx-step-nav lx-active-index="{{ lxStep.parent.activeIndex }}" lx-step="lxStep.step"></lx-step-nav>\n' +
' </div>\n' +
'\n' +
' <div class="lx-step__wrapper" ng-if="lxStep.parent.activeIndex === lxStep.step.index">\n' +
' <div class="lx-step__content">\n' +
' <ng-transclude></ng-transclude>\n' +
'\n' +
' <div class="lx-step__progress" ng-if="lxStep.step.isLoading">\n' +
' <lx-progress lx-type="circular"></lx-progress>\n' +
' </div>\n' +
' </div>\n' +
'\n' +
' <div class="lx-step__actions" ng-if="lxStep.parent.activeIndex === lxStep.step.index">\n' +
' <div class="lx-step__action lx-step__action--continue">\n' +
' <lx-button ng-click="lxStep.submitStep()" ng-disabled="lxStep.isLoading">{{ lxStep.parent.labels.continue }}</lx-button>\n' +
' </div>\n' +
'\n' +
' <div class="lx-step__action lx-step__action--cancel" ng-if="lxStep.parent.cancel">\n' +
' <lx-button lx-color="black" lx-type="flat" ng-click="lxStep.parent.cancel()" ng-disabled="lxStep.isLoading">{{ lxStep.parent.labels.cancel }}</lx-button>\n' +
' </div>\n' +
'\n' +
' <div class="lx-step__action lx-step__action--back" ng-if="lxStep.parent.isLinear">\n' +
' <lx-button lx-color="black" lx-type="flat" ng-click="lxStep.previousStep()" ng-disabled="lxStep.isLoading || lxStep.step.index === 0">{{ lxStep.parent.labels.back }}</lx-button>\n' +
' </div>\n' +
' </div>\n' +
' </div>\n' +
'</div>');
a.put('step-nav.html', '<div class="lx-step-nav" ng-click="lxStepNav.parent.goToStep(lxStepNav.step.index)" ng-class="lxStepNav.getClasses()" lx-ripple>\n' +
' <div class="lx-step-nav__indicator lx-step-nav__indicator--index" ng-if="lxStepNav.step.isValid === undefined">\n' +
' <span>{{ lxStepNav.step.index + 1 }}</span>\n' +
' </div>\n' +
'\n' +
' <div class="lx-step-nav__indicator lx-step-nav__indicator--icon" ng-if="lxStepNav.step.isValid === true">\n' +
' <lx-icon lx-id="check" ng-if="!lxStepNav.step.isEditable"></lx-icon>\n' +
' <lx-icon lx-id="pencil" ng-if="lxStepNav.step.isEditable"></lx-icon>\n' +
' </div>\n' +
'\n' +
' <div class="lx-step-nav__indicator lx-step-nav__indicator--error" ng-if="lxStepNav.step.isValid === false">\n' +
' <lx-icon lx-id="alert"></lx-icon>\n' +
' </div>\n' +
'\n' +
' <div class="lx-step-nav__wrapper">\n' +
' <div class="lx-step-nav__label">\n' +
' <span>{{ lxStepNav.step.label }}</span>\n' +
' </div>\n' +
'\n' +
' <div class="lx-step-nav__state">\n' +
' <span ng-if="(lxStepNav.step.isValid === undefined || lxStepNav.step.isValid === true) && lxStepNav.step.isOptional">{{ lxStepNav.parent.labels.optional }}</span>\n' +
' <span ng-if="lxStepNav.step.isValid === false">{{ lxStepNav.step.errorMessage }}</span>\n' +
' </div>\n' +
' </div>\n' +
'</div>');
}]);
angular.module("lumx.switch").run(['$templateCache', function(a) { a.put('switch.html', '<div class="switch switch--{{ lxSwitch.lxColor }} switch--{{ lxSwitch.lxPosition }}">\n' +
' <input id="{{ lxSwitch.getSwitchId() }}"\n' +
' type="checkbox"\n' +
' class="switch__input"\n' +
' name="{{ lxSwitch.name }}"\n' +
' ng-model="lxSwitch.ngModel"\n' +
' ng-true-value="{{ lxSwitch.ngTrueValue }}"\n' +
' ng-false-value="{{ lxSwitch.ngFalseValue }}"\n' +
' ng-change="lxSwitch.triggerNgChange()"\n' +
' ng-disabled="lxSwitch.ngDisabled">\n' +
'\n' +
' <label for="{{ lxSwitch.getSwitchId() }}" class="switch__label" ng-transclude ng-if="!lxSwitch.getSwitchHasChildren()"></label>\n' +
' <ng-transclude-replace ng-if="lxSwitch.getSwitchHasChildren()"></ng-transclude-replace>\n' +
'</div>\n' +
'');
a.put('switch-label.html', '<label for="{{ lxSwitchLabel.getSwitchId() }}" class="switch__label" ng-transclude></label>\n' +
'');
a.put('switch-help.html', '<span class="switch__help" ng-transclude></span>\n' +
'');
}]);
angular.module("lumx.fab").run(['$templateCache', function(a) { a.put('fab.html', '<div class="fab">\n' +
' <ng-transclude-replace></ng-transclude-replace>\n' +
'</div>\n' +
'');
a.put('fab-trigger.html', '<div class="fab__primary" ng-transclude></div>\n' +
'');
a.put('fab-actions.html', '<div class="fab__actions fab__actions--{{ parentCtrl.lxDirection }}" ng-transclude></div>\n' +
'');
}]);
angular.module("lumx.icon").run(['$templateCache', function(a) { a.put('icon.html', '<i class="icon mdi" ng-class="lxIcon.getClass()"></i>');
}]);
angular.module("lumx.data-table").run(['$templateCache', function(a) { a.put('data-table.html', '<div class="data-table-container">\n' +
' <table class="data-table"\n' +
' ng-class="{ \'data-table--no-border\': !lxDataTable.border,\n' +
' \'data-table--thumbnail\': lxDataTable.thumbnail }">\n' +
' <thead>\n' +
' <tr ng-class="{ \'data-table__selectable-row\': lxDataTable.selectable,\n' +
' \'data-table__selectable-row--is-selected\': lxDataTable.selectable && lxDataTable.allRowsSelected }">\n' +
' <th ng-if="lxDataTable.thumbnail"></th>\n' +
' <th ng-click="lxDataTable.toggleAllSelected()"\n' +
' ng-if="lxDataTable.selectable"></th>\n' +
' <th ng-class=" { \'data-table__sortable-cell\': th.sortable,\n' +
' \'data-table__sortable-cell--asc\': th.sortable && th.sort === \'asc\',\n' +
' \'data-table__sortable-cell--desc\': th.sortable && th.sort === \'desc\' }"\n' +
' ng-click="lxDataTable.sort(th)"\n' +
' ng-repeat="th in lxDataTable.thead track by $index"\n' +
' ng-if="!lxDataTable.thumbnail || (lxDataTable.thumbnail && $index != 0)">\n' +
' <lx-icon lx-id="{{ th.icon }}" ng-if="th.icon"></lx-icon>\n' +
' <span>{{ th.label }}</span>\n' +
' </th>\n' +
' </tr>\n' +
' </thead>\n' +
'\n' +
' <tbody>\n' +
' <tr ng-class="{ \'data-table__selectable-row\': lxDataTable.selectable,\n' +
' \'data-table__selectable-row--is-disabled\': lxDataTable.selectable && tr.lxDataTableDisabled,\n' +
' \'data-table__selectable-row--is-selected\': lxDataTable.selectable && tr.lxDataTableSelected }"\n' +
' ng-repeat="tr in lxDataTable.tbody"\n' +
' ng-click="lxDataTable.toggle(tr)">\n' +
' <td ng-if="lxDataTable.thumbnail">\n' +
' <div ng-if="lxDataTable.thead[0].format" ng-bind-html="lxDataTable.$sce.trustAsHtml(lxDataTable.thead[0].format(tr))"></div>\n' +
' </td>\n' +
' <td ng-if="lxDataTable.selectable"></td>\n' +
' <td ng-repeat="th in lxDataTable.thead track by $index"\n' +
' ng-if="!lxDataTable.thumbnail || (lxDataTable.thumbnail && $index != 0)">\n' +
' <span ng-if="!th.format">{{ tr[th.name] }}</span>\n' +
' <div ng-if="th.format" ng-bind-html="lxDataTable.$sce.trustAsHtml(th.format(tr))"></div>\n' +
' </td>\n' +
' </tr>\n' +
' </tbody>\n' +
' </table>\n' +
'</div>');
}]);<|fim▁end|> | $timeout.cancel(timer); |
<|file_name|>mixed.go<|end_file_name|><|fim▁begin|>package mixed
import (
"net"
"net/url"
"github.com/nadoo/glider/pkg/log"
"github.com/nadoo/glider/proxy"
"github.com/nadoo/glider/proxy/http"
"github.com/nadoo/glider/proxy/socks5"
)
// Mixed struct.
type Mixed struct {
proxy proxy.Proxy
addr string
httpServer *http.HTTP
socks5Server *socks5.Socks5
}
func init() {
proxy.RegisterServer("mixed", NewMixedServer)
}
// NewMixed returns a mixed proxy.
func NewMixed(s string, p proxy.Proxy) (*Mixed, error) {
u, err := url.Parse(s)
if err != nil {
log.F("parse err: %s", err)
return nil, err
}
m := &Mixed{
proxy: p,
addr: u.Host,
}
m.httpServer, err = http.NewHTTP(s, nil, p)
if err != nil {
return nil, err
}
m.socks5Server, err = socks5.NewSocks5(s, nil, p)
if err != nil {
return nil, err
}
return m, nil
}
// NewMixedServer returns a mixed server.<|fim▁hole|>func NewMixedServer(s string, p proxy.Proxy) (proxy.Server, error) {
return NewMixed(s, p)
}
// ListenAndServe listens on server's addr and serves connections.
func (m *Mixed) ListenAndServe() {
go m.socks5Server.ListenAndServeUDP()
l, err := net.Listen("tcp", m.addr)
if err != nil {
log.Fatalf("[mixed] failed to listen on %s: %v", m.addr, err)
return
}
log.F("[mixed] http & socks5 server listening TCP on %s", m.addr)
for {
c, err := l.Accept()
if err != nil {
log.F("[mixed] failed to accept: %v", err)
continue
}
go m.Serve(c)
}
}
// Serve serves connections.
func (m *Mixed) Serve(c net.Conn) {
conn := proxy.NewConn(c)
if head, err := conn.Peek(1); err == nil {
if head[0] == socks5.Version {
m.socks5Server.Serve(conn)
return
}
}
m.httpServer.Serve(conn)
}<|fim▁end|> | |
<|file_name|>on_vr_language_change_notification.cc<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2013, Ford Motor Company
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of the Ford Motor Company nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "application_manager/commands/hmi/on_vr_language_change_notification.h"
#include "application_manager/application_impl.h"
#include "application_manager/state_controller.h"
#include "application_manager/message_helper.h"
#include "interfaces/MOBILE_API.h"
namespace application_manager {
namespace commands {
OnVRLanguageChangeNotification::OnVRLanguageChangeNotification(
const MessageSharedPtr& message, ApplicationManager& application_manager)
: NotificationFromHMI(message, application_manager) {}
OnVRLanguageChangeNotification::~OnVRLanguageChangeNotification() {}
void OnVRLanguageChangeNotification::Run() {
LOG4CXX_AUTO_TRACE(logger_);
HMICapabilities& hmi_capabilities = application_manager_.hmi_capabilities();
hmi_capabilities.set_active_vr_language(
static_cast<hmi_apis::Common_Language::eType>(
(*message_)[strings::msg_params][strings::language].asInt()));
(*message_)[strings::msg_params][strings::hmi_display_language] =
hmi_capabilities.active_ui_language();
(*message_)[strings::params][strings::function_id] =
static_cast<int32_t>(mobile_apis::FunctionID::OnLanguageChangeID);
const ApplicationSet& accessor =
application_manager_.applications().GetData();
ApplicationSetConstIt it = accessor.begin();
for (; accessor.end() != it;) {
ApplicationSharedPtr app = *it++;
(*message_)[strings::params][strings::connection_key] = app->app_id();
SendNotificationToMobile(message_);
if (static_cast<int32_t>(app->language()) !=
(*message_)[strings::msg_params][strings::language].asInt()) {<|fim▁hole|> MessageHelper::GetOnAppInterfaceUnregisteredNotificationToMobile(
app->app_id(),
mobile_api::AppInterfaceUnregisteredReason::LANGUAGE_CHANGE),
commands::Command::ORIGIN_SDL);
application_manager_.UnregisterApplication(
app->app_id(), mobile_apis::Result::SUCCESS, false);
}
}
}
} // namespace commands
} // namespace application_manager<|fim▁end|> | application_manager_.state_controller().SetRegularState(
app, mobile_api::HMILevel::HMI_NONE, false);
application_manager_.ManageMobileCommand( |
<|file_name|>reducer.ts<|end_file_name|><|fim▁begin|>import { createSlice, createEntityAdapter, Reducer, AnyAction, PayloadAction } from '@reduxjs/toolkit';
import { fetchAll, fetchDetails, install, uninstall, loadPluginDashboards, panelPluginLoaded } from './actions';
import { CatalogPlugin, PluginListDisplayMode, ReducerState, RequestStatus } from '../types';
import { STATE_PREFIX } from '../constants';
import { PanelPlugin } from '@grafana/data';
export const pluginsAdapter = createEntityAdapter<CatalogPlugin>();
const isPendingRequest = (action: AnyAction) => new RegExp(`${STATE_PREFIX}\/(.*)\/pending`).test(action.type);
const isFulfilledRequest = (action: AnyAction) => new RegExp(`${STATE_PREFIX}\/(.*)\/fulfilled`).test(action.type);
const isRejectedRequest = (action: AnyAction) => new RegExp(`${STATE_PREFIX}\/(.*)\/rejected`).test(action.type);
// Extract the trailing '/pending', '/rejected', or '/fulfilled'
const getOriginalActionType = (type: string) => {
const separator = type.lastIndexOf('/');
return type.substring(0, separator);
};
const slice = createSlice({
name: 'plugins',
initialState: {
items: pluginsAdapter.getInitialState(),
requests: {},
settings: {
displayMode: PluginListDisplayMode.Grid,
},
// Backwards compatibility
// (we need to have the following fields in the store as well to be backwards compatible with other parts of Grafana)
// TODO<remove once the "plugin_admin_enabled" feature flag is removed>
plugins: [],
errors: [],
searchQuery: '',
hasFetched: false,
dashboards: [],
isLoadingPluginDashboards: false,
panels: {},
} as ReducerState,
reducers: {
setDisplayMode(state, action: PayloadAction<PluginListDisplayMode>) {
state.settings.displayMode = action.payload;
},
},
extraReducers: (builder) =>
builder
// Fetch All
.addCase(fetchAll.fulfilled, (state, action) => {
pluginsAdapter.upsertMany(state.items, action.payload);
})
// Fetch Details
.addCase(fetchDetails.fulfilled, (state, action) => {
pluginsAdapter.updateOne(state.items, action.payload);
})
// Install
.addCase(install.fulfilled, (state, action) => {
pluginsAdapter.updateOne(state.items, action.payload);
})
// Uninstall
.addCase(uninstall.fulfilled, (state, action) => {
pluginsAdapter.updateOne(state.items, action.payload);
})
// Load a panel plugin (backward-compatibility)
// TODO<remove once the "plugin_admin_enabled" feature flag is removed>
.addCase(panelPluginLoaded, (state, action: PayloadAction<PanelPlugin>) => {
state.panels[action.payload.meta.id] = action.payload;
})
// Start loading panel dashboards (backward-compatibility)
// TODO<remove once the "plugin_admin_enabled" feature flag is removed>
.addCase(loadPluginDashboards.pending, (state, action) => {
state.isLoadingPluginDashboards = true;
state.dashboards = [];
})<|fim▁hole|> // Load panel dashboards (backward-compatibility)
// TODO<remove once the "plugin_admin_enabled" feature flag is removed>
.addCase(loadPluginDashboards.fulfilled, (state, action) => {
state.isLoadingPluginDashboards = false;
state.dashboards = action.payload;
})
.addMatcher(isPendingRequest, (state, action) => {
state.requests[getOriginalActionType(action.type)] = {
status: RequestStatus.Pending,
};
})
.addMatcher(isFulfilledRequest, (state, action) => {
state.requests[getOriginalActionType(action.type)] = {
status: RequestStatus.Fulfilled,
};
})
.addMatcher(isRejectedRequest, (state, action) => {
state.requests[getOriginalActionType(action.type)] = {
status: RequestStatus.Rejected,
error: action.payload,
};
}),
});
export const { setDisplayMode } = slice.actions;
export const reducer: Reducer<ReducerState, AnyAction> = slice.reducer;<|fim▁end|> | |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from django.apps import AppConfig
<|fim▁hole|> verbose_name = "Wagtail admin"<|fim▁end|> |
class WagtailAdminAppConfig(AppConfig):
name = 'wagtail.wagtailadmin'
label = 'wagtailadmin' |
<|file_name|>use-container.ts<|end_file_name|><|fim▁begin|>import { IoCContainer } from './ioc-container';<|fim▁hole|>
/**
* Sets the IoC container to be used in order to instantiate the decorated classes
* @param container A [typedi]{@link https://github.com/pleerock/typedi} container
*/
export function useContainer(container: any): void {
IoCContainer.INSTANCE = container;
}<|fim▁end|> | |
<|file_name|>convert.py<|end_file_name|><|fim▁begin|>import subprocess
import os
"""
What are the differences and similarities between ffmpeg, libav, and avconv?
https://stackoverflow.com/questions/9477115
ffmeg encoders high to lower quality
libopus > libvorbis >= libfdk_aac > aac > libmp3lame
libfdk_aac due to copyrights needs to be compiled by end user
on MacOS brew install ffmpeg --with-fdk-aac will do just that. Other OS?
https://trac.ffmpeg.org/wiki/Encode/AAC
"""
def song(input_song, output_song, folder, avconv=False, verbose=False):
"""Do the audio format conversion."""
if not input_song == output_song:
print('Converting {0} to {1}'.format(
input_song, output_song.split('.')[-1]))
if avconv:
exit_code = convert_with_avconv(input_song, output_song, folder, verbose)
else:
exit_code = convert_with_ffmpeg(input_song, output_song, folder, verbose)
return exit_code
return 0
def convert_with_avconv(input_song, output_song, folder, verbose):
"""Convert the audio file using avconv."""
if verbose:
level = 'debug'
else:
level = '0'
command = ['avconv',
'-loglevel', level,
'-i', os.path.join(folder, input_song),
'-ab', '192k',
os.path.join(folder, output_song)]
return subprocess.call(command)
def convert_with_ffmpeg(input_song, output_song, folder, verbose):
"""Convert the audio file using FFmpeg."""
ffmpeg_pre = 'ffmpeg -y '
if not verbose:
ffmpeg_pre += '-hide_banner -nostats -v panic '
input_ext = input_song.split('.')[-1]
output_ext = output_song.split('.')[-1]
if input_ext == 'm4a':
if output_ext == 'mp3':
ffmpeg_params = '-codec:v copy -codec:a libmp3lame -q:a 2 '
elif output_ext == 'webm':
ffmpeg_params = '-c:a libopus -vbr on -b:a 192k -vn '
elif input_ext == 'webm':
if output_ext == 'mp3':
ffmpeg_params = ' -ab 192k -ar 44100 -vn '<|fim▁hole|> command = '{0}-i {1} {2}{3}'.format(
ffmpeg_pre, os.path.join(folder, input_song), ffmpeg_params, os.path.join(folder, output_song)).split(' ')
return subprocess.call(command)<|fim▁end|> | elif output_ext == 'm4a':
ffmpeg_params = '-cutoff 20000 -c:a libfdk_aac -b:a 192k -vn '
|
<|file_name|>app.e2e-spec.ts<|end_file_name|><|fim▁begin|>import { AppPage } from './app.po';
import { browser, logging } from 'protractor';
describe('workspace-project App', () => {
let page: AppPage;
beforeEach(() => {
page = new AppPage();
});
it('should display welcome message', () => {
page.navigateTo();
expect(page.getTitleText()).toEqual('sandbox app is running!');
});
<|fim▁hole|> // Assert that there are no errors emitted from the browser
const logs = await browser.manage().logs().get(logging.Type.BROWSER);
expect(logs).not.toContain(jasmine.objectContaining({
level: logging.Level.SEVERE,
} as logging.Entry));
});
});<|fim▁end|> | afterEach(async () => { |
<|file_name|>default_exchange_receiver.py<|end_file_name|><|fim▁begin|>from sender import *
import threading
QUEUE_NAME = 'event_queue'
class CompetingReceiver(object):
def __init__(self):
self.connection = Connection().initialize()
def receive(self):
self.connection.channel.queue_declare(QUEUE_NAME, False, False, False, None)
self.connection.channel.basic_consume(self.connection.callback, QUEUE_NAME, True)
self.connection.channel.start_consuming()
if __name__ == '__main__':<|fim▁hole|> t1 = threading.Thread(target=connection1.receive())
t2 = threading.Thread(target=connection2.receive())
t1.start()
t2.start()
t1.join()
t2.join()
connection1.connection.destroy()
connection2.connection.destroy()<|fim▁end|> | connection1 = CompetingReceiver()
connection2 = CompetingReceiver()
|
<|file_name|>temp.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import path = require('path');
import os = require('os');
export function makeRandomHexString(length: number): string {
const chars = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'];
let result = '';
for (let i = 0; i < length; i++) {<|fim▁hole|> }
return result;
}
export function getTempFile(name: string): string {
return path.join(os.tmpdir(), name);
}<|fim▁end|> | const idx = Math.floor(chars.length * Math.random());
result += chars[idx]; |
<|file_name|>GeneticProfileUtil.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2015 - 2016 Memorial Sloan-Kettering Cancer Center.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY, WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR FITNESS
* FOR A PARTICULAR PURPOSE. The software and documentation provided hereunder
* is on an "as is" basis, and Memorial Sloan-Kettering Cancer Center has no
* obligations to provide maintenance, support, updates, enhancements or
* modifications. In no event shall Memorial Sloan-Kettering Cancer Center be
* liable to any party for direct, indirect, special, incidental or
* consequential damages, including lost profits, arising out of the use of this
* software and its documentation, even if Memorial Sloan-Kettering Cancer
* Center has been advised of the possibility of such damage.
*/
/*
* This file is part of cBioPortal.
*
* cBioPortal is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License.
*
* This program is distributed in the hope that it will be useful,<|fim▁hole|> * GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.mskcc.cbio.portal.util;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import org.cbioportal.persistence.GenePanelRepository;
import org.cbioportal.model.GenePanel;
import org.mskcc.cbio.portal.model.GeneticAlterationType;
import org.mskcc.cbio.portal.model.GeneticProfile;
/**
* Genetic Profile Util Class.
*
*/
public class GeneticProfileUtil {
/**
* Gets the GeneticProfile with the Specified GeneticProfile ID.
* @param profileId GeneticProfile ID.
* @param profileList List of Genetic Profiles.
* @return GeneticProfile or null.
*/
public static GeneticProfile getProfile(String profileId,
ArrayList<GeneticProfile> profileList) {
for (GeneticProfile profile : profileList) {
if (profile.getStableId().equals(profileId)) {
return profile;
}
}
return null;
}
/**
* Returns true if Any of the Profiles Selected by the User Refer to mRNA Expression
* outlier profiles.
*
* @param geneticProfileIdSet Set of Chosen Profiles IDs.
* @param profileList List of Genetic Profiles.
* @return true or false.
*/
public static boolean outlierExpressionSelected(HashSet<String> geneticProfileIdSet,
ArrayList<GeneticProfile> profileList) {
Iterator<String> geneticProfileIdIterator = geneticProfileIdSet.iterator();
while (geneticProfileIdIterator.hasNext()) {
String geneticProfileId = geneticProfileIdIterator.next();
GeneticProfile geneticProfile = getProfile (geneticProfileId, profileList);
if (geneticProfile != null && geneticProfile.getGeneticAlterationType() == GeneticAlterationType.MRNA_EXPRESSION) {
String profileName = geneticProfile.getProfileName();
if (profileName != null) {
if (profileName.toLowerCase().contains("outlier")) {
return true;
}
}
}
}
return false;
}
public static int getGenePanelId(String panelId) {
GenePanelRepository genePanelRepository = SpringUtil.getGenePanelRepository();
GenePanel genePanel = genePanelRepository.getGenePanelByStableId(panelId).get(0);
return genePanel.getInternalId();
}
}<|fim▁end|> | * but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
<|file_name|>scoped_tls.rs<|end_file_name|><|fim▁begin|>// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Scoped thread-local storage
//!
//! This module provides the ability to generate *scoped* thread-local
//! variables. In this sense, scoped indicates that thread local storage
//! actually stores a reference to a value, and this reference is only placed
//! in storage for a scoped amount of time.
//!
//! There are no restrictions on what types can be placed into a scoped
//! variable, but all scoped variables are initialized to the equivalent of
//! null. Scoped thread local storage is useful when a value is present for a known
//! period of time and it is not required to relinquish ownership of the
//! contents.
//!
//! # Examples
//!
//! ```
//! # #![feature(scoped_tls)]
//! scoped_thread_local!(static FOO: u32);
//!
//! // Initially each scoped slot is empty.
//! assert!(!FOO.is_set());
//!
//! // When inserting a value, the value is only in place for the duration
//! // of the closure specified.
//! FOO.set(&1, || {
//! FOO.with(|slot| {
//! assert_eq!(*slot, 1);
//! });
//! });
//! ```<|fim▁hole|>
/// Type representing a thread local storage key corresponding to a reference
/// to the type parameter `T`.
///
/// Keys are statically allocated and can contain a reference to an instance of
/// type `T` scoped to a particular lifetime. Keys provides two methods, `set`
/// and `with`, both of which currently use closures to control the scope of
/// their contents.
#[unstable(feature = "scoped_tls",
reason = "scoped TLS has yet to have wide enough use to fully consider \
stabilizing its interface")]
pub struct ScopedKey<T> { inner: imp::KeyInner<T> }
/// Declare a new scoped thread local storage key.
///
/// This macro declares a `static` item on which methods are used to get and
/// set the value stored within.
///
/// See [ScopedKey documentation](thread/struct.ScopedKey.html) for more
/// information.
#[macro_export]
#[allow_internal_unstable]
#[cfg(not(no_elf_tls))]
macro_rules! scoped_thread_local {
(static $name:ident: $t:ty) => (
#[cfg_attr(not(any(windows,
target_os = "android",
target_os = "ios",
target_os = "openbsd",
target_arch = "aarch64")),
thread_local)]
static $name: ::std::thread::ScopedKey<$t> =
::std::thread::ScopedKey::new();
);
(pub static $name:ident: $t:ty) => (
#[cfg_attr(not(any(windows,
target_os = "android",
target_os = "ios",
target_os = "openbsd",
target_arch = "aarch64")),
thread_local)]
pub static $name: ::std::thread::ScopedKey<$t> =
::std::thread::ScopedKey::new();
);
}
#[macro_export]
#[allow_internal_unstable]
#[cfg(no_elf_tls)]
macro_rules! scoped_thread_local {
(static $name:ident: $t:ty) => (
static $name: ::std::thread::ScopedKey<$t> =
::std::thread::ScopedKey::new();
);
(pub static $name:ident: $t:ty) => (
pub static $name: ::std::thread::ScopedKey<$t> =
::std::thread::ScopedKey::new();
);
}
#[unstable(feature = "scoped_tls",
reason = "scoped TLS has yet to have wide enough use to fully consider \
stabilizing its interface")]
impl<T> ScopedKey<T> {
#[doc(hidden)]
pub const fn new() -> ScopedKey<T> {
ScopedKey { inner: imp::KeyInner::new() }
}
/// Inserts a value into this scoped thread local storage slot for a
/// duration of a closure.
///
/// While `cb` is running, the value `t` will be returned by `get` unless
/// this function is called recursively inside of `cb`.
///
/// Upon return, this function will restore the previous value, if any
/// was available.
///
/// # Examples
///
/// ```
/// # #![feature(scoped_tls)]
/// scoped_thread_local!(static FOO: u32);
///
/// FOO.set(&100, || {
/// let val = FOO.with(|v| *v);
/// assert_eq!(val, 100);
///
/// // set can be called recursively
/// FOO.set(&101, || {
/// // ...
/// });
///
/// // Recursive calls restore the previous value.
/// let val = FOO.with(|v| *v);
/// assert_eq!(val, 100);
/// });
/// ```
pub fn set<R, F>(&'static self, t: &T, cb: F) -> R where
F: FnOnce() -> R,
{
struct Reset<'a, T: 'a> {
key: &'a imp::KeyInner<T>,
val: *mut T,
}
impl<'a, T> Drop for Reset<'a, T> {
fn drop(&mut self) {
unsafe { self.key.set(self.val) }
}
}
let prev = unsafe {
let prev = self.inner.get();
self.inner.set(t as *const T as *mut T);
prev
};
let _reset = Reset { key: &self.inner, val: prev };
cb()
}
/// Gets a value out of this scoped variable.
///
/// This function takes a closure which receives the value of this
/// variable.
///
/// # Panics
///
/// This function will panic if `set` has not previously been called.
///
/// # Examples
///
/// ```no_run
/// # #![feature(scoped_tls)]
/// scoped_thread_local!(static FOO: u32);
///
/// FOO.with(|slot| {
/// // work with `slot`
/// });
/// ```
pub fn with<R, F>(&'static self, cb: F) -> R where
F: FnOnce(&T) -> R
{
unsafe {
let ptr = self.inner.get();
assert!(!ptr.is_null(), "cannot access a scoped thread local \
variable without calling `set` first");
cb(&*ptr)
}
}
/// Test whether this TLS key has been `set` for the current thread.
pub fn is_set(&'static self) -> bool {
unsafe { !self.inner.get().is_null() }
}
}
#[cfg(not(any(windows,
target_os = "android",
target_os = "ios",
target_os = "openbsd",
target_arch = "aarch64",
no_elf_tls)))]
mod imp {
use std::cell::Cell;
pub struct KeyInner<T> { inner: Cell<*mut T> }
unsafe impl<T> ::marker::Sync for KeyInner<T> { }
impl<T> KeyInner<T> {
pub const fn new() -> KeyInner<T> {
KeyInner { inner: Cell::new(0 as *mut _) }
}
pub unsafe fn set(&self, ptr: *mut T) { self.inner.set(ptr); }
pub unsafe fn get(&self) -> *mut T { self.inner.get() }
}
}
#[cfg(any(windows,
target_os = "android",
target_os = "ios",
target_os = "openbsd",
target_arch = "aarch64",
no_elf_tls))]
mod imp {
use prelude::v1::*;
use cell::Cell;
use marker;
use sys_common::thread_local::StaticKey as OsStaticKey;
pub struct KeyInner<T> {
pub inner: OsStaticKey,
pub marker: marker::PhantomData<Cell<T>>,
}
unsafe impl<T> marker::Sync for KeyInner<T> { }
impl<T> KeyInner<T> {
pub const fn new() -> KeyInner<T> {
KeyInner {
inner: OsStaticKey::new(None),
marker: marker::PhantomData
}
}
pub unsafe fn set(&self, ptr: *mut T) { self.inner.set(ptr as *mut _) }
pub unsafe fn get(&self) -> *mut T { self.inner.get() as *mut _ }
}
}
#[cfg(test)]
mod tests {
use cell::Cell;
use prelude::v1::*;
scoped_thread_local!(static FOO: u32);
#[test]
fn smoke() {
scoped_thread_local!(static BAR: u32);
assert!(!BAR.is_set());
BAR.set(&1, || {
assert!(BAR.is_set());
BAR.with(|slot| {
assert_eq!(*slot, 1);
});
});
assert!(!BAR.is_set());
}
#[test]
fn cell_allowed() {
scoped_thread_local!(static BAR: Cell<u32>);
BAR.set(&Cell::new(1), || {
BAR.with(|slot| {
assert_eq!(slot.get(), 1);
});
});
}
#[test]
fn scope_item_allowed() {
assert!(!FOO.is_set());
FOO.set(&1, || {
assert!(FOO.is_set());
FOO.with(|slot| {
assert_eq!(*slot, 1);
});
});
assert!(!FOO.is_set());
}
}<|fim▁end|> |
#![unstable(feature = "thread_local_internals")]
use prelude::v1::*; |
<|file_name|>core.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package app
import (
"github.com/golang/glog"
// HACK to ensure that rest mapper from pkg/api is registered for groupName="".
// This is required because both pkg/api/install and federation/apis/core/install
// are installing their respective groupMeta at the same groupName.
// federation/apis/core/install has only a subset of resources and hence if it gets registered first, then installation of v1 API fails in pkg/master.
// TODO(nikhiljindal): Fix this by ensuring that pkg/api/install and federation/apis/core/install do not conflict with each other.
_ "k8s.io/kubernetes/pkg/api/install"
"k8s.io/kubernetes/federation/apis/core"
_ "k8s.io/kubernetes/federation/apis/core/install"
"k8s.io/kubernetes/federation/apis/core/v1"
"k8s.io/kubernetes/federation/cmd/federation-apiserver/app/options"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/rest"
"k8s.io/kubernetes/pkg/apimachinery/registered"
"k8s.io/kubernetes/pkg/genericapiserver"
configmapetcd "k8s.io/kubernetes/pkg/registry/core/configmap/etcd"
eventetcd "k8s.io/kubernetes/pkg/registry/core/event/etcd"<|fim▁hole|> namespaceetcd "k8s.io/kubernetes/pkg/registry/core/namespace/etcd"
secretetcd "k8s.io/kubernetes/pkg/registry/core/secret/etcd"
serviceetcd "k8s.io/kubernetes/pkg/registry/core/service/etcd"
)
func installCoreAPIs(s *options.ServerRunOptions, g *genericapiserver.GenericAPIServer, restOptionsFactory restOptionsFactory) {
serviceStore, serviceStatusStore := serviceetcd.NewREST(restOptionsFactory.NewFor(api.Resource("service")))
namespaceStore, namespaceStatusStore, namespaceFinalizeStore := namespaceetcd.NewREST(restOptionsFactory.NewFor(api.Resource("namespaces")))
secretStore := secretetcd.NewREST(restOptionsFactory.NewFor(api.Resource("secrets")))
configMapStore := configmapetcd.NewREST(restOptionsFactory.NewFor(api.Resource("configmaps")))
eventStore := eventetcd.NewREST(restOptionsFactory.NewFor(api.Resource("events")), uint64(s.EventTTL.Seconds()))
coreResources := map[string]rest.Storage{
"secrets": secretStore,
"services": serviceStore,
"services/status": serviceStatusStore,
"namespaces": namespaceStore,
"namespaces/status": namespaceStatusStore,
"namespaces/finalize": namespaceFinalizeStore,
"events": eventStore,
"configmaps": configMapStore,
}
coreGroupMeta := registered.GroupOrDie(core.GroupName)
apiGroupInfo := genericapiserver.APIGroupInfo{
GroupMeta: *coreGroupMeta,
VersionedResourcesStorageMap: map[string]map[string]rest.Storage{
v1.SchemeGroupVersion.Version: coreResources,
},
OptionsExternalVersion: ®istered.GroupOrDie(core.GroupName).GroupVersion,
Scheme: core.Scheme,
ParameterCodec: core.ParameterCodec,
NegotiatedSerializer: core.Codecs,
}
if err := g.InstallLegacyAPIGroup(genericapiserver.DefaultLegacyAPIPrefix, &apiGroupInfo); err != nil {
glog.Fatalf("Error in registering group version: %+v.\n Error: %v\n", apiGroupInfo, err)
}
}<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django import forms
from django.db import models
class Births(models.Model):
year = models.IntegerField("Year")
county = models.CharField("County",max_length=20)
mothersAge = models.IntegerField("Mother's Age")
mothersEdu = models.CharField("Mother's Education",max_length=50)
source = models.URLField("Source")
isRepeat = models.BooleanField("Is a Repeat Birth")
births = models.IntegerField("Births")
def get_fields(self):
fields = []
for f in self._meta.fields:
fields.append(f.name)
return fields
def get_names(self):
names = []
for f in self._meta.fields:
names.append(self._meta.get_field(f.name).verbose_name.title())
return names
def __unicode__(self):
s = "In " + self.county + " county, " + str(self.year)
s += ", there were " + str(self.births)
if self.isRepeat: s += " repeat births to "
else: s += " first births to "
s += str(self.mothersAge) + "-year-old mothers who "
s += self.mothersEdu + ", according to " + self.source
return s
class Diseases(models.Model):
year = models.IntegerField("Year")
county = models.CharField("County",max_length=20)
topic = models.CharField("Topic",max_length=50)
# Topics:
# HIV Cases
# AIDS Cases
# HIV+AIDS Deaths
# HIV+AIDS Deaths Age-Adjusted
source = models.URLField("Source")
count = models.IntegerField("Count")<|fim▁hole|> s = "In " + self.county + " county, " + str(self.year)
s += ", there were " + str(self.count) + " "
s += self.topic + " (or " + str(self.rate)
s += "%), according to " + self.source
return s
class Upload(models.Model):
upfile = models.FileField(upload_to='Updates Go Here')<|fim▁end|> | rate = models.FloatField("Rate")
def __unicode__(self): |
<|file_name|>tests.js<|end_file_name|><|fim▁begin|>/* global describe, beforeEach, it */
import { expect } from "chai";
import { shouldOpenInNewTab } from "./utils";
describe("app/lib/utils:shouldOpenInNewTab", () => {
let mockClickEvent;
beforeEach(() => {
mockClickEvent = {
ctrlKey: false,
metaKey: false,
type: "click",
button: 0
};
});
it("should default to false", () => {
expect(shouldOpenInNewTab(mockClickEvent)).to.be.false;
});
it("should return true when ctrl-clicked", () => {
mockClickEvent.ctrlKey = true;
expect(shouldOpenInNewTab(mockClickEvent)).to.be.true;
});
it("should return true when cmd-clicked", () => {
mockClickEvent.metaKey = true;
expect(shouldOpenInNewTab(mockClickEvent)).to.be.true;
});
it("should return false for non-click events", () => {
mockClickEvent.type = "keypress";
expect(shouldOpenInNewTab(mockClickEvent)).to.be.false;
});
it("should return true for middle clicks", () => {
mockClickEvent.button = 1;
expect(shouldOpenInNewTab(mockClickEvent)).to.be.true;
});
it("should return false for right clicks", () => {
mockClickEvent.button = 2;<|fim▁hole|><|fim▁end|> | expect(shouldOpenInNewTab(mockClickEvent)).to.be.false;
});
}); |
<|file_name|>meta_expose.hpp<|end_file_name|><|fim▁begin|>#pragma once
#include "core_reflection/metadata/meta_impl.hpp"
#include "core_reflection/reflection_macros.hpp"
#include "core_reflection/metadata/meta_types.hpp"
#include "core_reflection/utilities/reflection_function_utilities.hpp"
namespace wgt
{
BEGIN_EXPOSE(MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaNoneObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaComponentObj, MetaBase, MetaNone())
EXPOSE("componentName", componentName_)
END_EXPOSE()
BEGIN_EXPOSE(MetaAngleObj, MetaComponentObj, MetaNone())
EXPOSE("convertToRadians", convertToRadians_)
END_EXPOSE()
BEGIN_EXPOSE(MetaTimeObj, MetaComponentObj, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaSignalObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaInvalidatesObjectObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaCallbackObj, MetaBase, MetaNone())
EXPOSE_METHOD("invoke", exposedInvoke)
END_EXPOSE()
BEGIN_EXPOSE(MetaMinMaxObj, MetaBase, MetaNone())
EXPOSE("min", getMin)
EXPOSE("max", getMax)
END_EXPOSE()
BEGIN_EXPOSE(MetaStepSizeObj, MetaBase, MetaNone())
EXPOSE("stepSize", getStepSize)
END_EXPOSE()
BEGIN_EXPOSE(MetaDecimalsObj, MetaBase, MetaNone())
EXPOSE("decimals", getDecimals)
END_EXPOSE()
BEGIN_EXPOSE(MetaEnumObj, MetaBase, MetaNone())
EXPOSE("enumString", getEnumString)
EXPOSE_METHOD("generateEnum", generateEnum)
END_EXPOSE()
BEGIN_EXPOSE(MetaSliderObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaDisplayNameObj, MetaBase, MetaNone())
EXPOSE("displayName", displayName_)
END_EXPOSE()
BEGIN_EXPOSE(MetaAttributeDisplayNameObj, MetaBase, MetaNone())
EXPOSE("attributeName", getAttributeName)
END_EXPOSE()
BEGIN_EXPOSE(MetaDisplayNameCallbackObj, MetaDisplayNameObj, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaDescriptionObj, MetaBase, MetaNone())
EXPOSE("description", getDescription)
END_EXPOSE()
BEGIN_EXPOSE(MetaPanelLayoutObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaNoNullObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaColorObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaHiddenObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaThumbnailObj, MetaBase, MetaNone())
EXPOSE("width", getWidth)
EXPOSE("height", getHeight)
END_EXPOSE()
BEGIN_EXPOSE(MetaInPlaceObj, MetaBase, MetaNone())
EXPOSE("propName", getPropName)
END_EXPOSE()
BEGIN_EXPOSE(MetaSelectedObj, MetaBase, MetaNone())
EXPOSE("propName", getPropName)
END_EXPOSE()
BEGIN_EXPOSE(MetaHDRColorReinhardTonemapObj, MetaHDRColorObj, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaGroupObj, MetaBase, MetaAttributeDisplayName("groupName"))
EXPOSE("groupName", groupName_)
END_EXPOSE()
BEGIN_EXPOSE(MetaGroupCallbackObj, MetaGroupObj, MetaAttributeDisplayName("groupName"))
END_EXPOSE()
BEGIN_EXPOSE(MetaUrlObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaPasswordObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaMultilineObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaDirectInvokeObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaInPlacePropertyNameObj, MetaBase, MetaNone())
EXPOSE("propertyName", getPropertyName)
END_EXPOSE()
BEGIN_EXPOSE(MetaReadOnlyObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaUniqueIdObj, MetaBase, MetaNone())
EXPOSE("id", getId)
END_EXPOSE()
BEGIN_EXPOSE(MetaCommandObj, MetaBase, MetaNone())
EXPOSE("commandName", getCommandName)
END_EXPOSE()
BEGIN_EXPOSE(MetaActionObj, MetaBase, MetaNone())
EXPOSE("actionName", getActionName)
EXPOSE_METHOD("execute", execute)
END_EXPOSE()
BEGIN_EXPOSE(MetaNoSerializationObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaOnStackObj, MetaBase, MetaNone())
END_EXPOSE()
BEGIN_EXPOSE(MetaHDRColorObj, MetaBase, MetaNone())
EXPOSE_METHOD("tonemap", tonemap, MetaDirectInvoke())
EXPOSE("shouldUpdate", shouldUpdate, MetaSignalFunc(shouldUpdateSignal))
END_EXPOSE()
BEGIN_EXPOSE(MetaDisplayPathNameCallbackObj, MetaNone())
END_EXPOSE()<|fim▁hole|>END_EXPOSE()
} // end namespace wgt<|fim▁end|> |
BEGIN_EXPOSE(MetaCollectionItemMetaObj, MetaBase, MetaNone()) |
<|file_name|>interop.rs<|end_file_name|><|fim▁begin|>use std::os::raw::c_char;
use std;
use util;
use d3dx;
use shared_dx9::util::write_log_file;
use global_state::HookState;
use types::interop::*;
lazy_static! {
pub static ref LOG_MUTEX: std::sync::Mutex<()> = std::sync::Mutex::new(());
}
unsafe fn loggit(prefix: &str, category: *const c_char, message: *const c_char) -> () {
use std::ffi::CStr;
let _lock = LOG_MUTEX.lock();
// convert the c_strs to rust strs; if it works, we get a &str. If it doesn't,
// we get an error. format error to make a String, store that in a mutable to prevent drop,
// and return a ref to the String for display. amusingly the error contains the
// debug representation of the string that couldn't be converted. ^_^
// TODO: when I am smarter, do this better or make it into a utility function.
let mut cerr = String::new();
let category = CStr::from_ptr(category).to_str().unwrap_or_else(|e| {
cerr = format!("{:?} [conversion error: {}]", CStr::from_ptr(category), e);
&cerr
});
let mut merr = String::new();
let message = CStr::from_ptr(message).to_str().unwrap_or_else(|e| {
merr = format!("{:?} [conversion error: {}]", CStr::from_ptr(message), e);
&merr
});
if prefix == "" {
write_log_file(&format!("[{}]: {}", category, message));
} else {
write_log_file(&format!("[{}:{}]: {}", prefix, category, message));
};
}
#[allow(unused)]
#[no_mangle]
pub unsafe extern "stdcall" fn LogInfo(category: *const c_char, message: *const c_char) -> () {
loggit("", category, message);
}
#[allow(unused)]
#[no_mangle]
pub unsafe extern "stdcall" fn LogWarn(category: *const c_char, message: *const c_char) -> () {
loggit("WARN", category, message);
}<|fim▁hole|>#[no_mangle]
pub unsafe extern "stdcall" fn LogError(category: *const c_char, message: *const c_char) -> () {
loggit("ERROR", category, message);
}
#[allow(unused)]
#[no_mangle]
pub unsafe extern "stdcall" fn SaveTexture(index: i32, filepath: *const u16) -> bool {
match d3dx::save_texture(index, filepath) {
Ok(_) => true,
Err(e) => {
write_log_file(&format!("failed to save texture: {:?}", e));
false
}
}
}
#[allow(unused)]
#[no_mangle]
pub unsafe extern "stdcall" fn OnInitialized(
callbacks: *mut ManagedCallbacks,
global_state_pointer: u64,
) -> i32 {
use std::ffi::CStr;
use std::ffi::CString;
let on_init_error_code = 666;
// reinit global state pointer. technically we only really need to do this for the
// tests, where we can have multiple copies of globals (see rt.sh for details).
write_log_file(&format!(
"OnInitialized called with global state address: {}",
global_state_pointer
));
let local_gs_addr = global_state::get_global_state_ptr() as u64;
if global_state_pointer != local_gs_addr {
write_log_file(&format!(
"WARNING: OnInitialized's global state address {:x} differs from input param {:x}",
local_gs_addr, global_state_pointer
));
}
let global_hookstate = global_state_pointer as *mut HookState;
if global_hookstate == std::ptr::null_mut() {
write_log_file("error: global state pointer is null");
return 666;
}
if callbacks == std::ptr::null_mut() {
write_log_file("error: no callbacks specified");
return 666;
}
let mmpath = match util::get_mm_conf_info() {
Ok((true, Some(mmpath))) => mmpath,
Ok((a, b)) => {
write_log_file(&format!("Unexpected conf return: {:?} {:?}", a, b));
return on_init_error_code;
}
Err(e) => {
write_log_file(&format!("Unexpected conf error value: {:?}", e));
return on_init_error_code;
}
};
// get module path (exe that has loaded this dll).
let exemodule = match util::get_module_name() {
Err(e) => {
write_log_file(&format!(
"Unexpected error getting module handle name: {:?}",
e
));
return on_init_error_code;
}
Ok(s) => s,
};
let mut mmpath = util::to_wide_str(&mmpath);
let mut exemodule = util::to_wide_str(&exemodule);
let cd = ((*callbacks).SetPaths)(mmpath.as_mut_ptr(), exemodule.as_mut_ptr());
if cd == std::ptr::null_mut() {
write_log_file(&format!(
"error calling setpaths, returned conf data is null"
));
return on_init_error_code;
}
let is = InteropState {
callbacks: (*callbacks),
conf_data: (*cd),
loading_mods: false,
done_loading_mods: false,
};
(*global_hookstate).interop_state = Some(is);
0
}<|fim▁end|> |
#[allow(unused)] |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
"""
---------------------------------------------------------------------------
OpenVolunteer
Copyright 2009, Ludovic Rivallain
---------------------------------------------------------------------------
This file is part of OpenVolunteer.
OpenVolunteer is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenVolunteer is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OpenVolunteer. If not, see <http://www.gnu.org/licenses/>.<|fim▁hole|> ---------------------------------------------------------------------------
"""
from django import forms
from models import PRESENCE_CHOICES
class VolunteerForm(forms.Form):
name = forms.CharField(max_length=100)
firstname = forms.CharField(max_length=100)
email = forms.EmailField(required=False)
phone_home = forms.CharField(required=False, max_length=20)
phone_mobile = forms.CharField(required=False, max_length=20)
address = forms.CharField(required=False, widget=forms.Textarea)
birth_place = forms.CharField(required=False, max_length=100)
ca_member = forms.BooleanField(required=False)
comments = forms.CharField(required=False, widget=forms.Textarea)
avatar = forms.ImageField(required=False)
delete_avatar = forms.BooleanField(required=False)
class EventForm(forms.Form):
title = forms.CharField(max_length=100)
place = forms.CharField(required=False, max_length=100)
affiche = forms.ImageField(required=False)
delete_affiche = forms.BooleanField(required=False)
class JobForm(forms.Form):
title = forms.CharField(max_length=100)
description = forms.CharField(required=False, widget=forms.Textarea)
class AnswerForm(forms.Form):
presence = forms.ChoiceField(choices=PRESENCE_CHOICES)
comments = forms.CharField(required=False, widget=forms.Textarea)<|fim▁end|> | |
<|file_name|>rpc_txoutproof.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test gettxoutproof and verifytxoutproof RPCs."""
from test_framework.messages import CMerkleBlock, FromHex, ToHex
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
from test_framework.wallet import MiniWallet
class MerkleBlockTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
self.extra_args = [
[],
["-txindex"],
]
def run_test(self):
miniwallet = MiniWallet(self.nodes[0])<|fim▁hole|> # Add enough mature utxos to the wallet, so that all txs spend confirmed coins
miniwallet.generate(5)
self.nodes[0].generate(100)
self.sync_all()
chain_height = self.nodes[1].getblockcount()
assert_equal(chain_height, 105)
txid1 = miniwallet.send_self_transfer(from_node=self.nodes[0])['txid']
txid2 = miniwallet.send_self_transfer(from_node=self.nodes[0])['txid']
# This will raise an exception because the transaction is not yet in a block
assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid1])
self.nodes[0].generate(1)
blockhash = self.nodes[0].getblockhash(chain_height + 1)
self.sync_all()
txlist = []
blocktxn = self.nodes[0].getblock(blockhash, True)["tx"]
txlist.append(blocktxn[1])
txlist.append(blocktxn[2])
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1])), [txid1])
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2])), txlist)
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2], blockhash)), txlist)
txin_spent = miniwallet.get_utxo() # Get the change from txid2
tx3 = miniwallet.send_self_transfer(from_node=self.nodes[0], utxo_to_spend=txin_spent)
txid3 = tx3['txid']
self.nodes[0].generate(1)
self.sync_all()
txid_spent = txin_spent["txid"]
txid_unspent = txid1 # Input was change from txid2, so txid1 should be unspent
# Invalid txids
assert_raises_rpc_error(-8, "txid must be of length 64 (not 32, for '00000000000000000000000000000000')", self.nodes[0].gettxoutproof, ["00000000000000000000000000000000"], blockhash)
assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].gettxoutproof, ["ZZZ0000000000000000000000000000000000000000000000000000000000000"], blockhash)
# Invalid blockhashes
assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 32, for '00000000000000000000000000000000')", self.nodes[0].gettxoutproof, [txid_spent], "00000000000000000000000000000000")
assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].gettxoutproof, [txid_spent], "ZZZ0000000000000000000000000000000000000000000000000000000000000")
# We can't find the block from a fully-spent tx
assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid_spent])
# We can get the proof if we specify the block
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid_spent], blockhash)), [txid_spent])
# We can't get the proof if we specify a non-existent block
assert_raises_rpc_error(-5, "Block not found", self.nodes[0].gettxoutproof, [txid_spent], "0000000000000000000000000000000000000000000000000000000000000000")
# We can get the proof if the transaction is unspent
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid_unspent])), [txid_unspent])
# We can get the proof if we provide a list of transactions and one of them is unspent. The ordering of the list should not matter.
assert_equal(sorted(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2]))), sorted(txlist))
assert_equal(sorted(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid2, txid1]))), sorted(txlist))
# We can always get a proof if we have a -txindex
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[1].gettxoutproof([txid_spent])), [txid_spent])
# We can't get a proof if we specify transactions from different blocks
assert_raises_rpc_error(-5, "Not all transactions found in specified or retrieved block", self.nodes[0].gettxoutproof, [txid1, txid3])
# Test empty list
assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [])
# Test duplicate txid
assert_raises_rpc_error(-8, 'Invalid parameter, duplicated txid', self.nodes[0].gettxoutproof, [txid1, txid1])
# Now we'll try tweaking a proof.
proof = self.nodes[1].gettxoutproof([txid1, txid2])
assert txid1 in self.nodes[0].verifytxoutproof(proof)
assert txid2 in self.nodes[1].verifytxoutproof(proof)
tweaked_proof = FromHex(CMerkleBlock(), proof)
# Make sure that our serialization/deserialization is working
assert txid1 in self.nodes[0].verifytxoutproof(ToHex(tweaked_proof))
# Check to see if we can go up the merkle tree and pass this off as a
# single-transaction block
tweaked_proof.txn.nTransactions = 1
tweaked_proof.txn.vHash = [tweaked_proof.header.hashMerkleRoot]
tweaked_proof.txn.vBits = [True] + [False]*7
for n in self.nodes:
assert not n.verifytxoutproof(ToHex(tweaked_proof))
# TODO: try more variants, eg transactions at different depths, and
# verify that the proofs are invalid
if __name__ == '__main__':
MerkleBlockTest().main()<|fim▁end|> | |
<|file_name|>issue-7573.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub struct CrateId {
local_path: ~str,
junk: ~str
}
impl CrateId {
fn new(s: &str) -> CrateId {
CrateId {
local_path: s.to_owned(),
junk: ~"wutevs"
}
}
}
pub fn remove_package_from_database() {
let mut lines_to_use: ~[&CrateId] = ~[]; //~ ERROR cannot infer an appropriate lifetime
let push_id = |installed_id: &CrateId| {
lines_to_use.push(installed_id);
};
list_database(push_id);
for l in lines_to_use.iter() {
println!("{}", l.local_path);
}<|fim▁hole|> let stuff = ["foo", "bar"];
for l in stuff.iter() {
f(&CrateId::new(*l));
}
}
pub fn main() {
remove_package_from_database();
}<|fim▁end|> |
}
pub fn list_database(f: |&CrateId|) { |
<|file_name|>0014_auto_20171109_1813.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-09 18:13
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bot', '0013_auto_20171109_1759'),
]
operations = [
migrations.AlterField(
model_name='alertausuario',
name='chat_id',
field=models.IntegerField(blank=True),
),
migrations.AlterField(
model_name='alertausuario',<|fim▁hole|><|fim▁end|> | name='ultima_actualizacion',
field=models.DateTimeField(default=datetime.datetime(2017, 11, 9, 18, 13, 50, 254179)),
),
] |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use std::io::Error as IoError;
use std::error::Error as StdError;
use std::fmt::Display;
use hyper::Error as HyperError;
use serde_json::Error as JsonError;
use serde_json::Value;
use websocket::result::WebSocketError;
#[cfg(feature="voice")]
use opus::Error as OpusError;
/// Discord API `Result` alias type.
pub type Result<T> = ::std::result::Result<T, Error>;
/// Discord API error type.
#[derive(Debug)]
pub enum Error {
/// A `hyper` crate error
Hyper(HyperError),
/// A `serde_json` crate error<|fim▁hole|> Json(JsonError),
/// A `websocket` crate error
WebSocket(WebSocketError),
/// A `std::io` module error
Io(IoError),
/// An error in the Opus library, with the function name and error code
#[cfg(feature="voice")]
Opus(OpusError),
/// A websocket connection was closed, possibly with a message
Closed(Option<u16>, Vec<u8>),
/// A json decoding error, with a description and the offending value
Decode(&'static str, Value),
/// A generic non-success response from the REST API
Status(::hyper::status::StatusCode, Option<Value>),
/// A rate limit error, with how many milliseconds to wait before retrying
RateLimited(u64),
/// A Discord protocol error, with a description
Protocol(&'static str),
/// A miscellaneous error, with a description
Other(&'static str),
}
impl Error {
#[doc(hidden)]
pub fn from_response(response: ::hyper::client::Response) -> Error {
let status = response.status;
let value = ::serde_json::from_reader(response).ok();
if status == ::hyper::status::StatusCode::TooManyRequests {
if let Some(Value::Object(ref map)) = value {
if let Some(delay) = map.get("retry_after").and_then(|v| v.as_u64()) {
return Error::RateLimited(delay)
}
}
}
Error::Status(status, value)
}
}
impl From<IoError> for Error {
fn from(err: IoError) -> Error {
Error::Io(err)
}
}
impl From<HyperError> for Error {
fn from(err: HyperError) -> Error {
Error::Hyper(err)
}
}
impl From<JsonError> for Error {
fn from(err: JsonError) -> Error {
Error::Json(err)
}
}
impl From<WebSocketError> for Error {
fn from(err: WebSocketError) -> Error {
Error::WebSocket(err)
}
}
#[cfg(feature="voice")]
impl From<OpusError> for Error {
fn from(err: OpusError) -> Error {
Error::Opus(err)
}
}
#[cfg(not(feature="voice"))]
impl Display for Error {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match *self {
Error::Hyper(ref inner) => inner.fmt(f),
Error::Json(ref inner) => inner.fmt(f),
Error::WebSocket(ref inner) => inner.fmt(f),
Error::Io(ref inner) => inner.fmt(f),
//Error::Opus(ref inner) => inner.fmt(f),
_ => f.write_str(self.description()),
}
}
}
#[cfg(feature="voice")]
impl Display for Error {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match *self {
Error::Hyper(ref inner) => inner.fmt(f),
Error::Json(ref inner) => inner.fmt(f),
Error::WebSocket(ref inner) => inner.fmt(f),
Error::Io(ref inner) => inner.fmt(f),
Error::Opus(ref inner) => inner.fmt(f),
_ => f.write_str(self.description()),
}
}
}
#[cfg(not(feature="voice"))]
impl StdError for Error {
fn description(&self) -> &str {
match *self {
Error::Hyper(ref inner) => inner.description(),
Error::Json(ref inner) => inner.description(),
Error::WebSocket(ref inner) => inner.description(),
Error::Io(ref inner) => inner.description(),
Error::Closed(_, _) => "Connection closed",
Error::Decode(msg, _) => msg,
Error::Status(status, _) => status.canonical_reason().unwrap_or("Unknown bad HTTP status"),
Error::RateLimited(_) => "Rate limited",
Error::Protocol(msg) => msg,
Error::Other(msg) => msg,
}
}
fn cause(&self) -> Option<&StdError> {
match *self {
Error::Hyper(ref inner) => Some(inner),
Error::Json(ref inner) => Some(inner),
Error::WebSocket(ref inner) => Some(inner),
Error::Io(ref inner) => Some(inner),
_ => None,
}
}
}
#[cfg(feature="voice")]
impl StdError for Error {
fn description(&self) -> &str {
match *self {
Error::Hyper(ref inner) => inner.description(),
Error::Json(ref inner) => inner.description(),
Error::WebSocket(ref inner) => inner.description(),
Error::Io(ref inner) => inner.description(),
Error::Opus(ref inner) => inner.description(),
Error::Closed(_, _) => "Connection closed",
Error::Decode(msg, _) => msg,
Error::Status(status, _) => status.canonical_reason().unwrap_or("Unknown bad HTTP status"),
Error::RateLimited(_) => "Rate limited",
Error::Protocol(msg) => msg,
Error::Other(msg) => msg,
}
}
fn cause(&self) -> Option<&StdError> {
match *self {
Error::Hyper(ref inner) => Some(inner),
Error::Json(ref inner) => Some(inner),
Error::WebSocket(ref inner) => Some(inner),
Error::Io(ref inner) => Some(inner),
Error::Opus(ref inner) => Some(inner),
_ => None,
}
}
}<|fim▁end|> | |
<|file_name|>log.py<|end_file_name|><|fim▁begin|>import time
def progress(index, size, for_what='当前进度', step=10):
block_size = int(size / step)
if index % block_size == 0:
crt = int(index / block_size)
print('%s ==> [%d / %d]' % (for_what, crt, step))
def log_time():
def _log_time(func):
# func()
def wrapper(*args, **kwargs):
print("start")
start_time = time.time()
result = func() if len(args) == len(kwargs) == 0 else func(*args, **kwargs)
end_time = time.time()
cost_time = end_time - start_time
print("[%s] cost time -> %s" % (func.__name__, cost_time))<|fim▁hole|> return wrapper
return _log_time
def line(log_str, style='-'):
print(style * 12 + str(log_str) + style * 12)
def block(style="-",w=100,h=5):
for _ in range(h):
print(style*w)<|fim▁end|> | return result
|
<|file_name|>test_user.py<|end_file_name|><|fim▁begin|>from shuttl.tests import testbase
from shuttl.Models.User import User, UserDataTakenException, NoOrganizationException, ToManyOrganizations
from shuttl.Models.organization import Organization
from shuttl.Models.Reseller import Reseller
class UserTestCase(testbase.BaseTest):
def _setUp(self):
self.reseller = Reseller(name ="test4", url="test2.com")
self.reseller.save()
pass
def test_create(self):
organization = Organization(name="Test", reseller=self.reseller)
organization.save()
organization = Organization.Get(name="Test", vendor=self.reseller)
data = dict(organization=organization, username="Tester", email="Test@tesi.com", password="Things")
user = User.Create(**data)
self.assertRaises(UserDataTakenException, User.Create, **data)
user2 = User.query.get(user.id)
self.assertEqual(user2.username, user.username)
self.assertEqual(user2, user)
self.assertEqual(user2.password, user.password)
self.assertNotEqual(user2.password, "Things")
self.assertFalse(user.isAdmin)
self.assertFalse(user.isFree)
self.assertFalse(user.isActive)
self.assertFalse(user.is_active)
self.assertFalse(user.is_active)
self.assertIsNotNone(user2.organization)<|fim▁hole|> self.assertRaises(NoOrganizationException, user.save)
pass
def test_password(self):
org = Organization.Create(name="Test", reseller=self.reseller)
usr = User.Create(organization=org, username="Tester", email="blah@blah.com", password="Bullshit")
oldPW = usr.password
self.assertNotEqual(usr.password, "Bullshit")
self.assertTrue(usr.checkPassword("Bullshit"))
usr.setPassword("Things")
self.assertNotEqual(usr.password, oldPW)
self.assertTrue(usr.checkPassword("Things"))
pass<|fim▁end|> | user.organization = None |
<|file_name|>038.py<|end_file_name|><|fim▁begin|>def isPandigital(strN):
Ln = [c for c in strN]
Ln.sort()
if Ln == ['1', '2', '3', '4', '5', '6', '7', '8', '9']:
return True
return False
listPans=[]
listMultiplier=[]
#dont know if these limits are ok
i=9
while i<9999:
n=2<|fim▁hole|> strProd=''
while j<n:
prod=j*i
strProd += str(prod)
j+=1
if len(strProd)>9:
break
#print i, n, strProd
if isPandigital(strProd):
listPans.append(prod)
listMultiplier.append(i)
print "Pandigital", i, j, strProd
n+=1
i+=1
#output:
#Pandigital 9 6 918273645
#Pandigital 192 4 192384576
#Pandigital 219 4 219438657
#Pandigital 273 4 273546819
#Pandigital 327 4 327654981
#Pandigital 6729 3 672913458
#Pandigital 6792 3 679213584
#Pandigital 6927 3 692713854
#Pandigital 7269 3 726914538
#Pandigital 7293 3 729314586
#Pandigital 7329 3 732914658
#Pandigital 7692 3 769215384
#Pandigital 7923 3 792315846
#Pandigital 7932 3 793215864
#Pandigital 9267 3 926718534
#Pandigital 9273 3 927318546
#Pandigital 9327 3 932718654<|fim▁end|> | while n<999:
j=1 |
<|file_name|>Ecs20140526DeleteSnapshotRequest.py<|end_file_name|><|fim▁begin|>'''
Created by auto_sdk on 2015.04.21
<|fim▁hole|> def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.SnapshotId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DeleteSnapshot.2014-05-26'<|fim▁end|> | '''
from aliyun.api.base import RestApi
class Ecs20140526DeleteSnapshotRequest(RestApi):
|
<|file_name|>testRoom.py<|end_file_name|><|fim▁begin|>import unittest
import json
import sys
import os.path
import websocket
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
from app.constants import Message
class TestServer(unittest.TestCase):
"""
This is not a unit test.
Simple client to test basic functionality of the server
"""
def setUp(self):
self.ws = websocket.create_connection("ws://127.0.0.1:8000/ws")
def testOnOpenServerSendsCard(self):
res = self.ws.recv()
print "\nCard on connection", res
self.assertIn("cardReceived", res)
def testServerSendsCard(self):
print "\nSending {0} ServerSendsCard".format(Message.GetCard)
self.ws.send(json.dumps({'type':Message.GetCard}))
self.ws.recv()
res = self.ws.recv()
print res
self.assertIn("Stormtrooper", res)
def testServerSendsListRooms(self):
print "\nSending {0} ListOfRooms".format(Message.GetListOfRoom)
self.ws.send(json.dumps({'type':Message.GetListOfRoom}))
self.ws.recv()
res = self.ws.recv()
print res
self.assertIn(str(Message.ListOfRooms), res)
def testServerCreatesAndSendsRoom(self):
print "\nSending {0} CreateRoom".format(Message.CreateRoom)
self.ws.send(json.dumps({'type':Message.CreateRoom, 'name':'My Favorite Room'}))
self.ws.recv()
res = self.ws.recv()
print res
self.assertIn("id", res)
def testErrorWhenUserCreatesSecondRoom(self):
print "\n **************************---***************"
print "\nSending {0} Create 2 room. Room 1".format(Message.CreateRoom)
self.ws.send(json.dumps({'type':Message.CreateRoom, 'name':'First room, Ok'}))
self.ws.recv()
res = self.ws.recv()
print res
print "create second Room"
self.ws.send(json.dumps({'type':Message.CreateRoom, 'name':'SecondRoom room, WRONG'}))
res2 = self.ws.recv()
print res2
self.assertIn(str(Message.Error), res2)
def test_destroy_room(self):
print "\n **************************---***************"
self.ws.recv()
print "\nSending {0} CreateRoom".format(Message.CreateRoom)
self.ws.send(json.dumps({'type':Message.CreateRoom, 'name':'This room will be destroyed'}))
print "Recv: {0}".format(self.ws.recv())
#get list of rooms
self.ws.send(json.dumps({'type':Message.GetListOfRoom}))
print "list of rooms {0}".format(self.ws.recv())
print "Sending {0} DestroyRoom".format(Message.DestroyRoom)
self.ws.send(json.dumps({'type':Message.DestroyRoom}))
res = self.ws.recv()
print("\nReceive: {0}".format(res))
#get list of rooms
self.ws.send(json.dumps({'type':Message.GetListOfRoom}))<|fim▁hole|> self.assertIn(str(Message.SUCCESS), res)
# def test_client_can_send_chat_messages(self):
# print "\n chat *****************************************"
# self.ws.recv()
# self.ws.send(json.dumps({'type':Message.ListOfRooms}))
# res = self.ws.recv()
# roomId = json.loads(res)['id']
# self.ws.send(json.dumps({'type':Message.ConnectToRoom, 'id':roomId}))
# _= self.ws.recv()
#
# self.assertIn("id", res)
#
def tearDown(self):
self.ws.close()
if __name__ == "__main__":
unittest.main()<|fim▁end|> | print "list of rooms {0}".format(self.ws.recv()) |
<|file_name|>RainScattered.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export { RainScattered as default } from "./"; |
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use std::path::{Path, PathBuf};
use std::fs::{self, File};
use rustc_serialize::{Encodable, Encoder};
use url::Url;
use git2::{self, ObjectType};
use core::GitReference;
use util::{CargoResult, ChainError, human, ToUrl, internal};
#[derive(PartialEq, Clone, Debug)]
pub struct GitRevision(git2::Oid);
impl fmt::Display for GitRevision {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.0, f)
}
}
/// GitRemote represents a remote repository. It gets cloned into a local
/// GitDatabase.
#[derive(PartialEq,Clone,Debug)]
pub struct GitRemote {
url: Url,
}
#[derive(PartialEq,Clone,RustcEncodable)]
struct EncodableGitRemote {
url: String,
}
impl Encodable for GitRemote {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
EncodableGitRemote {
url: self.url.to_string()
}.encode(s)
}
}
/// GitDatabase is a local clone of a remote repository's database. Multiple
/// GitCheckouts can be cloned from this GitDatabase.
pub struct GitDatabase {
remote: GitRemote,
path: PathBuf,
repo: git2::Repository,
}
#[derive(RustcEncodable)]
pub struct EncodableGitDatabase {
remote: GitRemote,
path: String,
}
impl Encodable for GitDatabase {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
EncodableGitDatabase {
remote: self.remote.clone(),
path: self.path.display().to_string()
}.encode(s)
}
}
/// GitCheckout is a local checkout of a particular revision. Calling
/// `clone_into` with a reference will resolve the reference into a revision,
/// and return a CargoError if no revision for that reference was found.
pub struct GitCheckout<'a> {
database: &'a GitDatabase,
location: PathBuf,
revision: GitRevision,
repo: git2::Repository,
}
#[derive(RustcEncodable)]
pub struct EncodableGitCheckout {
database: EncodableGitDatabase,
location: String,
revision: String,
}
impl<'a> Encodable for GitCheckout<'a> {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
EncodableGitCheckout {
location: self.location.display().to_string(),
revision: self.revision.to_string(),
database: EncodableGitDatabase {
remote: self.database.remote.clone(),
path: self.database.path.display().to_string(),
},
}.encode(s)
}
}
// Implementations
impl GitRemote {
pub fn new(url: &Url) -> GitRemote {
GitRemote { url: url.clone() }
}
pub fn url(&self) -> &Url {
&self.url
}
pub fn rev_for(&self, path: &Path, reference: &GitReference)
-> CargoResult<GitRevision> {
let db = try!(self.db_at(path));
db.rev_for(reference)
}
pub fn checkout(&self, into: &Path) -> CargoResult<GitDatabase> {
let repo = match git2::Repository::open(into) {
Ok(repo) => {
try!(self.fetch_into(&repo).chain_error(|| {
human(format!("failed to fetch into {}", into.display()))
}));
repo
}
Err(..) => {
try!(self.clone_into(into).chain_error(|| {
human(format!("failed to clone into: {}", into.display()))
}))
}
};
Ok(GitDatabase {
remote: self.clone(),
path: into.to_path_buf(),
repo: repo,
})
}
pub fn db_at(&self, db_path: &Path) -> CargoResult<GitDatabase> {
let repo = try!(git2::Repository::open(db_path));
Ok(GitDatabase {
remote: self.clone(),
path: db_path.to_path_buf(),
repo: repo,
})
}
fn fetch_into(&self, dst: &git2::Repository) -> CargoResult<()> {
// Create a local anonymous remote in the repository to fetch the url
let url = self.url.to_string();
let refspec = "refs/heads/*:refs/heads/*";
fetch(dst, &url, refspec)
}
fn clone_into(&self, dst: &Path) -> CargoResult<git2::Repository> {
let url = self.url.to_string();
if fs::metadata(&dst).is_ok() {
try!(fs::remove_dir_all(dst));
}
try!(fs::create_dir_all(dst));
let repo = try!(git2::Repository::init_bare(dst));
try!(fetch(&repo, &url, "refs/heads/*:refs/heads/*"));
Ok(repo)
}
}
impl GitDatabase {
fn path(&self) -> &Path {
&self.path
}
pub fn copy_to(&self, rev: GitRevision, dest: &Path)
-> CargoResult<GitCheckout> {
let checkout = match git2::Repository::open(dest) {
Ok(repo) => {
let checkout = GitCheckout::new(dest, self, rev, repo);
if !checkout.is_fresh() {
try!(checkout.fetch());
try!(checkout.reset());
assert!(checkout.is_fresh());
}
checkout
}
Err(..) => try!(GitCheckout::clone_into(dest, self, rev)),
};
try!(checkout.update_submodules().chain_error(|| {
internal("failed to update submodules")
}));
Ok(checkout)
}
pub fn rev_for(&self, reference: &GitReference) -> CargoResult<GitRevision> {
let id = match *reference {
GitReference::Tag(ref s) => {
try!((|| {
let refname = format!("refs/tags/{}", s);
let id = try!(self.repo.refname_to_id(&refname));
let obj = try!(self.repo.find_object(id, None));
let obj = try!(obj.peel(ObjectType::Commit));
Ok(obj.id())
}).chain_error(|| {
human(format!("failed to find tag `{}`", s))
}))
}
GitReference::Branch(ref s) => {
try!((|| {
let b = try!(self.repo.find_branch(s, git2::BranchType::Local));
b.get().target().chain_error(|| {
human(format!("branch `{}` did not have a target", s))
})
}).chain_error(|| {
human(format!("failed to find branch `{}`", s))
}))
}
GitReference::Rev(ref s) => {
let obj = try!(self.repo.revparse_single(s));
obj.id()
}
};
Ok(GitRevision(id))
}
pub fn has_ref(&self, reference: &str) -> CargoResult<()> {
try!(self.repo.revparse_single(reference));
Ok(())
}
}
impl<'a> GitCheckout<'a> {
fn new(path: &Path, database: &'a GitDatabase, revision: GitRevision,
repo: git2::Repository)
-> GitCheckout<'a>
{
GitCheckout {
location: path.to_path_buf(),
database: database,
revision: revision,
repo: repo,
}
}
fn clone_into(into: &Path, database: &'a GitDatabase,
revision: GitRevision)
-> CargoResult<GitCheckout<'a>>
{
let repo = try!(GitCheckout::clone_repo(database.path(), into));
let checkout = GitCheckout::new(into, database, revision, repo);
try!(checkout.reset());
Ok(checkout)
}
fn clone_repo(source: &Path, into: &Path) -> CargoResult<git2::Repository> {
let dirname = into.parent().unwrap();
try!(fs::create_dir_all(&dirname).chain_error(|| {
human(format!("Couldn't mkdir {}", dirname.display()))
}));
if fs::metadata(&into).is_ok() {
try!(fs::remove_dir_all(into).chain_error(|| {
human(format!("Couldn't rmdir {}", into.display()))
}));
}
let url = try!(source.to_url().map_err(human));
let url = url.to_string();
let repo = try!(git2::Repository::clone(&url, into).chain_error(|| {
internal(format!("failed to clone {} into {}", source.display(),
into.display()))
}));
Ok(repo)
}
fn is_fresh(&self) -> bool {
match self.repo.revparse_single("HEAD") {
Ok(ref head) if head.id() == self.revision.0 => {
// See comments in reset() for why we check this
fs::metadata(self.location.join(".cargo-ok")).is_ok()
}
_ => false,
}
}
fn fetch(&self) -> CargoResult<()> {
info!("fetch {}", self.repo.path().display());
let url = try!(self.database.path.to_url().map_err(human));
let url = url.to_string();
let refspec = "refs/heads/*:refs/heads/*";
try!(fetch(&self.repo, &url, refspec));
Ok(())
}
fn reset(&self) -> CargoResult<()> {
// If we're interrupted while performing this reset (e.g. we die because
// of a signal) Cargo needs to be sure to try to check out this repo
// again on the next go-round.
//
// To enable this we have a dummy file in our checkout, .cargo-ok, which
// if present means that the repo has been successfully reset and is
// ready to go. Hence if we start to do a reset, we make sure this file
// *doesn't* exist, and then once we're done we create the file.
let ok_file = self.location.join(".cargo-ok");
let _ = fs::remove_file(&ok_file);
info!("reset {} to {}", self.repo.path().display(), self.revision);
let object = try!(self.repo.find_object(self.revision.0, None));
try!(self.repo.reset(&object, git2::ResetType::Hard, None));
try!(File::create(ok_file));
Ok(())
}
fn update_submodules(&self) -> CargoResult<()> {
return update_submodules(&self.repo);
fn update_submodules(repo: &git2::Repository) -> CargoResult<()> {
info!("update submodules for: {:?}", repo.workdir().unwrap());
for mut child in try!(repo.submodules()).into_iter() {
try!(child.init(false));
let url = try!(child.url().chain_error(|| {
internal("non-utf8 url for submodule")
}));
// A submodule which is listed in .gitmodules but not actually
// checked out will not have a head id, so we should ignore it.
let head = match child.head_id() {
Some(head) => head,
None => continue,
};
// If the submodule hasn't been checked out yet, we need to
// clone it. If it has been checked out and the head is the same
// as the submodule's head, then we can bail out and go to the
// next submodule.
let head_and_repo = child.open().and_then(|repo| {
let target = try!(repo.head()).target();
Ok((target, repo))
});
let repo = match head_and_repo {
Ok((head, repo)) => {
if child.head_id() == head {
continue
}
repo
}
Err(..) => {
let path = repo.workdir().unwrap().join(child.path());
try!(git2::Repository::clone(url, &path))
}
};
// Fetch data from origin and reset to the head commit
let refspec = "refs/heads/*:refs/heads/*";
try!(fetch(&repo, url, refspec).chain_error(|| {
internal(format!("failed to fetch submodule `{}` from {}",
child.name().unwrap_or(""), url))
}));
let obj = try!(repo.find_object(head, None));
try!(repo.reset(&obj, git2::ResetType::Hard, None));
try!(update_submodules(&repo));
}
Ok(())
}
}
}
fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
-> CargoResult<T>
where F: FnMut(&mut git2::Credentials) -> CargoResult<T>
{
// Prepare the authentication callbacks.
//
// We check the `allowed` types of credentials, and we try to do as much as
// possible based on that:
//
// * Prioritize SSH keys from the local ssh agent as they're likely the most
// reliable. The username here is prioritized from the credential
// callback, then from whatever is configured in git itself, and finally
// we fall back to the generic user of `git`.
//
// * If a username/password is allowed, then we fallback to git2-rs's
// implementation of the credential helper. This is what is configured
// with `credential.helper` in git, and is the interface for the OSX
// keychain, for example.
//
// * After the above two have failed, we just kinda grapple attempting to
// return *something*.
//
// Note that we keep track of the number of times we've called this callback
// because libgit2 will repeatedly give us credentials until we give it a
// reason to not do so. If we've been called once and our credentials failed
// then we'll be called again, and in this case we assume that the reason
// was because the credentials were wrong.
let mut cred_helper = git2::CredentialHelper::new(url);
cred_helper.config(cfg);
let mut called = 0;
let res = f(&mut |url, username, allowed| {
called += 1;
if called >= 2 {
return Err(git2::Error::from_str("no authentication available"))
}
if allowed.contains(git2::SSH_KEY) ||
allowed.contains(git2::USERNAME) {
let user = username.map(|s| s.to_string())
.or_else(|| cred_helper.username.clone())
.unwrap_or("git".to_string());
if allowed.contains(git2::USERNAME) {
git2::Cred::username(&user)
} else {
git2::Cred::ssh_key_from_agent(&user)
}
} else if allowed.contains(git2::USER_PASS_PLAINTEXT) {
git2::Cred::credential_helper(cfg, url, username)
} else if allowed.contains(git2::DEFAULT) {
git2::Cred::default()
} else {
Err(git2::Error::from_str("no authentication available"))
}
});
if called > 0 {
res.chain_error(|| {
human("failed to authenticate when downloading repository")
})
} else {
res
}
}
pub fn fetch(repo: &git2::Repository, url: &str,
refspec: &str) -> CargoResult<()> {
// Create a local anonymous remote in the repository to fetch the url
with_authentication(url, &try!(repo.config()), |f| {<|fim▁hole|> let mut cb = git2::RemoteCallbacks::new();
cb.credentials(f);
let mut remote = try!(repo.remote_anonymous(&url));
let mut opts = git2::FetchOptions::new();
opts.remote_callbacks(cb)
.download_tags(git2::AutotagOption::All);
try!(remote.fetch(&[refspec], Some(&mut opts), None));
Ok(())
})
}<|fim▁end|> | |
<|file_name|>pygsf.py<|end_file_name|><|fim▁begin|>#name: pygsf
#created: July 2017
#by: p.kennedy@fugro.com
#description: python module to read and write a Generic Sensor Formaty (GSF) file natively
#notes: See main at end of script for example how to use this
#based on GSF Version 3.05
# See readme.md for more details
import sys
from glob import glob
import argparse
import os.path
import struct
import pprint
import time
import datetime
import math
import random
from datetime import datetime
from datetime import timedelta
from statistics import mean
import mmap
# for testing only...
# import matplotlib.pyplot as plt
import numpy as np
#/* The high order 4 bits are used to define the field size for this array */
GSF_FIELD_SIZE_DEFAULT = 0x00 #/* Default values for field size are used used for all beam arrays */
GSF_FIELD_SIZE_ONE = 0x10 #/* value saved as a one byte value after applying scale and offset */
GSF_FIELD_SIZE_TWO = 0x20 #/* value saved as a two byte value after applying scale and offset */
GSF_FIELD_SIZE_FOUR = 0x40 #/* value saved as a four byte value after applying scale and offset */
GSF_MAX_PING_ARRAY_SUBRECORDS = 26
# Record Decriptions (See page 82)
HEADER = 1
SWATH_BATHYMETRY = 2
SOUND_VELOCITY_PROFILE = 3
PROCESSING_PARAMETERS = 4
SENSOR_PARAMETERS = 5
COMMENT = 6
HISTORY = 7
NAVIGATION_ERROR = 8
SWATH_BATHY_SUMMARY = 9
SINGLE_BEAM_SOUNDING = 10
HV_NAVIGATION_ERROR = 11
ATTITUDE = 12
SNIPPET_NONE = 0 # extract the mean value from the snippet array
SNIPPET_MEAN = 1 # extract the mean value from the snippet array
SNIPPET_MAX = 2 # extract the maximum value from the snippet array
SNIPPET_DETECT = 3 # extract the bottom detect snippet value from the snippet array
SNIPPET_MEAN5DB = 4 # extract the mean of all snippets within 5dB of the mean
# the various frequencies we support in the R2Sonic multispectral files
ARCIdx = {100000: 0, 200000: 1, 400000: 2}
# the rejection flags used by this software
REJECT_CLIP = -1
REJECT_RANGE= -2
REJECT_INTENSITY= -4
###############################################################################
def main():
parser = argparse.ArgumentParser(description='Read GSF file and create a reflectivity image.')
parser.add_argument('-i', dest='inputFile', action='store', help='Input ALL filename to image. It can also be a wildcard, e.g. *.gsf')
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
print ("processing with settings: ", args)
for filename in glob(args.inputFile):
if not filename.endswith('.gsf'):
print ("File %s is not a .all file, skipping..." % (filename))
continue
if not os.path.isfile(filename):
print ("file not found:", filename)
exit()
# testR2SonicAdjustment()
testreader(filename)
# conditioner()
###############################################################################
def testreader(filename):
'''
sample read script so we can see how to use the code
'''
start_time = time.time() # time the process so we can keep it quick
# filename = "C:/projects/multispectral/PatriciaBasin/20161130-1907 - 0001-2026_1.gsf"
# filename = "C:/development/python/sample_subset.gsf"
# filename = "F:/Projects/multispectral/_BedfordBasin2016/20160331 - 125110 - 0001-2026_1.gsf"
# filename = "F:/Projects/multispectral/_Newbex/20170524-134208 - 0001-2026_1.gsf"
# filename = "F:/Projects/multispectral/_BedfordBasin2017/20170502 - 131750 - 0001-2026_1.gsf"
# filename = "C:/projects/multispectral/_BedfordBasin2017/20170502 - 150058 - 0001-2026_1.gsf"
print (filename)
pingcount = 0
# create a GSFREADER class and pass the filename
r = GSFREADER(filename)
# r.loadnavigation()
# f1 = plt.figure()
# # f2 = plt.figure()
# # f3 = plt.figure()
# ax1 = f1.add_subplot(111)
# # ax2 = f2.add_subplot(111)
# # ax3 = f3.add_subplot(111)
print ("pingcount, pingnumber, 100kHz, 200kHz, 400kHz")
while r.moreData():
# read a datagram. If we support it, return the datagram type and aclass for that datagram
# The user then needs to call the read() method for the class to undertake a fileread and binary decode. This keeps the read super quick.
numberofbytes, recordidentifier, datagram = r.readDatagram()
# print(datagram)
if recordidentifier == SWATH_BATHYMETRY:
print(recordidentifier, end=',')
datagram.read()
datagram.snippettype = SNIPPET_NONE
# print ("%s Lat:%.3f Lon:%.3f Ping:%d Freq:%d Serial %s" % (datagram.currentRecordDateTime(), datagram.latitude, datagram.longitude, datagram.pingnumber, datagram.frequency, datagram.serialnumber))
# for cross profile plotting
# bs = []
# for s in datagram.MEAN_REL_AMPLITUDE_ARRAY:
# if s != 0:
# bs.append(20 * math.log10(s) - 100)
# else:
# bs.append(0)
# bs = [20 * math.log10(s) - 100 for s in datagram.MEAN_REL_AMPLITUDE_ARRAY]
samplearray = datagram.R2Soniccorrection()
if datagram.frequency == 100000:
freq100 = mean(samplearray)
if datagram.frequency == 200000:
freq200 = mean(samplearray)
if datagram.frequency == 400000:
freq400 = mean(samplearray)
# print ("%d,%d,%.3f,%.3f,%.3f" %(pingcount, datagram.pingnumber, freq100, freq200, freq400))
# print ("%d" %(pingcount))
pingcount += 1
# if len(bs) > 0:
# plt.plot(datagram.BEAM_ANGLE_ARRAY, bs, linewidth=0.25, color='blue')
# plt.ylim([-60,-5])
# plt.xlim([-60,60])
# # ax3.plot(datagram.BEAM_ANGLE_ARRAY, datagram.ALONG_TRACK_ARRAY)
# plt.pause(0.001)
# datagram.clippolar(-60, 60)
# print("Duration %.3fs" % (time.time() - start_time )) # time the process
# print ("PingCount:", pingcount)
return
###############################################################################
class UNKNOWN_RECORD:
'''used as a convenience tool for datagrams we have no bespoke classes. Better to make a bespoke class'''
def __init__(self, fileptr, numbytes, recordidentifier, hdrlen):
self.recordidentifier = recordidentifier
self.offset = fileptr.tell()
self.hdrlen = hdrlen
self.numbytes = numbytes
self.fileptr = fileptr
self.fileptr.seek(numbytes, 1) # set the file ptr to the end of the record
self.data = ""
self.name = "unknown"
def read(self):
self.data = self.fileptr.read(self.numberofbytes)
def __str__(self):
'''
pretty print this class
'''
return pprint.pformat(vars(self))
class SCALEFACTOR:
def __init__(self):
self.subrecordID = 0
self.compressionFlag = 0 #/* Specifies bytes of storage in high order nibble and type of compression in low order nibble */
self.multiplier = 0.0
self.offset = 0
self.name = "scaleFactor"
def __str__(self):
'''
pretty print this class
'''
return pprint.pformat(vars(self))
class SWATH_BATHYMETRY_PING :
def __init__(self, fileptr, numbytes, recordidentifier, hdrlen):
self.recordidentifier = recordidentifier # assign the GSF code for this datagram type
self.offset = fileptr.tell() # remember where this packet resides in the file so we can return if needed
self.hdrlen = hdrlen # remember the header length. it should be 8 bytes, bout if checksum then it is 12
self.numbytes = numbytes # remember how many bytes this packet contains
self.fileptr = fileptr # remember the file pointer so we do not need to pass from the host process
self.fileptr.seek(numbytes, 1) # move the file pointer to the end of the record so we can skip as the default actions
self.scalefactors = []
self.DEPTH_ARRAY = []
self.ACROSS_TRACK_ARRAY = []
self.ALONG_TRACK_ARRAY = []
self.TRAVEL_TIME_ARRAY = []
self.BEAM_ANGLE_ARRAY = []
self.MEAN_CAL_AMPLITUDE_ARRAY = []
self.MEAN_REL_AMPLITUDE_ARRAY = []
self.QUALITY_FACTOR_ARRAY = []
self.BEAM_FLAGS_ARRAY = []
self.BEAM_ANGLE_FORWARD_ARRAY = []
self.VERTICAL_ERROR_ARRAY = []
self.HORIZONTAL_ERROR_ARRAY = []
self.SECTOR_NUMBER_ARRAY = []
# self.INTENSITY_SERIES_ARRAY = []
self.SNIPPET_SERIES_ARRAY = []
self.perbeam = True
self.snippettype = SNIPPET_MAX
self.numbeams = 0
self.time = 0
self.pingnanotime = 0
self.name = "swath bathy ping"
###############################################################################
def __str__(self):
'''
pretty print this class
'''
return pprint.pformat(vars(self))
###############################################################################
def clippolar(self, leftclipdegrees, rightclipdegrees):
'''sets the processing flags to rejected if the beam angle is beyond the clip parameters'''
if self.numbeams == 0:
return
if len(self.QUALITY_FACTOR_ARRAY) != len(self.TRAVEL_TIME_ARRAY):
return
for i, s in enumerate(self.BEAM_ANGLE_ARRAY):
if (s <= leftclipdegrees) or (s >= rightclipdegrees):
self.QUALITY_FACTOR_ARRAY[i] += REJECT_CLIP
# self.MEAN_REL_AMPLITUDE_ARRAY[i] = 0
# self.ACROSS_TRACK_ARRAY[i] = 0
return
###############################################################################
def cliptwtt(self, minimumtraveltime=0.0):
    '''flag beams whose two way travel time is at or below minimumtraveltime as rejected'''
    if self.numbeams == 0 or len(self.QUALITY_FACTOR_ARRAY) != len(self.TRAVEL_TIME_ARRAY):
        return
    for idx, twtt in enumerate(self.TRAVEL_TIME_ARRAY):
        if twtt <= minimumtraveltime:
            self.QUALITY_FACTOR_ARRAY[idx] += REJECT_RANGE
    return
###############################################################################
def clipintensity(self, minimumintenisty=0.0):
    '''flag beams whose mean relative amplitude is at or below the threshold as rejected'''
    if self.numbeams == 0 or len(self.QUALITY_FACTOR_ARRAY) != len(self.TRAVEL_TIME_ARRAY):
        return
    for idx, amplitude in enumerate(self.MEAN_REL_AMPLITUDE_ARRAY):
        if amplitude <= minimumintenisty:
            self.QUALITY_FACTOR_ARRAY[idx] += REJECT_INTENSITY
    return
###############################################################################
def read(self, headeronly=False):
    '''decode the swath bathymetry ping record.
    The fixed-size ping header (time, position, attitude, correctors) is decoded
    first, then each variable-length subrecord is dispatched to the matching
    array decoder. Set headeronly=True to skip the subrecord bodies for speed.'''
    # move to the start of the payload, just past the record header
    self.fileptr.seek(self.offset + self.hdrlen, 0)
    # ping header: secs, nsecs, lon/lat (1e-7 degree ints), counts, attitude, correctors
    hdrfmt = '>llll5hlH3h2Hlllh'
    hdrlen = struct.calcsize(hdrfmt)
    rec_unpack = struct.Struct(hdrfmt).unpack
    data = self.fileptr.read(hdrlen)
    s = rec_unpack(data)
    self.time = s[0]
    # s[1] is not decoded here; pingnanotime is set by the sensor specific subrecord
    self.longitude = s[2] / 10000000
    self.latitude = s[3] / 10000000
    self.numbeams = s[4]
    self.centrebeam = s[5]
    self.pingflags = s[6]
    self.reserved = s[7]
    self.tidecorrector = s[8] / 100
    self.depthcorrector = s[9] / 100
    self.heading = s[10] / 100
    self.pitch = s[11] / 100
    self.roll = s[12] / 100
    self.heave = s[13] / 100
    self.course = s[14] / 100
    self.speed = s[15] / 100
    self.height = s[16] / 100
    self.separation = s[17] / 100
    self.gpstidecorrector = s[18] / 100
    self.spare = s[19]
    while (self.fileptr.tell() < self.offset + self.numbytes): # don't read past the end of the packet length. This should never happen!
        fmt = '>l'
        fmtlen = struct.calcsize(fmt)
        rec_unpack = struct.Struct(fmt).unpack
        data = self.fileptr.read(fmtlen) # read the subrecord header from disc
        s = rec_unpack(data)
        subrecord_id = (s[0] & 0xFF000000) >> 24
        subrecord_size = s[0] & 0x00FFFFFF
        # skip the record for performance reasons. Very handy in some circumstances
        if headeronly:
            if subrecord_id == 21:
                self.fileptr.seek(self.offset + self.numbytes, 0) # move to the end of the record as we cannot trust the record length from the 2024
            else:
                self.fileptr.seek(subrecord_size, 1) # move forwards to the end of the subrecord
            continue
        # now decode the subrecord
        scale, offset, compressionFlag, datatype = self.getscalefactor(subrecord_id, subrecord_size / int(self.numbeams))
        if subrecord_id == 100:
            self.readscalefactors()
        elif subrecord_id == 1:
            self.readarray(self.DEPTH_ARRAY, scale, offset, datatype)
        elif subrecord_id == 2:
            self.readarray(self.ACROSS_TRACK_ARRAY, scale, offset, datatype)
        elif subrecord_id == 3:
            self.readarray(self.ALONG_TRACK_ARRAY, scale, offset, datatype)
        elif subrecord_id == 4:
            self.readarray(self.TRAVEL_TIME_ARRAY, scale, offset, datatype)
        elif subrecord_id == 5:
            self.readarray(self.BEAM_ANGLE_ARRAY, scale, offset, datatype)
        elif subrecord_id == 6:
            self.readarray(self.MEAN_CAL_AMPLITUDE_ARRAY, scale, offset, datatype)
        elif subrecord_id == 7:
            self.readarray(self.MEAN_REL_AMPLITUDE_ARRAY, scale, offset, datatype)
        elif subrecord_id == 9:
            self.readarray(self.QUALITY_FACTOR_ARRAY, scale, offset, datatype)
        elif subrecord_id == 16:
            self.readarray(self.BEAM_FLAGS_ARRAY, scale, offset, datatype)
        elif subrecord_id == 18:
            self.readarray(self.BEAM_ANGLE_FORWARD_ARRAY, scale, offset, datatype)
        elif subrecord_id == 19:
            self.readarray(self.VERTICAL_ERROR_ARRAY, scale, offset, datatype)
        elif subrecord_id == 20:
            # BUG FIX: subrecord 20 is the horizontal error estimate. It previously
            # decoded into VERTICAL_ERROR_ARRAY (copy/paste of the id 19 branch),
            # leaving HORIZONTAL_ERROR_ARRAY permanently empty.
            self.readarray(self.HORIZONTAL_ERROR_ARRAY, scale, offset, datatype)
        elif subrecord_id == 21:
            before = self.fileptr.tell()
            self.readintensityarray(self.SNIPPET_SERIES_ARRAY, scale, offset, datatype, self.snippettype)
            if subrecord_size % 4 > 0:
                self.fileptr.seek(4 - (subrecord_size % 4), 1) # pkpk we should not need this!!!
        elif subrecord_id == 22:
            self.readarray(self.SECTOR_NUMBER_ARRAY, scale, offset, datatype)
        else:
            # skip to the end of the subrecord to keep alignment. This permits us to not have all the decodes in place
            self.fileptr.seek(subrecord_size, 1)
    return
def getscalefactor(self, ID, bytes_per_value):
    '''look up the scale factor for subrecord 'ID' and pick the struct datatype
    character implied by the element width. Returns (multiplier, offset,
    compressionFlag, datatype); defaults to (1, 0, 0, 'h') when no scale
    factor record exists for the ID.'''
    for s in self.scalefactors:
        if s.subrecordID == ID: # found the scale factor for this subrecord
            if bytes_per_value == 1:
                datatype = 'B' # unsigned 8 bit
            elif bytes_per_value == 2:
                datatype = 'H' # unsigned 16 bit by default
                if ID == 2: # ACROSS_TRACK_ARRAY is signed
                    datatype = 'h'
                if ID == 3: # ALONG_TRACK_ARRAY is signed
                    datatype = 'h'
                if ID == 5: # beam angle array is signed
                    datatype = 'h'
            elif bytes_per_value == 4:
                datatype = 'L' # unsigned 32 bit by default
                if ID == 2: # ACROSS_TRACK_ARRAY is signed
                    datatype = 'l'
                if ID == 5: # beam angle array is signed
                    datatype = 'l'
            else:
                datatype = 'L' # NOTE(review): fallback for unexpected widths - untested, needs test data
            return s.multiplier, s.offset, s.compressionFlag, datatype
    return 1,0,0, 'h'
def readscalefactors(self):
    '''decode subrecord 100: a 4 byte count followed by one
    (packed id/compression, multiplier, offset) triplet per scale factor.
    Appends SCALEFACTOR objects to self.scalefactors.'''
    countfmt = '>l'
    countdata = self.fileptr.read(struct.calcsize(countfmt))
    self.numscalefactors = struct.unpack(countfmt, countdata)[0]
    # each scale factor is three big-endian 32 bit integers
    triplefmt = '>lll'
    triplelen = struct.calcsize(triplefmt)
    for _ in range(self.numscalefactors):
        packedid, multiplier, offset = struct.unpack(triplefmt, self.fileptr.read(triplelen))
        sf = SCALEFACTOR()
        sf.subrecordID = (packedid & 0xFF000000) >> 24
        sf.compressionFlag = (packedid & 0x00FF0000) >> 16
        sf.multiplier = multiplier
        sf.offset = offset
        self.scalefactors.append(sf)
    return
def readintensityarray(self, snippets, scale, offset, datatype, snippettype):
    '''
    read the time series intensity array type 21 subrecord and reduce each
    beam's snippet samples to a single value, appended to 'snippets'.
    snippettype selects the reduction: SNIPPET_NONE, SNIPPET_MEAN5DB,
    SNIPPET_MEAN or SNIPPET_MAX.
    '''
    hdrfmt = '>bl16s'
    hdrlen = struct.calcsize(hdrfmt)
    rec_unpack = struct.Struct(hdrfmt).unpack
    hdr = self.fileptr.read(hdrlen)
    s = rec_unpack(hdr)
    bitspersample = s[0]
    appliedcorrections = s[1]
    # before we decode the intensity data, read the sensor specific header
    # for now just read the r2sonic as that is what we need. For other sensors we need to implement decodes
    self.decodeR2SonicImagerySpecific()
    for b in range(self.numbeams):
        # per-beam snippet header: sample count, bottom detect index, spare
        hdrfmt = '>hh8s'
        hdrlen = struct.calcsize(hdrfmt)
        rec_unpack = struct.Struct(hdrfmt).unpack
        hdr = self.fileptr.read(hdrlen)
        s = rec_unpack(hdr)
        numsamples = s[0]
        bottomdetectsamplenumber = s[1]
        spare = s[2]
        fmt = '>' + str(numsamples) + 'H'
        l = struct.calcsize(fmt)
        rec_unpack = struct.Struct(fmt).unpack
        data = self.fileptr.read(l)
        raw = rec_unpack(data)
        # strip out zero values
        raw = [s for s in raw if s != 0]
        if snippettype == SNIPPET_NONE:
            snippets.append(0)
            continue
        elif snippettype == SNIPPET_MEAN5DB:
            # populate the array with the mean of all samples within a 5dB range of the mean. As per QPS
            if len(raw) > 0:
                raw2 = [20.0 * math.log10(s / scale + offset) for s in raw]
                mean = (sum(raw2) / float(len(raw2) ))
                highcut = [s for s in raw2 if s < mean + 5] #high cut +5dB
                highlowcut = [s for s in highcut if s > mean - 5] #low cut -5dB
            else:
                snippets.append(0)
                continue
            if len(highlowcut) > 0:
                # NOTE(review): parenthesisation looks wrong here - it divides
                # len() by scale inside float(); presumably (sum/len)/scale + offset
                # was intended. Confirm against the GSF/QPS spec before changing.
                snippets.append((sum(highlowcut) / float(len(highlowcut) / scale) + offset))
            else:
                snippets.append((mean / scale) + offset)
        elif snippettype == SNIPPET_MEAN:
            # populate the array with the mean of all samples
            if len(raw) > 0:
                # NOTE(review): same suspicious len()/scale parenthesisation as above
                snippets.append((sum(raw) / float(len(raw) / scale) + offset))
            else:
                snippets.append(0)
        elif snippettype == SNIPPET_MAX:
            # populate the array with the MAX of all samples
            if len(raw) > 0:
                snippets.append(max(raw) / scale + offset)
            else:
                snippets.append(0)
        elif snippettype == SNIPPET_MEAN:
            # NOTE(review): this condition duplicates the SNIPPET_MEAN branch above,
            # so this bottom-detect logic is unreachable dead code. The body suggests
            # a different constant (a bottom-detect snippet type) was intended - confirm.
            # populate with a single value as identified by the bottom detect
            if bottomdetectsamplenumber > 0:
                snippets.append ((raw[bottomdetectsamplenumber] / scale) + offset)
            else:
                snippets.append (0)
    return
###############################################################################
def R2Soniccorrection(self):
    '''entry point for r2sonic backscatter TVG, Gain and footprint correction algorithm.
    NOTE(review): the leading if/else returns on BOTH paths, so everything after
    it is currently unreachable and the raw per-beam / snippet array is returned
    unmodified. This looks like the correction was deliberately disabled -
    confirm before re-enabling the code below.'''
    if self.perbeam:
        samplearray = self.MEAN_REL_AMPLITUDE_ARRAY
        return samplearray
    else:
        samplearray = self.SNIPPET_SERIES_ARRAY
        return samplearray
    # --- unreachable from here down (see NOTE above) ---
    # an implementation of the backscatter correction algorithm from Norm Campbell at CSIRO
    H0_TxPower = self.transmitsourcelevel
    H0_SoundSpeed = self.soundspeed
    H0_RxAbsorption = self.absorptioncoefficient
    H0_TxBeamWidthVert = self.beamwidthvertical
    H0_TxBeamWidthHoriz = self.beamwidthhorizontal
    H0_TxPulseWidth = self.pulsewidth
    H0_RxSpreading = self.receiverspreadingloss
    H0_RxGain = self.receivergain
    H0_VTX_Offset = self.vtxoffset
    for i in range(self.numbeams):
        if self.BEAM_FLAGS_ARRAY[i] < 0:
            continue # skip rejected beams
        S1_angle = self.BEAM_ANGLE_ARRAY[i] #angle in degrees
        S1_twtt = self.TRAVEL_TIME_ARRAY[i]
        S1_range = math.sqrt((self.ACROSS_TRACK_ARRAY[i] ** 2) + (self.ALONG_TRACK_ARRAY[i] ** 2))
        if samplearray[i] != 0:
            S1_uPa = samplearray[i]
            # adjusted = 0
            # a test on request from Norm....
            # adjusted = 20 * math.log10(S1_uPa) - 100
            # the formal adjustment from Norm Campbell...
            # if i == 127:
            adjusted = self.backscatteradjustment( S1_angle, S1_twtt, S1_range, S1_uPa, H0_TxPower, H0_SoundSpeed, H0_RxAbsorption, H0_TxBeamWidthVert, H0_TxBeamWidthHoriz, H0_TxPulseWidth, H0_RxSpreading, H0_RxGain, H0_VTX_Offset)
            samplearray[i] = adjusted
    return samplearray
###############################################################################
def backscatteradjustment(self, S1_angle, S1_twtt, S1_range, S1_Magnitude, H0_TxPower, H0_SoundSpeed, H0_RxAbsorption, H0_TxBeamWidthVert, H0_TxBeamWidthHoriz, H0_TxPulseWidth, H0_RxSpreading, H0_RxGain, H0_VTX_Offset):
    '''R2Sonic backscatter correction algorithm from Norm Campbell at CSIRO.
    This is a port from F77 fortran code, and has been tested and confirmed to
    provide identical results.
    S1_angle     : beam angle in degrees
    S1_twtt      : two way travel time in seconds
    S1_range     : slant range in metres (NOTE: not used by the computation;
                   range is rederived from twtt and sound speed below)
    S1_Magnitude : raw sample magnitude in micropascals
    returns the corrected backscatter in dB, or 0 for an invalid zero-range beam.'''
    # the following code uses the names for the various packets as listed in the R2Sonic SONIC 2024 Operation Manual v6.0
    # so names beginning with
    # H0_ denote parameters from the BATHY (BTH) and Snippet (SNI) packets from section H0
    # R0_ denote parameters from the BATHY (BTH) packets from section R0
    # S1_ denote parameters from the Snippet (SNI) packets from section S1
    # names beginning with
    # z_ denote values derived from the packet parameters
    # the range, z_range_m, can be found from the two-way travel time (and scaling factor), and the sound speed, as follows:
    one_rad = 57.29577951308232 # degrees per radian
    S1_angle_rad = S1_angle / one_rad
    z_one_way_travel_secs = S1_twtt / 2.0
    z_range_m = z_one_way_travel_secs * H0_SoundSpeed
    # there is a range of zero, so this is an invalid beam, so quit
    if z_range_m == 0:
        return 0
    ###### TRANSMISSION LOSS CORRECTION ##########################################
    # according to Lurton, Augustin and Le Bouffant (Femme 2011), the basic Sonar equation is
    # received_level = source_level - 2 * transmission_loss + target_strength + receiver_gain
    # note that this last term does not always appear explicitly in the sonar equation
    # more specifically:
    # transmission_loss = H0_RxAbsorption * range_m + 40 log10 ( range_m )
    # target_strength = backscatter_dB_m + 10 log10 ( z_area_of_insonification )
    # receiver_gain = TVG + H0_RxGain
    # the components of the Sonar equation can be calculated as follows:
    # u16 S1_Magnitude[S1_Samples]; // [micropascals] = S1_Magnitude[n]
    z_received_level = 20.0 * math.log10 ( S1_Magnitude )
    z_source_level = H0_TxPower # [dB re 1 uPa at 1 meter]
    z_transmission_loss_t1 = 2.0 * H0_RxAbsorption * z_range_m / 1000.0 # [dB per kilometer]
    z_transmission_loss_t2 = 40.0 * math.log10(z_range_m)
    z_transmission_loss = z_transmission_loss_t1 + z_transmission_loss_t2
    ###### INSONIFICATION AREA CORRECTION Checked 19 August 2017 p.kennedy@fugr.com ##########################################
    # for oblique angles
    # area_of_insonification = along_track_beam_width * range * sound_speed * pulse_width / 2 sin ( incidence_angle)
    # for normal incidence
    # area_of_insonification = along_track_beam_width * across_track_beam_width * range ** 2
    sin_S1_angle = math.sin ( abs ( S1_angle_rad ) )
    # from Hammerstad 00 EM Technical Note Backscattering and Seabed Image Reflectivity.pdf
    # A = ψTψr*R^2 around normal incidence
    z_area_of_insonification_nml = H0_TxBeamWidthVert * H0_TxBeamWidthHoriz * z_range_m **2
    # A = ½cτ ψTR/sinφ elsewhere
    if ( abs ( S1_angle ) >= 0.001 ):
        z_area_of_insonification_obl = 0.5 * H0_SoundSpeed * H0_TxPulseWidth * H0_TxBeamWidthVert * z_range_m / sin_S1_angle
        # NOTE(review): the assignment in this inner if/else is immediately
        # superseded by the if/elif/else chain below, so it has no effect
        if ( abs ( S1_angle ) < 25. ):
            z_area_of_insonification = z_area_of_insonification_nml
        else:
            z_area_of_insonification = z_area_of_insonification_obl
    # pick the smaller (more conservative) of the two footprint areas; the
    # first branch also guards against z_area_of_insonification_obl being
    # undefined when the angle is effectively zero
    if ( abs ( S1_angle ) < 0.001 ):
        z_area_of_insonification = z_area_of_insonification_nml
    elif ( z_area_of_insonification_nml < z_area_of_insonification_obl ):
        z_area_of_insonification = z_area_of_insonification_nml
    else:
        z_area_of_insonification = z_area_of_insonification_obl
    ###### TIME VARIED GAIN CORRECTION 19 August 2017 p.kennedy@fugr.com ##########################################
    # note that the first equation refers to the along-track beam width
    # the R2Sonic Operation Manual refers on p21 to the Beamwidth - Along Track -- moreover, for the 2024, the Beamwidth Along Track is twice
    # the Beamwidth Across Track
    # according to the R2Sonic Operation Manual in Section 5.6.3 on p88, the TVG equation is:
    # TVG = 2*R* α/1000 + Sp*log(R) + G
    # where:
    # α = Absorption Loss db/km (H0_RxAbsorption)
    # R = Range in metres (range_m)
    # Sp = Spreading loss coefficient (H0_RxSpreading)
    # G = Gain from Sonar Control setting (H0_RxGain)
    TVG_1 = 2.0 * z_range_m * H0_RxAbsorption / 1000.
    TVG_2 = H0_RxSpreading * math.log10 ( z_range_m )
    TVG = TVG_1 + TVG_2 + H0_RxGain
    # as per email from Beaudoin, clip the TVG between 4 and 83 dB
    TVG = min(max(4, TVG ), 83)
    ###### NOW COMPUTE THE CORRECTED BACKSCATTER ##########################################
    backscatter_dB_m = z_received_level - z_source_level + z_transmission_loss - (10.0 * math.log10 ( z_area_of_insonification )) - TVG - H0_VTX_Offset + 100.0
    return backscatter_dB_m
###############################################################################
def decodeR2SonicImagerySpecific(self):
    '''
    read the sensor specific imagery information for the r2sonic 2024.
    Sets acoustic parameters (sound speed, pulse width, beam widths, gains)
    on self for use by the backscatter correction; the remaining fields are
    decoded into locals and discarded.
    '''
    fmt = '>12s12slll lllll llllhh lllll lllhh lllll l32s'
    l = struct.calcsize(fmt)
    rec_unpack = struct.Struct(fmt).unpack
    data = self.fileptr.read(l)
    raw = rec_unpack(data)
    self.modelnumber = raw[0] # NOTE(review): left as raw bytes, unlike serialnumber below - confirm whether decoding was intended
    self.serialnumber = raw[1].decode('utf-8').rstrip('\x00')
    self.pingtime = raw[2]
    self.pingnanotime = raw[3]
    self.pingnumber = raw[4]
    self.pingperiod = raw[5] / 1.0e6 # seconds
    self.soundspeed = raw[6] / 1.0e2 # m/s
    self.frequency = raw[7] / 1.0e3 # kHz -> Hz scaling per packet spec
    self.transmitsourcelevel = raw[8] / 1.0e2
    self.pulsewidth = raw[9] / 1.0e7 # seconds
    self.beamwidthvertical = math.radians(raw[10] / 1.0e6)
    self.beamwidthhorizontal = math.radians(raw[11] / 1.0e6)
    # apply scaling as per email from Beaudoin https://jira.qps.nl/browse/SFM-2857
    # (these overwrite the two unscaled assignments above)
    self.beamwidthvertical = math.radians(raw[10] / 1.0e6 * (400000 / self.frequency))
    self.beamwidthhorizontal = math.radians(raw[11] / 1.0e6 * (400000 / self.frequency))
    transmitsteeringvertical = raw[12] / 1.0e6
    transmitsteeringhorizontal = raw[13] / 1.0e6
    transmitinfo = raw[14]
    self.vtxoffset = raw[15] / 100
    receiverbandwidth = raw[16] / 1.0e4
    receiversamplerate = raw[17] / 1.0e3
    receiverrange = raw[18] / 1.0e5
    # The GSF file preserves R2Sonic's native scaling of their gain parameter at 0.5 dB resolution, so you need to take the gain and multiply by 2.
    self.receivergain = raw[19] / 1.0e2 * 2.0
    self.receiverspreadingloss = raw[20] / 1.0e3
    self.absorptioncoefficient = raw[21]/ 1.0e3 #dB/kilometre
    mounttiltangle = raw[22] / 1.0e6
    # print ("ping %d Date %s freq %d absorption %.3f" % (self.pingnumber, self.currentRecordDateTime(), self.frequency, self.absorptioncoefficient))
    receiverinfo = raw[23]
    reserved = raw[24]
    numbeams = raw[25]
    moreinfo1 = raw[26] / 1.0e6
    moreinfo2 = raw[27] / 1.0e6
    moreinfo3 = raw[28] / 1.0e6
    moreinfo4 = raw[29] / 1.0e6
    moreinfo5 = raw[30] / 1.0e6
    moreinfo6 = raw[31] / 1.0e6
    spare = raw[32]
    return
def readarray(self, values, scale, offset, datatype):
    '''
    decode one per-beam array subrecord (numbeams big-endian elements of
    'datatype'), apply the scale/offset pair and append the results to 'values'.
    Returns the same list for convenience.
    '''
    fmt = '>%d%s' % (self.numbeams, datatype)
    raw = struct.unpack(fmt, self.fileptr.read(struct.calcsize(fmt)))
    values.extend((sample / scale) + offset for sample in raw)
    return values
def currentRecordDateTime(self):
    '''return the ping timestamp (self.time, unix seconds) as a datetime'''
    return self.from_timestamp(self.time)
def to_timestamp(self, recordDate):
    '''convert a naive UTC datetime to unix seconds (float)'''
    return (recordDate - datetime(1970, 1, 1)).total_seconds()
def from_timestamp(self, unixtime):
    '''convert unix seconds to a naive UTC datetime'''
    return datetime(1970, 1 ,1) + timedelta(seconds=unixtime)
###############################################################################
class GSFHEADER:
    '''GSF file header record: holds the 12 byte version string.
    Construction only captures the record location and skips past it;
    call read() to decode the version.'''

    def __init__(self, fileptr, numbytes, recordidentifier, hdrlen):
        self.recordidentifier = recordidentifier # GSF code for this datagram type
        self.offset = fileptr.tell() # record start, captured before the skip below
        self.hdrlen = hdrlen # record header length in bytes
        self.numbytes = numbytes # total bytes in this packet
        self.fileptr = fileptr
        self.fileptr.seek(numbytes, 1) # skip the record; decoding is deferred to read()
        self.name = "GSFHeader"

    def __str__(self):
        '''pretty print this class'''
        return pprint.pformat(self.__dict__)

    def read(self):
        '''decode the 12 byte, NUL padded GSF version string into self.version'''
        self.fileptr.seek(self.offset + self.hdrlen, 0) # rewind to the record payload
        fmt = '=12s'
        raw = self.fileptr.read(struct.calcsize(fmt))
        (version_bytes,) = struct.unpack(fmt, raw)
        self.version = version_bytes.decode('utf-8').rstrip('\x00')
        return
###############################################################################
class GSFREADER:
    '''sequential reader for Generic Sensor Format (GSF) files.
    The file is memory mapped; readDatagram() returns lazily decoded datagram
    objects so callers only pay for the records they actually read().'''

    def __init__(self, filename, loadscalefactors=False):
        '''open and memory-map the file; optionally preload the ping scale factors'''
        if not os.path.isfile(filename):
            print ("file not found:", filename)
        self.fileName = filename
        self.fileSize = os.path.getsize(filename)
        f = open(filename, 'r+b')
        self.fileptr = mmap.mmap(f.fileno(), 0)
        self.hdrfmt = ">LL" # record header: 4 byte size + 4 byte packed record identifier
        self.hdrlen = struct.calcsize(self.hdrfmt)
        self.scalefactors = []
        if loadscalefactors:
            self.scalefactors = self.loadscalefactors()

    def moreData(self):
        '''number of bytes remaining in the file'''
        bytesRemaining = self.fileSize - self.fileptr.tell()
        return bytesRemaining

    def currentPtr(self):
        '''current read position in the file'''
        return self.fileptr.tell()

    def close(self):
        '''
        close the file
        '''
        self.fileptr.close()

    def rewind(self):
        '''
        go back to start of file
        '''
        self.fileptr.seek(0, 0)

    def __str__(self):
        '''
        pretty print this class
        '''
        return pprint.pformat(vars(self))

    def readDatagramBytes(self, offset, byteCount):
        '''read the entire raw bytes for the datagram without changing the file pointer. this is used for file conditioning'''
        curr = self.fileptr.tell()
        self.fileptr.seek(offset, 0) # move to the start of the record
        data = self.fileptr.read(byteCount)
        self.fileptr.seek(curr, 0) # restore the original position
        return data

    def loadscalefactors(self):
        '''
        rewind, decode the first SWATH_BATHYMETRY record to obtain the scale
        factors, then restore the original position. The factors apply to every ping.
        '''
        curr = self.fileptr.tell()
        self.rewind()
        while self.moreData():
            numberofbytes, recordidentifier, datagram = self.readDatagram()
            if recordidentifier == SWATH_BATHYMETRY:
                datagram.read()
                self.fileptr.seek(curr, 0)
                return datagram.scalefactors
        self.fileptr.seek(curr, 0)
        return None

    def loadnavigation(self):
        '''
        rewind, collect [time, longitude, latitude] from every ping header
        (header-only decode for speed), then restore the original position.
        '''
        navigation = []
        curr = self.fileptr.tell()
        self.rewind()
        while self.moreData():
            numberofbytes, recordidentifier, datagram = self.readDatagram()
            if recordidentifier == SWATH_BATHYMETRY:
                datagram.read(True)
                navigation.append([datagram.time + datagram.pingnanotime/1000000000.0, datagram.longitude, datagram.latitude])
        self.fileptr.seek(curr, 0)
        print ("Navigation records loaded:", len(navigation))
        return navigation

    def getrecordcount(self):
        '''
        rewind, count the number of ping records as fast as possible. useful for progress bars
        '''
        numpings = 0
        curr = self.fileptr.tell()
        self.rewind()
        while self.moreData():
            numberofbytes, recordidentifier, datagram = self.readDatagram()
            if recordidentifier == SWATH_BATHYMETRY:
                numpings += 1
        self.fileptr.seek(curr, 0)
        return numpings

    def readDatagram(self):
        '''peek at the next record header and wrap the record in a lazily
        decoded datagram class. Returns (numberofbytes, recordidentifier, datagram).'''
        numberofbytes, recordidentifier, haschecksum, hdrlen = self.sniffDatagramHeader()
        if recordidentifier == HEADER:
            # only decode if the user calls read(); this keeps scanning fast
            dg = GSFHEADER(self.fileptr, numberofbytes, recordidentifier, hdrlen)
            return numberofbytes, recordidentifier, dg
        elif recordidentifier == SWATH_BATHYMETRY:
            dg = SWATH_BATHYMETRY_PING(self.fileptr, numberofbytes, recordidentifier, hdrlen)
            dg.scalefactors = self.scalefactors
            return numberofbytes, recordidentifier, dg
        else:
            dg = UNKNOWN_RECORD(self.fileptr, numberofbytes, recordidentifier, hdrlen)
            return numberofbytes, recordidentifier, dg

    def sniffDatagramHeader(self):
        '''peek at the next record header WITHOUT consuming it.
        Returns a 4-tuple: (total record size including header and optional
        checksum, record identifier, checksum flag, header length to skip
        when decoding the payload).
        BUG FIX: the checksum branch previously consumed the checksum and
        returned early with a 3-tuple, crashing every caller that unpacks
        four values; all paths now rewind and return a 4-tuple.'''
        curr = self.fileptr.tell()
        if (self.fileSize - curr) < self.hdrlen:
            # we have reached the end of the file, so park the pointer at EOF and quit
            self.fileptr.seek(self.fileSize, 0)
            return (0, 0, False, 0)
        data = self.fileptr.read(self.hdrlen)
        s = struct.unpack(self.hdrfmt, data)
        sizeofdata = s[0]
        recordidentifier = s[1]
        haschecksum = recordidentifier & 0x80000000 # top bit flags a 4 byte checksum after the header
        temp = recordidentifier & 0x7FC00000
        reserved = (temp >> 22)
        recordidentifier = (recordidentifier & 0x003FFFFF)
        # now reset the file pointer to the start of the record; the datagram
        # class constructors re-read (and skip) from here
        self.fileptr.seek(curr, 0)
        if haschecksum:
            return (sizeofdata + self.hdrlen + 4, recordidentifier, haschecksum, self.hdrlen + 4)
        else:
            return (sizeofdata + self.hdrlen, recordidentifier, haschecksum, self.hdrlen )
def isBitSet(int_type, offset):
    '''return True if the bit at 'offset' (0-based, LSB first) is set in int_type.
    (The previous version computed an unused 'mask' local; removed.)'''
    return (int_type & (1 << offset)) != 0
###############################################################################
def createOutputFileName(path):
    '''Create a valid output filename. If the file already exists the name is
    auto-incremented (name_1.ext, name_2.ext, ...) until it is unique.
    Missing parent directories are created.'''
    path = os.path.expanduser(path)
    folder = os.path.dirname(path)
    # guard: a bare filename has an empty dirname; os.makedirs('') would raise
    if folder and not os.path.exists(folder):
        os.makedirs(folder)
    if not os.path.exists(path):
        return path
    # the file exists: append _1, _2, ... before the extension until unique
    root, ext = os.path.splitext(path)
    folder = os.path.dirname(root)
    fname = os.path.basename(root)
    candidate = fname + ext
    index = 1
    existing = set(os.listdir(folder or '.'))
    while candidate in existing:
        candidate = "{}_{}{}".format(fname, index, ext)
        index += 1
    return os.path.join(folder, candidate)
###############################################################################
class cBeam:
    '''per-beam container used during backscatter ARC computation:
    captures the four detection fields plus accumulators for the sample statistics.'''

    def __init__(self, beamDetail, angle):
        # unpack the four detection fields from the beam detail record
        self.sortingDirection, self.detectionInfo, self.numberOfSamplesPerBeam, self.centreSampleNumber = beamDetail[:4]
        self.sector = 0
        self.takeOffAngle = angle # used for ARC computation
        # accumulators for the backscatter ARC computation process
        self.sampleSum = 0
        self.sampleMin = 999
        self.sampleMax = -999
        self.samples = []
        self.name = "beam"

    def __str__(self):
        '''pretty print this class'''
        return pprint.pformat(self.__dict__)
###############################################################################
if __name__ == "__main__":
    main()  # CLI entry point; main() is expected to be defined earlier in this module
# def testR2SonicAdjustment():
# '''
# This test code confirms the results are in alignment with those from Norm Campbell at CSIRO who kindly provided the code in F77
# '''
# # adjusted backscatter -38.6
# # adjusted backscatter -47.6
# # adjusted backscatter -27.5
# # adjusted backscatter -36.6
# # adjusted backscatter -35.5
# S1_angle = -58.0
# S1_twtt = 0.20588
# S1_range = 164.8
# H0_TxPower = 197.0
# H0_SoundSpeed = 1468.59
# H0_RxAbsorption = 80.0
# H0_TxBeamWidthVert = 0.0174533
# H0_TxBeamWidthHoriz = 0.0087266
# H0_TxPulseWidth = 0.000275
# H0_RxSpreading = 35.0
# H0_RxGain = 8.0
# H0_VTX_Offset = -21.0 / 100.
# n_snpt_val = 470
# S1_uPa = n_snpt_val
# z_snpt_BS_dB = 20. * math.log10(S1_uPa)
# adjusted = backscatteradjustment( S1_angle, S1_twtt, S1_range, S1_uPa, H0_TxPower, H0_SoundSpeed, H0_RxAbsorption, H0_TxBeamWidthVert, H0_TxBeamWidthHoriz, H0_TxPulseWidth, H0_RxSpreading, H0_RxGain, H0_VTX_Offset, z_snpt_BS_dB)
# print (adjusted)
# S1_angle = -58.0
# S1_twtt = 0.20588
# S1_range = 164.8
# H0_TxPower = 206.0
# H0_SoundSpeed = 1468.59
# H0_RxAbsorption = 80.0
# H0_TxBeamWidthVert = 0.0174533
# H0_TxBeamWidthHoriz = 0.0087266
# H0_TxPulseWidth = 0.000275
# H0_RxSpreading = 35.0
# H0_RxGain = 8.0
# H0_VTX_Offset = -21.0 / 100.
# n_snpt_val = 470
# S1_uPa = n_snpt_val
# z_snpt_BS_dB = 20. * math.log10 ( S1_uPa )
# adjusted = backscatteradjustment( S1_angle, S1_twtt, S1_range, S1_uPa, H0_TxPower, H0_SoundSpeed, H0_RxAbsorption, H0_TxBeamWidthVert, H0_TxBeamWidthHoriz, H0_TxPulseWidth, H0_RxSpreading, H0_RxGain, H0_VTX_Offset, z_snpt_BS_dB)
# print (adjusted)
# S1_angle = - 58.0
# S1_twtt = 0.20588
# S1_range = 164.8
# H0_TxPower = 197.0
# H0_SoundSpeed = 1468.59
# H0_RxAbsorption = 80.0
# H0_TxBeamWidthVert = 0.0174533
# H0_TxBeamWidthHoriz = 0.0087266
# H0_TxPulseWidth = 0.000275
# H0_RxSpreading = 30.0
# H0_RxGain = 8.0
# H0_VTX_Offset = -21.0 / 100.
# n_snpt_val = 470
# S1_uPa = n_snpt_val
# z_snpt_BS_dB = 20. * math.log10 ( S1_uPa )
# adjusted = backscatteradjustment( S1_angle, S1_twtt, S1_range, S1_uPa, H0_TxPower, H0_SoundSpeed, H0_RxAbsorption, H0_TxBeamWidthVert, H0_TxBeamWidthHoriz, H0_TxPulseWidth, H0_RxSpreading, H0_RxGain, H0_VTX_Offset, z_snpt_BS_dB)
# print (adjusted)
# S1_angle = - 58.0
# S1_twtt = 0.20588
# S1_range = 164.8
# H0_TxPower = 197.0
# H0_SoundSpeed = 1468.59
# H0_RxAbsorption = 80.0
# H0_TxBeamWidthVert = 0.0174533
# H0_TxBeamWidthHoriz = 0.0087266
# H0_TxPulseWidth = 0.000275
# H0_RxSpreading = 35.0
# H0_RxGain = 6.0
# H0_VTX_Offset = -21.0 / 100.
# n_snpt_val = 470
# S1_uPa = n_snpt_val
# z_snpt_BS_dB = 20. * math.log10 ( S1_uPa )
# adjusted = backscatteradjustment( S1_angle, S1_twtt, S1_range, S1_uPa, H0_TxPower, H0_SoundSpeed, H0_RxAbsorption, H0_TxBeamWidthVert, H0_TxBeamWidthHoriz, H0_TxPulseWidth, H0_RxSpreading, H0_RxGain, H0_VTX_Offset, z_snpt_BS_dB)
# print (adjusted)
# S1_angle = - 58.0
# S1_twtt = 0.20588
# S1_range = 164.8
# H0_TxPower = 207.0
# H0_SoundSpeed = 1468.59
# H0_RxAbsorption = 80.0
# H0_TxBeamWidthVert = 0.0174533
# H0_TxBeamWidthHoriz = 0.0087266
# H0_TxPulseWidth = 0.000275
# H0_RxSpreading = 30.0
# H0_RxGain = 6.0
# H0_VTX_Offset = -21.0 / 100.
# n_snpt_val = 470
# S1_uPa = n_snpt_val
# z_snpt_BS_dB = 20. * math.log10 ( S1_uPa )
# adjusted = backscatteradjustment( S1_angle, S1_twtt, S1_range, S1_uPa, H0_TxPower, H0_SoundSpeed, H0_RxAbsorption, H0_TxBeamWidthVert, H0_TxBeamWidthHoriz, H0_TxPulseWidth, H0_RxSpreading, H0_RxGain, H0_VTX_Offset, z_snpt_BS_dB)
# print (adjusted)
# return
###############################################################################
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import os
import hashlib
import ast
import threading
import random
import time
import math
import json
import copy
from operator import itemgetter
from util import print_msg, print_error, NotEnoughFunds
from util import profiler
from bitcoin import *
from account import *
from version import *
from transaction import Transaction
from plugins import run_hook
import bitcoin
from synchronizer import WalletSynchronizer
from mnemonic import Mnemonic
import paymentrequest
# internal ID for imported account
IMPORTED_ACCOUNT = '/x'
class WalletStorage(object):
    '''Thread-safe, JSON-backed persistent key/value store for the wallet file.
    Values are deep-copied on get/put so callers can never mutate internal state,
    and writes are atomic (temp file + fsync + rename).'''

    def __init__(self, path):
        # RLock: put() calls write() while already holding the lock
        self.lock = threading.RLock()
        self.data = {}
        self.path = path
        self.file_exists = False
        print_error( "wallet path", self.path )
        if self.path:
            self.read(self.path)

    def read(self, path):
        """Read the contents of the wallet file."""
        try:
            with open(self.path, "r") as f:
                data = f.read()
        except IOError:
            # a missing file simply means a brand new (empty) wallet
            return
        try:
            self.data = json.loads(data)
        except:
            # legacy wallets stored a python-repr dict rather than JSON
            try:
                d = ast.literal_eval(data) #parse raw data from reading wallet file
            except Exception as e:
                raise IOError("Cannot read wallet file '%s'" % self.path)
            self.data = {}
            # In old versions of Electrum labels were latin1 encoded, this fixes breakage.
            labels = d.get('labels', {})
            for i, label in labels.items():
                try:
                    unicode(label)
                except UnicodeDecodeError:
                    d['labels'][i] = unicode(label.decode('latin1'))
            for key, value in d.items():
                try:
                    json.dumps(key)
                    json.dumps(value)
                except:
                    # silently drop entries that cannot round-trip through JSON
                    print_error('Failed to convert label to json format', key)
                    continue
                self.data[key] = value
        self.file_exists = True

    def get(self, key, default=None):
        '''return a deep copy of the stored value, or 'default' when absent'''
        with self.lock:
            v = self.data.get(key)
            if v is None:
                v = default
            else:
                v = copy.deepcopy(v)
            return v

    def put(self, key, value, save = True):
        '''store a deep copy of 'value' under 'key'; None deletes the key.
        Non-JSON-serialisable keys/values are rejected with a log message.'''
        try:
            json.dumps(key)
            json.dumps(value)
        except:
            print_error("json error: cannot save", key)
            return
        with self.lock:
            if value is not None:
                self.data[key] = copy.deepcopy(value)
            elif key in self.data:
                self.data.pop(key)
            if save:
                self.write()

    def write(self):
        '''atomically persist self.data to disk as pretty-printed JSON'''
        assert not threading.currentThread().isDaemon()
        temp_path = "%s.tmp.%s" % (self.path, os.getpid())
        s = json.dumps(self.data, indent=4, sort_keys=True)
        with open(temp_path, "w") as f:
            f.write(s)
            f.flush()
            os.fsync(f.fileno())
        # perform atomic write on POSIX systems
        try:
            os.rename(temp_path, self.path)
        except:
            # on Windows rename fails when the target exists: remove then rename
            os.remove(self.path)
            os.rename(temp_path, self.path)
        if 'ANDROID_DATA' not in os.environ:
            # restrict the wallet file to owner read/write
            import stat
            os.chmod(self.path,stat.S_IREAD | stat.S_IWRITE)
class Abstract_Wallet(object):
"""
Wallet classes are created to handle various address generation methods.
Completion states (watching-only, single account, no seed, etc) are handled inside classes.
"""
    def __init__(self, storage):
        """Initialize wallet state from a storage object.

        Loads persisted fields, accounts, cached transactions and pending
        payment requests, then rebuilds derived indexes.  Order matters:
        load_accounts/load_transactions must run before check_history.
        """
        self.storage = storage
        self.electrum_version = ELECTRUM_VERSION
        self.gap_limit_for_change = 6 # constant
        # saved fields
        self.seed_version = storage.get('seed_version', NEW_SEED_VERSION)
        self.use_change = storage.get('use_change',True)
        self.use_encryption = storage.get('use_encryption', False)
        self.seed = storage.get('seed', '') # encrypted
        self.labels = storage.get('labels', {})
        self.frozen_addresses = set(storage.get('frozen_addresses',[]))
        self.stored_height = storage.get('stored_height', 0) # last known height (for offline mode)
        self.history = storage.get('addr_history',{}) # address -> list(txid, height)
        self.fee_per_kb = int(storage.get('fee_per_kb', RECOMMENDED_FEE))
        # This attribute is set when wallet.start_threads is called.
        self.synchronizer = None
        # imported_keys is deprecated. The GUI should call convert_imported_keys
        self.imported_keys = self.storage.get('imported_keys',{})
        self.load_accounts()
        self.load_transactions()
        self.build_reverse_history()
        # load requests
        self.receive_requests = self.storage.get('payment_requests', {})
        # spv
        self.verifier = None
        # Transactions pending verification. Each value is the transaction height. Access with self.lock.
        self.unverified_tx = {}
        # Verified transactions. Each value is a (height, timestamp, block_pos) tuple. Access with self.lock.
        self.verified_tx = storage.get('verified_tx3',{})
        # there is a difference between wallet.up_to_date and interface.is_up_to_date()
        # interface.is_up_to_date() returns true when all requests have been answered and processed
        # wallet.up_to_date is true when the wallet is synchronized (stronger requirement)
        self.up_to_date = False
        self.lock = threading.Lock()
        self.transaction_lock = threading.Lock()
        self.tx_event = threading.Event()
        self.check_history()
        # save wallet type the first time
        if self.storage.get('wallet_type') is None:
            self.storage.put('wallet_type', self.wallet_type, True)
    @profiler
    def load_transactions(self):
        """Load cached transactions and the per-tx input/output maps.

        Any stored transaction referenced neither by txi/txo nor by
        pruned_txo is dropped as stale.
        """
        self.txi = self.storage.get('txi', {})
        self.txo = self.storage.get('txo', {})
        self.pruned_txo = self.storage.get('pruned_txo', {})
        tx_list = self.storage.get('transactions', {})
        self.transactions = {}
        for tx_hash, raw in tx_list.items():
            tx = Transaction(raw)
            self.transactions[tx_hash] = tx
            if self.txi.get(tx_hash) is None and self.txo.get(tx_hash) is None and (tx_hash not in self.pruned_txo.values()):
                print_error("removing unreferenced tx", tx_hash)
                self.transactions.pop(tx_hash)
@profiler
def save_transactions(self):
with self.transaction_lock:
tx = {}
for k,v in self.transactions.items():
tx[k] = str(v)
# Flush storage only with the last put
self.storage.put('transactions', tx, False)
self.storage.put('txi', self.txi, False)
self.storage.put('txo', self.txo, False)
self.storage.put('pruned_txo', self.pruned_txo, True)
def clear_history(self):
with self.transaction_lock:
self.txi = {}
self.txo = {}
self.pruned_txo = {}
self.save_transactions()
with self.lock:
self.history = {}
self.tx_addr_hist = {}
self.storage.put('addr_history', self.history, True)
@profiler
def build_reverse_history(self):
self.tx_addr_hist = {}
for addr, hist in self.history.items():
for tx_hash, h in hist:
s = self.tx_addr_hist.get(tx_hash, set())
s.add(addr)
self.tx_addr_hist[tx_hash] = s
    @profiler
    def check_history(self):
        """Drop history for addresses no longer in the wallet and re-index
        transactions that lack txi/txo entries.

        NOTE(review): relies on Python-2 dict.items() returning a list,
        since self.history is popped while iterating.
        """
        save = False
        for addr, hist in self.history.items():
            if not self.is_mine(addr):
                # Address was removed (e.g. deleted imported key).
                self.history.pop(addr)
                save = True
                continue
            for tx_hash, tx_height in hist:
                # Skip txs that already have index entries (or are pruned).
                if tx_hash in self.pruned_txo.values() or self.txi.get(tx_hash) or self.txo.get(tx_hash):
                    continue
                tx = self.transactions.get(tx_hash)
                if tx is not None:
                    tx.deserialize()
                    self.add_transaction(tx_hash, tx, tx_height)
        if save:
            self.storage.put('addr_history', self.history, True)
# wizard action
def get_action(self):
pass
def basename(self):
return os.path.basename(self.storage.path)
def convert_imported_keys(self, password):
for k, v in self.imported_keys.items():
sec = pw_decode(v, password)
pubkey = public_key_from_private_key(sec)
address = public_key_to_bc_address(pubkey.decode('hex'))
if address != k:
raise InvalidPassword()
self.import_key(sec, password)
self.imported_keys.pop(k)
self.storage.put('imported_keys', self.imported_keys)
def load_accounts(self):
self.accounts = {}
d = self.storage.get('accounts', {})
for k, v in d.items():
if self.wallet_type == 'old' and k in [0, '0']:
v['mpk'] = self.storage.get('master_public_key')
self.accounts['0'] = OldAccount(v)
elif v.get('imported'):
self.accounts[k] = ImportedAccount(v)
elif v.get('xpub'):
self.accounts[k] = BIP32_Account(v)
elif v.get('pending'):
try:
self.accounts[k] = PendingAccount(v)
except:
pass
else:
print_error("cannot load account", v)
def synchronize(self):
pass
def can_create_accounts(self):
return False
def set_up_to_date(self,b):
with self.lock: self.up_to_date = b
def is_up_to_date(self):
with self.lock: return self.up_to_date
def update(self):
self.up_to_date = False
while not self.is_up_to_date():
time.sleep(0.1)
def is_imported(self, addr):
account = self.accounts.get(IMPORTED_ACCOUNT)
if account:
return addr in account.get_addresses(0)
else:
return False
def has_imported_keys(self):
account = self.accounts.get(IMPORTED_ACCOUNT)
return account is not None
def import_key(self, sec, password):
assert self.can_import(), 'This wallet cannot import private keys'
try:
pubkey = public_key_from_private_key(sec)
address = public_key_to_bc_address(pubkey.decode('hex'))
except Exception:
raise Exception('Invalid private key')
if self.is_mine(address):
raise Exception('Address already in wallet')
if self.accounts.get(IMPORTED_ACCOUNT) is None:
self.accounts[IMPORTED_ACCOUNT] = ImportedAccount({'imported':{}})
self.accounts[IMPORTED_ACCOUNT].add(address, pubkey, sec, password)
self.save_accounts()
# force resynchronization, because we need to re-run add_transaction
if address in self.history:
self.history.pop(address)
if self.synchronizer:
self.synchronizer.add(address)
return address
def delete_imported_key(self, addr):
account = self.accounts[IMPORTED_ACCOUNT]
account.remove(addr)
if not account.get_addresses(0):
self.accounts.pop(IMPORTED_ACCOUNT)
self.save_accounts()
def set_label(self, name, text = None):
changed = False
old_text = self.labels.get(name)
if text:
if old_text != text:
self.labels[name] = text
changed = True
else:
if old_text:
self.labels.pop(name)
changed = True
if changed:
self.storage.put('labels', self.labels, True)
run_hook('set_label', name, text, changed)
return changed
def addresses(self, include_change = True):
return list(addr for acc in self.accounts for addr in self.get_account_addresses(acc, include_change))
def is_mine(self, address):
return address in self.addresses(True)
def is_change(self, address):
if not self.is_mine(address): return False
acct, s = self.get_address_index(address)
if s is None: return False
return s[0] == 1
def get_address_index(self, address):
for acc_id in self.accounts:
for for_change in [0,1]:
addresses = self.accounts[acc_id].get_addresses(for_change)
if address in addresses:
return acc_id, (for_change, addresses.index(address))
raise Exception("Address not found", address)
def get_private_key(self, address, password):
if self.is_watching_only():
return []
account_id, sequence = self.get_address_index(address)
return self.accounts[account_id].get_private_key(sequence, self, password)
def get_public_keys(self, address):
account_id, sequence = self.get_address_index(address)
return self.accounts[account_id].get_pubkeys(*sequence)
def sign_message(self, address, message, password):
keys = self.get_private_key(address, password)
assert len(keys) == 1
sec = keys[0]
key = regenerate_key(sec)
compressed = is_compressed(sec)
return key.sign_message(message, compressed, address)
def decrypt_message(self, pubkey, message, password):
address = public_key_to_bc_address(pubkey.decode('hex'))
keys = self.get_private_key(address, password)
secret = keys[0]
ec = regenerate_key(secret)
decrypted = ec.decrypt_message(message)
return decrypted
def add_unverified_tx(self, tx_hash, tx_height):
if tx_height > 0:
with self.lock:
self.unverified_tx[tx_hash] = tx_height
def add_verified_tx(self, tx_hash, info):
with self.lock:
self.verified_tx[tx_hash] = info # (tx_height, timestamp, pos)
self.storage.put('verified_tx3', self.verified_tx, True)
self.network.trigger_callback('updated')
def get_unverified_txs(self):
'''Returns a list of tuples (tx_hash, height) that are unverified and not beyond local height'''
txs = []
with self.lock:
for tx_hash, tx_height in self.unverified_tx.items():
# do not request merkle branch before headers are available
if tx_hash not in self.verified_tx and tx_height <= self.get_local_height():
txs.append((tx_hash, tx_height))
return txs
def undo_verifications(self, height):
'''Used by the verifier when a reorg has happened'''
txs = []
with self.lock:
for tx_hash, item in self.verified_tx:
tx_height, timestamp, pos = item
if tx_height >= height:
self.verified_tx.pop(tx_hash, None)
txs.append(tx_hash)
return txs
def get_local_height(self):
""" return last known height if we are offline """
return self.network.get_local_height() if self.network else self.stored_height
def get_confirmations(self, tx):
""" return the number of confirmations of a monitored transaction. """
with self.lock:
if tx in self.verified_tx:
height, timestamp, pos = self.verified_tx[tx]
conf = (self.get_local_height() - height + 1)
if conf <= 0: timestamp = None
elif tx in self.unverified_tx:
conf = -1
timestamp = None
else:
conf = 0
timestamp = None
return conf, timestamp
def get_txpos(self, tx_hash):
"return position, even if the tx is unverified"
with self.lock:
x = self.verified_tx.get(tx_hash)
y = self.unverified_tx.get(tx_hash)
if x:
height, timestamp, pos = x
return height, pos
elif y:
return y, 0
else:
return 1e12, 0
def is_found(self):
return self.history.values() != [[]] * len(self.history)
def get_num_tx(self, address):
""" return number of transactions where address is involved """
return len(self.history.get(address, []))
def get_tx_delta(self, tx_hash, address):
"effect of tx on address"
# pruned
if tx_hash in self.pruned_txo.values():
return None
delta = 0
# substract the value of coins sent from address
d = self.txi.get(tx_hash, {}).get(address, [])
for n, v in d:
delta -= v
# add the value of the coins received at address
d = self.txo.get(tx_hash, {}).get(address, [])
for n, v, cb in d:
delta += v
return delta
def get_wallet_delta(self, tx):
""" effect of tx on wallet """
addresses = self.addresses(True)
is_relevant = False
is_send = False
is_pruned = False
is_partial = False
v_in = v_out = v_out_mine = 0
for item in tx.inputs:
addr = item.get('address')
if addr in addresses:
is_send = True
is_relevant = True
d = self.txo.get(item['prevout_hash'], {}).get(addr, [])
for n, v, cb in d:
if n == item['prevout_n']:
value = v
break
else:
value = None
if value is None:
is_pruned = True
else:
v_in += value
else:
is_partial = True
if not is_send:
is_partial = False
for addr, value in tx.get_outputs():
v_out += value
if addr in addresses:
v_out_mine += value
is_relevant = True
if is_pruned:
# some inputs are mine:
fee = None
if is_send:
v = v_out_mine - v_out
else:
# no input is mine
v = v_out_mine
else:
v = v_out_mine - v_in
if is_partial:
# some inputs are mine, but not all
fee = None
is_send = v < 0
else:
# all inputs are mine
fee = v_out - v_in
return is_relevant, is_send, v, fee
def get_addr_io(self, address):
h = self.history.get(address, [])
received = {}
sent = {}
for tx_hash, height in h:
l = self.txo.get(tx_hash, {}).get(address, [])
for n, v, is_cb in l:
received[tx_hash + ':%d'%n] = (height, v, is_cb)
for tx_hash, height in h:
l = self.txi.get(tx_hash, {}).get(address, [])
for txi, v in l:
sent[txi] = height
return received, sent
def get_addr_utxo(self, address):
coins, spent = self.get_addr_io(address)
for txi in spent:
coins.pop(txi)
return coins
# return the total amount ever received by an address
def get_addr_received(self, address):
received, sent = self.get_addr_io(address)
return sum([v for height, v, is_cb in received.values()])
# return the balance of a bitcoin address: confirmed and matured, unconfirmed, unmatured
def get_addr_balance(self, address):
received, sent = self.get_addr_io(address)
c = u = x = 0
for txo, (tx_height, v, is_cb) in received.items():
if is_cb and tx_height + COINBASE_MATURITY > self.get_local_height():
x += v
elif tx_height > 0:
c += v
else:
u += v
if txo in sent:
if sent[txo] > 0:
c -= v
else:
u -= v
return c, u, x
def get_spendable_coins(self, domain = None, exclude_frozen = True):
coins = []
if domain is None:
domain = self.addresses(True)
if exclude_frozen:
domain = set(domain) - self.frozen_addresses
for addr in domain:
c = self.get_addr_utxo(addr)
for txo, v in c.items():
tx_height, value, is_cb = v
if is_cb and tx_height + COINBASE_MATURITY > self.get_local_height():
continue
prevout_hash, prevout_n = txo.split(':')
output = {
'address':addr,
'value':value,
'prevout_n':int(prevout_n),
'prevout_hash':prevout_hash,
'height':tx_height,
'coinbase':is_cb
}
coins.append((tx_height, output))
continue
# sort by age
if coins:
coins = sorted(coins)
if coins[-1][0] != 0:
while coins[0][0] == 0:
coins = coins[1:] + [ coins[0] ]
return [value for height, value in coins]
def get_account_name(self, k):
return self.labels.get(k, self.accounts[k].get_name(k))
def get_account_names(self):
account_names = {}
for k in self.accounts.keys():
account_names[k] = self.get_account_name(k)
return account_names
def get_account_addresses(self, acc_id, include_change=True):
if acc_id is None:
addr_list = self.addresses(include_change)
elif acc_id in self.accounts:
acc = self.accounts[acc_id]
addr_list = acc.get_addresses(0)
if include_change:
addr_list += acc.get_addresses(1)
return addr_list
def get_account_from_address(self, addr):
"Returns the account that contains this address, or None"
for acc_id in self.accounts: # similar to get_address_index but simpler
if addr in self.get_account_addresses(acc_id):
return acc_id
return None
def get_account_balance(self, account):
return self.get_balance(self.get_account_addresses(account))
def get_frozen_balance(self):
return self.get_balance(self.frozen_addresses)
def get_balance(self, domain=None):
if domain is None:
domain = self.addresses(True)
cc = uu = xx = 0
for addr in domain:
c, u, x = self.get_addr_balance(addr)
cc += c
uu += u
xx += x
return cc, uu, xx
def set_fee(self, fee, save = True):
self.fee_per_kb = fee
self.storage.put('fee_per_kb', self.fee_per_kb, save)
def get_address_history(self, address):
with self.lock:
return self.history.get(address, [])
def get_status(self, h):
if not h:
return None
status = ''
for tx_hash, height in h:
status += tx_hash + ':%d:' % height
return hashlib.sha256( status ).digest().encode('hex')
def find_pay_to_pubkey_address(self, prevout_hash, prevout_n):
dd = self.txo.get(prevout_hash, {})
for addr, l in dd.items():
for n, v, is_cb in l:
if n == prevout_n:
print_error("found pay-to-pubkey address:", addr)
return addr
    def add_transaction(self, tx_hash, tx, tx_height):
        """Index a transaction into txi/txo and store it.

        Inputs spending our own outputs are recorded in self.txi with
        their value; inputs whose previous output is not yet known are
        parked in self.pruned_txo until that output shows up.  Outputs to
        our addresses are recorded in self.txo and, when a parked spender
        exists, its value is back-filled into that spender's txi entry.
        """
        is_coinbase = tx.inputs[0].get('is_coinbase') == True
        with self.transaction_lock:
            # add inputs
            self.txi[tx_hash] = d = {}
            for txi in tx.inputs:
                addr = txi.get('address')
                if not txi.get('is_coinbase'):
                    prevout_hash = txi['prevout_hash']
                    prevout_n = txi['prevout_n']
                    ser = prevout_hash + ':%d'%prevout_n
                if addr == "(pubkey)":
                    # Resolve raw pay-to-pubkey scripts to an address we track.
                    addr = self.find_pay_to_pubkey_address(prevout_hash, prevout_n)
                # find value from prev output
                if addr and self.is_mine(addr):
                    dd = self.txo.get(prevout_hash, {})
                    for n, v, is_cb in dd.get(addr, []):
                        if n == prevout_n:
                            if d.get(addr) is None:
                                d[addr] = []
                            d[addr].append((ser, v))
                            break
                    else:
                        # Previous output unknown: remember the spend so the
                        # value can be filled in when the funding tx arrives.
                        self.pruned_txo[ser] = tx_hash
            # add outputs
            self.txo[tx_hash] = d = {}
            for n, txo in enumerate(tx.outputs):
                ser = tx_hash + ':%d'%n
                _type, x, v = txo
                if _type == 'address':
                    addr = x
                elif _type == 'pubkey':
                    addr = public_key_to_bc_address(x.decode('hex'))
                else:
                    addr = None
                if addr and self.is_mine(addr):
                    if d.get(addr) is None:
                        d[addr] = []
                    d[addr].append((n, v, is_coinbase))
                # give v to txi that spends me
                next_tx = self.pruned_txo.get(ser)
                if next_tx is not None:
                    self.pruned_txo.pop(ser)
                    dd = self.txi.get(next_tx, {})
                    if dd.get(addr) is None:
                        dd[addr] = []
                    dd[addr].append((ser, v))
            # save
            self.transactions[tx_hash] = tx
    def remove_transaction(self, tx_hash, tx_height):
        """Undo the indexing done by add_transaction for tx_hash.

        Spends of this tx's outputs are moved back into pruned_txo, and
        the tx's own txi/txo entries are removed.  NOTE(review): relies
        on Python-2 dict.items() returning a list, since pruned_txo and
        the nested dicts are mutated while iterating.
        """
        with self.transaction_lock:
            print_error("removing tx from history", tx_hash)
            #tx = self.transactions.pop(tx_hash)
            # Drop parked spends that pointed at this tx.
            for ser, hh in self.pruned_txo.items():
                if hh == tx_hash:
                    self.pruned_txo.pop(ser)
            # add tx to pruned_txo, and undo the txi addition
            for next_tx, dd in self.txi.items():
                for addr, l in dd.items():
                    ll = l[:]
                    for item in ll:
                        ser, v = item
                        prev_hash, prev_n = ser.split(':')
                        if prev_hash == tx_hash:
                            l.remove(item)
                            self.pruned_txo[ser] = next_tx
                    if l == []:
                        dd.pop(addr)
                    else:
                        dd[addr] = l
            self.txi.pop(tx_hash)
            self.txo.pop(tx_hash)
def receive_tx_callback(self, tx_hash, tx, tx_height):
self.add_transaction(tx_hash, tx, tx_height)
#self.network.pending_transactions_for_notifications.append(tx)
self.add_unverified_tx(tx_hash, tx_height)
def receive_history_callback(self, addr, hist):
with self.lock:
old_hist = self.history.get(addr, [])
for tx_hash, height in old_hist:
if (tx_hash, height) not in hist:
# remove tx if it's not referenced in histories
self.tx_addr_hist[tx_hash].remove(addr)
if not self.tx_addr_hist[tx_hash]:
self.remove_transaction(tx_hash, height)
self.history[addr] = hist
self.storage.put('addr_history', self.history, True)
for tx_hash, tx_height in hist:
# add it in case it was previously unconfirmed
self.add_unverified_tx(tx_hash, tx_height)
# add reference in tx_addr_hist
s = self.tx_addr_hist.get(tx_hash, set())
s.add(addr)
self.tx_addr_hist[tx_hash] = s
# if addr is new, we have to recompute txi and txo
tx = self.transactions.get(tx_hash)
if tx is not None and self.txi.get(tx_hash, {}).get(addr) is None and self.txo.get(tx_hash, {}).get(addr) is None:
tx.deserialize()
self.add_transaction(tx_hash, tx, tx_height)
    def get_history(self, domain=None):
        """Return wallet history over `domain` as a chronologically
        ordered (oldest first) list of
        (tx_hash, conf, delta, timestamp, balance) tuples.

        `delta` is the tx's net effect on the domain (None when any
        involved address has pruned data); `balance` is the running
        balance after that tx.  Returns [] when the running balance does
        not reconcile to zero (incomplete history).
        """
        from collections import defaultdict
        # get domain
        if domain is None:
            domain = self.get_account_addresses(None)
        # 1. Get the history of each address in the domain, maintain the
        #    delta of a tx as the sum of its deltas on domain addresses
        tx_deltas = defaultdict(int)
        for addr in domain:
            h = self.get_address_history(addr)
            for tx_hash, height in h:
                delta = self.get_tx_delta(tx_hash, addr)
                # A single pruned delta poisons the whole tx's delta.
                if delta is None or tx_deltas[tx_hash] is None:
                    tx_deltas[tx_hash] = None
                else:
                    tx_deltas[tx_hash] += delta
        # 2. create sorted history
        history = []
        for tx_hash, delta in tx_deltas.items():
            conf, timestamp = self.get_confirmations(tx_hash)
            history.append((tx_hash, conf, delta, timestamp))
        history.sort(key = lambda x: self.get_txpos(x[0]))
        history.reverse()
        # 3. add balance (walk newest-first, subtracting each delta)
        c, u, x = self.get_balance(domain)
        balance = c + u + x
        h2 = []
        for item in history:
            tx_hash, conf, delta, timestamp = item
            h2.append((tx_hash, conf, delta, timestamp, balance))
            if balance is None or delta is None:
                balance = None
            else:
                balance -= delta
        h2.reverse()
        # fixme: this may happen if history is incomplete
        if balance not in [None, 0]:
            print_error("Error: history not synchronized")
            return []
        return h2
def get_label(self, tx_hash):
label = self.labels.get(tx_hash)
is_default = (label == '') or (label is None)
if is_default:
label = self.get_default_label(tx_hash)
return label, is_default
def get_default_label(self, tx_hash):
if self.txi.get(tx_hash) == {}:
d = self.txo.get(tx_hash, {})
labels = []
for addr in d.keys():
label = self.labels.get(addr)
if label:
labels.append(label)
return ', '.join(labels)
return ''
def get_tx_fee(self, tx):
# this method can be overloaded
return tx.get_fee()
def estimated_fee(self, tx):
estimated_size = len(tx.serialize(-1))/2
fee = int(self.fee_per_kb*estimated_size/1000.)
if fee < MIN_RELAY_TX_FEE: # and tx.requires_fee(self):
fee = MIN_RELAY_TX_FEE
return fee
    def make_unsigned_transaction(self, coins, outputs, fixed_fee=None, change_addr=None):
        """Build an unsigned Transaction paying `outputs` from `coins`.

        Greedily adds inputs until amount+fee is covered (raising
        NotEnoughFunds otherwise), trims inputs that turn out to be
        unnecessary, then appends a change output when change exceeds the
        dust threshold.  The fee is `fixed_fee` when given, otherwise
        estimated from the serialized tx size.
        """
        # check outputs
        for type, data, value in outputs:
            if type == 'address':
                assert is_address(data), "Address " + data + " is invalid!"
        amount = sum(map(lambda x:x[2], outputs))
        total = fee = 0
        inputs = []
        tx = Transaction.from_io(inputs, outputs)
        # add old inputs first
        for item in coins:
            v = item.get('value')
            total += v
            self.add_input_info(item)
            tx.add_input(item)
            # no need to estimate fee until we have reached desired amount
            if total < amount:
                continue
            fee = fixed_fee if fixed_fee is not None else self.estimated_fee(tx)
            if total >= amount + fee:
                break
        else:
            # Loop exhausted all coins without covering amount+fee.
            raise NotEnoughFunds()
        # remove unneeded inputs, smallest-value first
        for item in sorted(tx.inputs, key=itemgetter('value')):
            v = item.get('value')
            if total - v >= amount + fee:
                tx.inputs.remove(item)
                total -= v
                fee = fixed_fee if fixed_fee is not None else self.estimated_fee(tx)
            else:
                break
        print_error("using %d inputs"%len(tx.inputs))
        # change address
        if not change_addr:
            # send change to one of the accounts involved in the tx
            address = inputs[0].get('address')
            account, _ = self.get_address_index(address)
            if self.use_change and self.accounts[account].has_change():
                # New change addresses are created only after a few confirmations.
                # Choose an unused change address if any, otherwise take one at random
                change_addrs = self.accounts[account].get_addresses(1)[-self.gap_limit_for_change:]
                for change_addr in change_addrs:
                    if self.get_num_tx(change_addr) == 0:
                        break
                else:
                    change_addr = random.choice(change_addrs)
            else:
                change_addr = address
        # if change is above dust threshold, add a change output.
        change_amount = total - ( amount + fee )
        if fixed_fee is not None and change_amount > 0:
            # Caller pinned the fee: return any positive change, even dust.
            tx.outputs.append(('address', change_addr, change_amount))
        elif change_amount > DUST_THRESHOLD:
            tx.outputs.append(('address', change_addr, change_amount))
            # recompute fee including change output
            fee = self.estimated_fee(tx)
            # remove change output
            tx.outputs.pop()
            # if change is still above dust threshold, re-add change output.
            change_amount = total - ( amount + fee )
            if change_amount > DUST_THRESHOLD:
                tx.outputs.append(('address', change_addr, change_amount))
                print_error('change', change_amount)
            else:
                print_error('not keeping dust', change_amount)
        else:
            print_error('not keeping dust', change_amount)
        # Sort the inputs and outputs deterministically
        tx.BIP_LI01_sort()
        run_hook('make_unsigned_transaction', tx)
        return tx
def mktx(self, outputs, password, fee=None, change_addr=None, domain=None):
coins = self.get_spendable_coins(domain)
tx = self.make_unsigned_transaction(coins, outputs, fee, change_addr)
self.sign_transaction(tx, password)
return tx
def add_input_info(self, txin):
address = txin['address']
account_id, sequence = self.get_address_index(address)
account = self.accounts[account_id]
redeemScript = account.redeem_script(*sequence)
pubkeys = account.get_pubkeys(*sequence)
x_pubkeys = account.get_xpubkeys(*sequence)
# sort pubkeys and x_pubkeys, using the order of pubkeys
pubkeys, x_pubkeys = zip( *sorted(zip(pubkeys, x_pubkeys)))
txin['pubkeys'] = list(pubkeys)
txin['x_pubkeys'] = list(x_pubkeys)
txin['signatures'] = [None] * len(pubkeys)
if redeemScript:
txin['redeemScript'] = redeemScript
txin['num_sig'] = account.m
else:
txin['redeemPubkey'] = account.get_pubkey(*sequence)
txin['num_sig'] = 1
def sign_transaction(self, tx, password):
if self.is_watching_only():
return
# check that the password is correct. This will raise if it's not.
self.check_password(password)
keypairs = {}
x_pubkeys = tx.inputs_to_sign()
for x in x_pubkeys:
sec = self.get_private_key_from_xpubkey(x, password)
if sec:
keypairs[ x ] = sec
if keypairs:
tx.sign(keypairs)
run_hook('sign_transaction', tx, password)
def sendtx(self, tx):
# synchronous
h = self.send_tx(tx)
self.tx_event.wait()
return self.receive_tx(h, tx)
def send_tx(self, tx):
# asynchronous
self.tx_event.clear()
self.network.send([('blockchain.transaction.broadcast', [str(tx)])], self.on_broadcast)
return tx.hash()
def on_broadcast(self, r):
self.tx_result = r.get('result')
self.tx_event.set()
def receive_tx(self, tx_hash, tx):
out = self.tx_result
if out != tx_hash:
return False, "error: " + out
run_hook('receive_tx', tx, self)
return True, out
def update_password(self, old_password, new_password):
if new_password == '':
new_password = None
if self.has_seed():
decoded = self.get_seed(old_password)
self.seed = pw_encode( decoded, new_password)
self.storage.put('seed', self.seed, True)
imported_account = self.accounts.get(IMPORTED_ACCOUNT)
if imported_account:
imported_account.update_password(old_password, new_password)
self.save_accounts()
if hasattr(self, 'master_private_keys'):
for k, v in self.master_private_keys.items():
b = pw_decode(v, old_password)
c = pw_encode(b, new_password)
self.master_private_keys[k] = c
self.storage.put('master_private_keys', self.master_private_keys, True)
self.use_encryption = (new_password != None)
self.storage.put('use_encryption', self.use_encryption,True)
def is_frozen(self, addr):
return addr in self.frozen_addresses
def set_frozen_state(self, addrs, freeze):
'''Set frozen state of the addresses to FREEZE, True or False'''
if all(self.is_mine(addr) for addr in addrs):
if freeze:
self.frozen_addresses |= set(addrs)
else:
self.frozen_addresses -= set(addrs)
self.storage.put('frozen_addresses', list(self.frozen_addresses), True)
return True
return False
def set_verifier(self, verifier):
self.verifier = verifier
# review transactions that are in the history
for addr, hist in self.history.items():
for tx_hash, tx_height in hist:
# add it in case it was previously unconfirmed
self.add_unverified_tx (tx_hash, tx_height)
# if we are on a pruning server, remove unverified transactions
with self.lock:
vr = self.verified_tx.keys() + self.unverified_tx.keys()
for tx_hash in self.transactions.keys():
if tx_hash not in vr:
print_error("removing transaction", tx_hash)
self.transactions.pop(tx_hash)
def start_threads(self, network):
from verifier import SPV
self.network = network
if self.network is not None:
self.verifier = SPV(self.network, self)
self.verifier.start()
self.set_verifier(self.verifier)
self.synchronizer = WalletSynchronizer(self, network)
network.jobs.append(self.synchronizer.main_loop)
else:
self.verifier = None
self.synchronizer = None
def stop_threads(self):
if self.network:
self.verifier.stop()
self.network.jobs.remove(self.synchronizer.main_loop)
self.synchronizer = None
self.storage.put('stored_height', self.get_local_height(), True)
def restore(self, cb):
pass
def get_accounts(self):
return self.accounts
def add_account(self, account_id, account):
self.accounts[account_id] = account
self.save_accounts()
def save_accounts(self):
d = {}
for k, v in self.accounts.items():
d[k] = v.dump()
self.storage.put('accounts', d, True)
def can_import(self):
return not self.is_watching_only()
def can_export(self):
return not self.is_watching_only()
def is_used(self, address):
h = self.history.get(address,[])
c, u, x = self.get_addr_balance(address)
return len(h), len(h) > 0 and c + u + x == 0
def is_empty(self, address):
c, u, x = self.get_addr_balance(address)
return c+u+x == 0
def address_is_old(self, address, age_limit=2):
age = -1
h = self.history.get(address, [])
for tx_hash, tx_height in h:
if tx_height == 0:
tx_age = 0
else:
tx_age = self.get_local_height() - tx_height + 1
if tx_age > age:
age = tx_age
return age > age_limit
def can_sign(self, tx):
if self.is_watching_only():
return False
if tx.is_complete():
return False
for x in tx.inputs_to_sign():
if self.can_sign_xpubkey(x):
return True
return False
def get_private_key_from_xpubkey(self, x_pubkey, password):
if x_pubkey[0:2] in ['02','03','04']:
addr = bitcoin.public_key_to_bc_address(x_pubkey.decode('hex'))
if self.is_mine(addr):
return self.get_private_key(addr, password)[0]
elif x_pubkey[0:2] == 'ff':
xpub, sequence = BIP32_Account.parse_xpubkey(x_pubkey)
for k, v in self.master_public_keys.items():
if v == xpub:
xprv = self.get_master_private_key(k, password)
if xprv:
_, _, _, c, k = deserialize_xkey(xprv)
return bip32_private_key(sequence, k, c)
elif x_pubkey[0:2] == 'fe':
xpub, sequence = OldAccount.parse_xpubkey(x_pubkey)
for k, account in self.accounts.items():
if xpub in account.get_master_pubkeys():
pk = account.get_private_key(sequence, self, password)
return pk[0]
elif x_pubkey[0:2] == 'fd':
addrtype = ord(x_pubkey[2:4].decode('hex'))
addr = hash_160_to_bc_address(x_pubkey[4:].decode('hex'), addrtype)
if self.is_mine(addr):
return self.get_private_key(addr, password)[0]
else:
raise BaseException("z")
def can_sign_xpubkey(self, x_pubkey):
if x_pubkey[0:2] in ['02','03','04']:
addr = bitcoin.public_key_to_bc_address(x_pubkey.decode('hex'))
return self.is_mine(addr)
elif x_pubkey[0:2] == 'ff':
if not isinstance(self, BIP32_Wallet): return False
xpub, sequence = BIP32_Account.parse_xpubkey(x_pubkey)
return xpub in [ self.master_public_keys[k] for k in self.master_private_keys.keys() ]
elif x_pubkey[0:2] == 'fe':
if not isinstance(self, OldWallet): return False
xpub, sequence = OldAccount.parse_xpubkey(x_pubkey)
return xpub == self.get_master_public_key()
elif x_pubkey[0:2] == 'fd':
addrtype = ord(x_pubkey[2:4].decode('hex'))
addr = hash_160_to_bc_address(x_pubkey[4:].decode('hex'), addrtype)
return self.is_mine(addr)
else:
raise BaseException("z")
def is_watching_only(self):
False
def can_change_password(self):
return not self.is_watching_only()
def get_unused_address(self, account):
# fixme: use slots from expired requests
domain = self.get_account_addresses(account, include_change=False)
for addr in domain:
if not self.history.get(addr) and addr not in self.receive_requests.keys():
return addr
def get_payment_request(self, addr, config):
import util
r = self.receive_requests.get(addr)
if not r:
return
out = copy.copy(r)
out['URI'] = 'verge:' + addr + '?amount=' + util.format_satoshis(out.get('amount'))
out['status'] = self.get_request_status(addr)
# check if bip70 file exists
rdir = config.get('requests_dir')
if rdir:
key = out.get('id', addr)
path = os.path.join(rdir, key)
if os.path.exists(path):
baseurl = 'file://' + rdir
rewrite = config.get('url_rewrite')
if rewrite:
baseurl = baseurl.replace(*rewrite)
out['request_url'] = os.path.join(baseurl, key)
out['URI'] += '&r=' + out['request_url']
out['index_url'] = os.path.join(baseurl, 'index.html') + '?id=' + key
return out
def get_request_status(self, key):
from paymentrequest import PR_PAID, PR_UNPAID, PR_UNKNOWN, PR_EXPIRED
r = self.receive_requests[key]
address = r['address']
amount = r.get('amount')
timestamp = r.get('time', 0)
if timestamp and type(timestamp) != int:
timestamp = 0
expiration = r.get('exp')
if expiration and type(expiration) != int:
expiration = 0
if amount:
if self.up_to_date:
paid = amount <= self.get_addr_received(address)
status = PR_PAID if paid else PR_UNPAID
if status == PR_UNPAID and expiration is not None and time.time() > timestamp + expiration:
status = PR_EXPIRED
else:
status = PR_UNKNOWN
else:
status = PR_UNKNOWN
return status
def make_payment_request(self, addr, amount, message, expiration):
timestamp = int(time.time())
_id = Hash(addr + "%d"%timestamp).encode('hex')[0:10]
r = {'time':timestamp, 'amount':amount, 'exp':expiration, 'address':addr, 'memo':message, 'id':_id}
return r
def sign_payment_request(self, key, alias, alias_addr, password):
req = self.receive_requests.get(key)
alias_privkey = self.get_private_key(alias_addr, password)[0]
pr = paymentrequest.make_unsigned_request(req)
paymentrequest.sign_request_with_alias(pr, alias, alias_privkey)
req['name'] = pr.pki_data
req['sig'] = pr.signature.encode('hex')
self.receive_requests[key] = req<|fim▁hole|> def add_payment_request(self, req, config):
import os
addr = req['address']
amount = req.get('amount')
message = req.get('memo')
self.receive_requests[addr] = req
self.storage.put('payment_requests', self.receive_requests)
self.set_label(addr, message) # should be a default label
rdir = config.get('requests_dir')
if rdir and amount is not None:
key = req.get('id', addr)
pr = paymentrequest.make_request(config, req)
path = os.path.join(rdir, key)
with open(path, 'w') as f:
f.write(pr.SerializeToString())
# reload
req = self.get_payment_request(addr, config)
with open(os.path.join(rdir, key + '.json'), 'w') as f:
f.write(json.dumps(req))
return req
def remove_payment_request(self, addr, config):
if addr not in self.receive_requests:
return False
r = self.receive_requests.pop(addr)
rdir = config.get('requests_dir')
if rdir:
key = r.get('id', addr)
for s in ['.json', '']:
n = os.path.join(rdir, key + s)
if os.path.exists(n):
os.unlink(n)
self.storage.put('payment_requests', self.receive_requests)
return True
def get_sorted_requests(self, config):
return sorted(map(lambda x: self.get_payment_request(x, config), self.receive_requests.keys()), key=lambda x: x.get('time', 0))
class Imported_Wallet(Abstract_Wallet):
wallet_type = 'imported'
def __init__(self, storage):
Abstract_Wallet.__init__(self, storage)
a = self.accounts.get(IMPORTED_ACCOUNT)
if not a:
self.accounts[IMPORTED_ACCOUNT] = ImportedAccount({'imported':{}})
def is_watching_only(self):
acc = self.accounts[IMPORTED_ACCOUNT]
n = acc.keypairs.values()
return len(n) > 0 and n == [[None, None]] * len(n)
def has_seed(self):
return False
def is_deterministic(self):
return False
def check_password(self, password):
self.accounts[IMPORTED_ACCOUNT].get_private_key((0,0), self, password)
def is_used(self, address):
h = self.history.get(address,[])
return len(h), False
def get_master_public_keys(self):
return {}
def is_beyond_limit(self, address, account, is_change):
return False
class Deterministic_Wallet(Abstract_Wallet):
def __init__(self, storage):
Abstract_Wallet.__init__(self, storage)
def has_seed(self):
return self.seed != ''
def is_deterministic(self):
return True
def is_watching_only(self):
return not self.has_seed()
def add_seed(self, seed, password):
if self.seed:
raise Exception("a seed exists")
self.seed_version, self.seed = self.format_seed(seed)
if password:
self.seed = pw_encode( self.seed, password)
self.use_encryption = True
else:
self.use_encryption = False
self.storage.put('seed', self.seed, False)
self.storage.put('seed_version', self.seed_version, False)
self.storage.put('use_encryption', self.use_encryption,True)
def get_seed(self, password):
return pw_decode(self.seed, password)
def get_mnemonic(self, password):
return self.get_seed(password)
def change_gap_limit(self, value):
assert isinstance(value, int), 'gap limit must be of type int, not of %s'%type(value)
if value >= self.gap_limit:
self.gap_limit = value
self.storage.put('gap_limit', self.gap_limit, True)
return True
elif value >= self.min_acceptable_gap():
for key, account in self.accounts.items():
addresses = account.get_addresses(False)
k = self.num_unused_trailing_addresses(addresses)
n = len(addresses) - k + value
account.receiving_pubkeys = account.receiving_pubkeys[0:n]
account.receiving_addresses = account.receiving_addresses[0:n]
self.gap_limit = value
self.storage.put('gap_limit', self.gap_limit, True)
self.save_accounts()
return True
else:
return False
def num_unused_trailing_addresses(self, addresses):
k = 0
for a in addresses[::-1]:
if self.history.get(a):break
k = k + 1
return k
def min_acceptable_gap(self):
# fixme: this assumes wallet is synchronized
n = 0
nmax = 0
for account in self.accounts.values():
addresses = account.get_addresses(0)
k = self.num_unused_trailing_addresses(addresses)
for a in addresses[0:-k]:
if self.history.get(a):
n = 0
else:
n += 1
if n > nmax: nmax = n
return nmax + 1
def default_account(self):
return self.accounts['0']
def create_new_address(self, account=None, for_change=0):
if account is None:
account = self.default_account()
address = account.create_new_address(for_change)
self.add_address(address)
return address
def add_address(self, address):
if address not in self.history:
self.history[address] = []
if self.synchronizer:
self.synchronizer.add(address)
self.save_accounts()
def synchronize(self):
with self.lock:
for account in self.accounts.values():
account.synchronize(self)
def restore(self, callback):
from i18n import _
def wait_for_wallet():
self.set_up_to_date(False)
while not self.is_up_to_date():
msg = "%s\n%s %d"%(
_("Please wait..."),
_("Addresses generated:"),
len(self.addresses(True)))
apply(callback, (msg,))
time.sleep(0.1)
def wait_for_network():
while not self.network.is_connected():
msg = "%s \n" % (_("Connecting..."))
apply(callback, (msg,))
time.sleep(0.1)
# wait until we are connected, because the user might have selected another server
if self.network:
wait_for_network()
wait_for_wallet()
else:
self.synchronize()
def is_beyond_limit(self, address, account, is_change):
if type(account) == ImportedAccount:
return False
addr_list = account.get_addresses(is_change)
i = addr_list.index(address)
prev_addresses = addr_list[:max(0, i)]
limit = self.gap_limit_for_change if is_change else self.gap_limit
if len(prev_addresses) < limit:
return False
prev_addresses = prev_addresses[max(0, i - limit):]
for addr in prev_addresses:
if self.history.get(addr):
return False
return True
def get_action(self):
if not self.get_master_public_key():
return 'create_seed'
if not self.accounts:
return 'create_accounts'
def get_master_public_keys(self):
out = {}
for k, account in self.accounts.items():
if type(account) == ImportedAccount:
continue
name = self.get_account_name(k)
mpk_text = '\n\n'.join(account.get_master_pubkeys())
out[name] = mpk_text
return out
class BIP32_Wallet(Deterministic_Wallet):
# abstract class, bip32 logic
root_name = 'x/'
def __init__(self, storage):
Deterministic_Wallet.__init__(self, storage)
self.master_public_keys = storage.get('master_public_keys', {})
self.master_private_keys = storage.get('master_private_keys', {})
self.gap_limit = storage.get('gap_limit', 20)
def is_watching_only(self):
return not bool(self.master_private_keys)
def can_import(self):
return False
def get_master_public_key(self):
return self.master_public_keys.get(self.root_name)
def get_master_private_key(self, account, password):
k = self.master_private_keys.get(account)
if not k: return
xprv = pw_decode(k, password)
try:
deserialize_xkey(xprv)
except:
raise InvalidPassword()
return xprv
def check_password(self, password):
xpriv = self.get_master_private_key(self.root_name, password)
xpub = self.master_public_keys[self.root_name]
if deserialize_xkey(xpriv)[3] != deserialize_xkey(xpub)[3]:
raise InvalidPassword()
def add_master_public_key(self, name, xpub):
if xpub in self.master_public_keys.values():
raise BaseException('Duplicate master public key')
self.master_public_keys[name] = xpub
self.storage.put('master_public_keys', self.master_public_keys, True)
def add_master_private_key(self, name, xpriv, password):
self.master_private_keys[name] = pw_encode(xpriv, password)
self.storage.put('master_private_keys', self.master_private_keys, True)
def derive_xkeys(self, root, derivation, password):
x = self.master_private_keys[root]
root_xprv = pw_decode(x, password)
xprv, xpub = bip32_private_derivation(root_xprv, root, derivation)
return xpub, xprv
def create_master_keys(self, password):
seed = self.get_seed(password)
self.add_cosigner_seed(seed, self.root_name, password)
def add_cosigner_seed(self, seed, name, password, passphrase=''):
# we don't store the seed, only the master xpriv
xprv, xpub = bip32_root(self.mnemonic_to_seed(seed, passphrase))
xprv, xpub = bip32_private_derivation(xprv, "m/", self.root_derivation)
self.add_master_public_key(name, xpub)
self.add_master_private_key(name, xprv, password)
def add_cosigner_xpub(self, seed, name):
# store only master xpub
xprv, xpub = bip32_root(self.mnemonic_to_seed(seed,''))
xprv, xpub = bip32_private_derivation(xprv, "m/", self.root_derivation)
self.add_master_public_key(name, xpub)
def mnemonic_to_seed(self, seed, password):
return Mnemonic.mnemonic_to_seed(seed, password)
def make_seed(self, lang=None):
return Mnemonic(lang).make_seed()
def format_seed(self, seed):
return NEW_SEED_VERSION, ' '.join(seed.split())
class BIP32_Simple_Wallet(BIP32_Wallet):
# Wallet with a single BIP32 account, no seed
# gap limit 20
wallet_type = 'xpub'
def create_xprv_wallet(self, xprv, password):
xpub = bitcoin.xpub_from_xprv(xprv)
account = BIP32_Account({'xpub':xpub})
self.storage.put('seed_version', self.seed_version, True)
self.add_master_private_key(self.root_name, xprv, password)
self.add_master_public_key(self.root_name, xpub)
self.add_account('0', account)
self.use_encryption = (password != None)
self.storage.put('use_encryption', self.use_encryption,True)
def create_xpub_wallet(self, xpub):
account = BIP32_Account({'xpub':xpub})
self.storage.put('seed_version', self.seed_version, True)
self.add_master_public_key(self.root_name, xpub)
self.add_account('0', account)
class BIP32_HD_Wallet(BIP32_Wallet):
# wallet that can create accounts
def __init__(self, storage):
self.next_account = storage.get('next_account2', None)
BIP32_Wallet.__init__(self, storage)
def can_create_accounts(self):
return self.root_name in self.master_private_keys.keys()
def addresses(self, b=True):
l = BIP32_Wallet.addresses(self, b)
if self.next_account:
_, _, _, next_address = self.next_account
if next_address not in l:
l.append(next_address)
return l
def get_address_index(self, address):
if self.next_account:
next_id, next_xpub, next_pubkey, next_address = self.next_account
if address == next_address:
return next_id, (0,0)
return BIP32_Wallet.get_address_index(self, address)
def num_accounts(self):
keys = []
for k, v in self.accounts.items():
if type(v) != BIP32_Account:
continue
keys.append(k)
i = 0
while True:
account_id = '%d'%i
if account_id not in keys:
break
i += 1
return i
def get_next_account(self, password):
account_id = '%d'%self.num_accounts()
derivation = self.root_name + "%d'"%int(account_id)
xpub, xprv = self.derive_xkeys(self.root_name, derivation, password)
self.add_master_public_key(derivation, xpub)
if xprv:
self.add_master_private_key(derivation, xprv, password)
account = BIP32_Account({'xpub':xpub})
addr, pubkey = account.first_address()
self.add_address(addr)
return account_id, xpub, pubkey, addr
def create_main_account(self, password):
# First check the password is valid (this raises if it isn't).
self.check_password(password)
assert self.num_accounts() == 0
self.create_account('Main account', password)
def create_account(self, name, password):
account_id, xpub, _, _ = self.get_next_account(password)
account = BIP32_Account({'xpub':xpub})
self.add_account(account_id, account)
self.set_label(account_id, name)
# add address of the next account
self.next_account = self.get_next_account(password)
self.storage.put('next_account2', self.next_account)
def account_is_pending(self, k):
return type(self.accounts.get(k)) == PendingAccount
def delete_pending_account(self, k):
assert type(self.accounts.get(k)) == PendingAccount
self.accounts.pop(k)
self.save_accounts()
def create_pending_account(self, name, password):
if self.next_account is None:
self.next_account = self.get_next_account(password)
self.storage.put('next_account2', self.next_account)
next_id, next_xpub, next_pubkey, next_address = self.next_account
if name:
self.set_label(next_id, name)
self.accounts[next_id] = PendingAccount({'pending':True, 'address':next_address, 'pubkey':next_pubkey})
self.save_accounts()
def synchronize(self):
# synchronize existing accounts
BIP32_Wallet.synchronize(self)
if self.next_account is None and not self.use_encryption:
try:
self.next_account = self.get_next_account(None)
self.storage.put('next_account2', self.next_account)
except:
print_error('cannot get next account')
# check pending account
if self.next_account is not None:
next_id, next_xpub, next_pubkey, next_address = self.next_account
if self.address_is_old(next_address):
print_error("creating account", next_id)
self.add_account(next_id, BIP32_Account({'xpub':next_xpub}))
# here the user should get a notification
self.next_account = None
self.storage.put('next_account2', self.next_account)
elif self.history.get(next_address, []):
if next_id not in self.accounts:
print_error("create pending account", next_id)
self.accounts[next_id] = PendingAccount({'pending':True, 'address':next_address, 'pubkey':next_pubkey})
self.save_accounts()
class NewWallet(BIP32_Wallet, Mnemonic):
# Standard wallet
root_derivation = "m/"
wallet_type = 'standard'
def create_main_account(self, password):
xpub = self.master_public_keys.get("x/")
account = BIP32_Account({'xpub':xpub})
self.add_account('0', account)
class Multisig_Wallet(BIP32_Wallet, Mnemonic):
# generic m of n
root_name = "x1/"
root_derivation = "m/"
def __init__(self, storage):
BIP32_Wallet.__init__(self, storage)
self.wallet_type = storage.get('wallet_type')
m = re.match('(\d+)of(\d+)', self.wallet_type)
self.m = int(m.group(1))
self.n = int(m.group(2))
def load_accounts(self):
self.accounts = {}
d = self.storage.get('accounts', {})
v = d.get('0')
if v:
if v.get('xpub3'):
v['xpubs'] = [v['xpub'], v['xpub2'], v['xpub3']]
elif v.get('xpub2'):
v['xpubs'] = [v['xpub'], v['xpub2']]
self.accounts = {'0': Multisig_Account(v)}
def create_main_account(self, password):
account = Multisig_Account({'xpubs': self.master_public_keys.values(), 'm': self.m})
self.add_account('0', account)
def get_master_public_keys(self):
return self.master_public_keys
def get_action(self):
for i in range(self.n):
if self.master_public_keys.get("x%d/"%(i+1)) is None:
return 'create_seed' if i == 0 else 'add_cosigners'
if not self.accounts:
return 'create_accounts'
class OldWallet(Deterministic_Wallet):
wallet_type = 'old'
def __init__(self, storage):
Deterministic_Wallet.__init__(self, storage)
self.gap_limit = storage.get('gap_limit', 5)
def make_seed(self):
import old_mnemonic
seed = random_seed(128)
return ' '.join(old_mnemonic.mn_encode(seed))
def format_seed(self, seed):
import old_mnemonic
# see if seed was entered as hex
seed = seed.strip()
try:
assert seed
seed.decode('hex')
return OLD_SEED_VERSION, str(seed)
except Exception:
pass
words = seed.split()
seed = old_mnemonic.mn_decode(words)
if not seed:
raise Exception("Invalid seed")
return OLD_SEED_VERSION, seed
def create_master_keys(self, password):
seed = self.get_seed(password)
mpk = OldAccount.mpk_from_seed(seed)
self.storage.put('master_public_key', mpk, True)
def get_master_public_key(self):
return self.storage.get("master_public_key")
def get_master_public_keys(self):
return {'Main Account':self.get_master_public_key()}
def create_main_account(self, password):
mpk = self.storage.get("master_public_key")
self.create_account(mpk)
def create_account(self, mpk):
self.accounts['0'] = OldAccount({'mpk':mpk, 0:[], 1:[]})
self.save_accounts()
def create_watching_only_wallet(self, mpk):
self.seed_version = OLD_SEED_VERSION
self.storage.put('seed_version', self.seed_version, False)
self.storage.put('master_public_key', mpk, True)
self.create_account(mpk)
def get_seed(self, password):
seed = pw_decode(self.seed, password).encode('utf8')
return seed
def check_password(self, password):
seed = self.get_seed(password)
self.accounts['0'].check_seed(seed)
def get_mnemonic(self, password):
import old_mnemonic
s = self.get_seed(password)
return ' '.join(old_mnemonic.mn_encode(s))
wallet_types = [
# category type description constructor
('standard', 'old', ("Old wallet"), OldWallet),
('standard', 'xpub', ("BIP32 Import"), BIP32_Simple_Wallet),
('standard', 'standard', ("Standard wallet"), NewWallet),
('standard', 'imported', ("Imported wallet"), Imported_Wallet),
('multisig', '2of2', ("Multisig wallet (2 of 2)"), Multisig_Wallet),
('multisig', '2of3', ("Multisig wallet (2 of 3)"), Multisig_Wallet)
]
# former WalletFactory
class Wallet(object):
"""The main wallet "entry point".
This class is actually a factory that will return a wallet of the correct
type when passed a WalletStorage instance."""
def __new__(self, storage):
seed_version = storage.get('seed_version')
if not seed_version:
seed_version = OLD_SEED_VERSION if len(storage.get('master_public_key','')) == 128 else NEW_SEED_VERSION
if seed_version not in [OLD_SEED_VERSION, NEW_SEED_VERSION]:
msg = "Your wallet has an unsupported seed version."
msg += '\n\nWallet file: %s' % os.path.abspath(storage.path)
if seed_version in [5, 7, 8, 9, 10]:
msg += "\n\nTo open this wallet, try 'git checkout seed_v%d'"%seed_version
if seed_version == 6:
# version 1.9.8 created v6 wallets when an incorrect seed was entered in the restore dialog
msg += '\n\nThis file was created because of a bug in version 1.9.8.'
if storage.get('master_public_keys') is None and storage.get('master_private_keys') is None and storage.get('imported_keys') is None:
# pbkdf2 was not included with the binaries, and wallet creation aborted.
msg += "\nIt does not contain any keys, and can safely be removed."
else:
# creation was complete if electrum was run from source
msg += "\nPlease open this file with Electrum 1.9.8, and move your coins to a new wallet."
raise BaseException(msg)
wallet_type = storage.get('wallet_type')
if wallet_type:
for cat, t, name, c in wallet_types:
if t == wallet_type:
WalletClass = c
break
else:
if re.match('(\d+)of(\d+)', wallet_type):
WalletClass = Multisig_Wallet
else:
raise BaseException('unknown wallet type', wallet_type)
else:
if seed_version == OLD_SEED_VERSION:
WalletClass = OldWallet
else:
WalletClass = NewWallet
return WalletClass(storage)
@classmethod
def is_seed(self, seed):
if not seed:
return False
elif is_old_seed(seed):
return True
elif is_new_seed(seed):
return True
else:
return False
@classmethod
def is_old_mpk(self, mpk):
try:
int(mpk, 16)
assert len(mpk) == 128
return True
except:
return False
@classmethod
def is_xpub(self, text):
try:
assert text[0:4] == 'xpub'
deserialize_xkey(text)
return True
except:
return False
@classmethod
def is_xprv(self, text):
try:
assert text[0:4] == 'xprv'
deserialize_xkey(text)
return True
except:
return False
@classmethod
def is_address(self, text):
if not text:
return False
for x in text.split():
if not bitcoin.is_address(x):
return False
return True
@classmethod
def is_private_key(self, text):
if not text:
return False
for x in text.split():
if not bitcoin.is_private_key(x):
return False
return True
@classmethod
def from_seed(self, seed, password, storage):
if is_old_seed(seed):
klass = OldWallet
elif is_new_seed(seed):
klass = NewWallet
w = klass(storage)
w.add_seed(seed, password)
w.create_master_keys(password)
w.create_main_account(password)
return w
@classmethod
def from_address(self, text, storage):
w = Imported_Wallet(storage)
for x in text.split():
w.accounts[IMPORTED_ACCOUNT].add(x, None, None, None)
w.save_accounts()
return w
@classmethod
def from_private_key(self, text, password, storage):
w = Imported_Wallet(storage)
w.update_password(None, password)
for x in text.split():
w.import_key(x, password)
return w
@classmethod
def from_old_mpk(self, mpk, storage):
w = OldWallet(storage)
w.seed = ''
w.create_watching_only_wallet(mpk)
return w
@classmethod
def from_xpub(self, xpub, storage):
w = BIP32_Simple_Wallet(storage)
w.create_xpub_wallet(xpub)
return w
@classmethod
def from_xprv(self, xprv, password, storage):
w = BIP32_Simple_Wallet(storage)
w.create_xprv_wallet(xprv, password)
return w
@classmethod
def from_multisig(klass, key_list, password, storage, wallet_type):
storage.put('wallet_type', wallet_type, True)
self = Multisig_Wallet(storage)
key_list = sorted(key_list, key = lambda x: klass.is_xpub(x))
for i, text in enumerate(key_list):
assert klass.is_seed(text) or klass.is_xprv(text) or klass.is_xpub(text)
name = "x%d/"%(i+1)
if klass.is_xprv(text):
xpub = bitcoin.xpub_from_xprv(text)
self.add_master_public_key(name, xpub)
self.add_master_private_key(name, text, password)
elif klass.is_xpub(text):
self.add_master_public_key(name, text)
elif klass.is_seed(text):
if name == 'x1/':
self.add_seed(text, password)
self.create_master_keys(password)
else:
self.add_cosigner_seed(text, name, password)
self.use_encryption = (password != None)
self.storage.put('use_encryption', self.use_encryption, True)
self.create_main_account(password)
return self<|fim▁end|> | self.storage.put('payment_requests', self.receive_requests)
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>###
# Copyright (c) 2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,<|fim▁hole|># documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
"""
This plugin handles various plugin-related things, such as getting help for
a plugin, getting a list of the loaded plugins, and searching and downloading
plugins from supybot.com.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "%%VERSION%%"
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.authors.jemfinch
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {
supybot.authors.skorobeus: ['contributors'],
}
from . import config
# This had to be renamed because of stupid case-insensitivity issues.
from . import plugin
from imp import reload
reload(plugin) # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
from . import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:<|fim▁end|> | # this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the |
<|file_name|>extract-method-in-anonymous-function-declaration.ts<|end_file_name|><|fim▁begin|>/// <reference path='fourslash.ts' />
////export default function() {
//// /*start*/0/*end*/;
////}
goTo.select('start', 'end')
edit.applyRefactor({
refactorName: "Extract Symbol",
<|fim▁hole|> actionDescription: "Extract to inner function in anonymous function",
newContent:
`export default function() {
/*RENAME*/newFunction();
function newFunction() {
0;
}
}`
});<|fim▁end|> | actionName: "function_scope_0",
|
<|file_name|>legacy.js<|end_file_name|><|fim▁begin|>/*
artifact generator: C:\My\wizzi\v5\node_modules\wizzi-js\lib\artifacts\js\module\gen\main.js
primary source IttfDocument: c:\my\wizzi\v5\plugins\wizzi-core\src\ittf\root\legacy.js.ittf
*/
'use strict';<|fim▁hole|><|fim▁end|> |
module.exports = require('wizzi-legacy-v4'); |
<|file_name|>inference_unstable_itertools.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.<|fim▁hole|>//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub trait IpuItertools {
fn ipu_flatten(&self) -> u32 {
1
}
}
impl IpuItertools for char {}<|fim▁end|> | |
<|file_name|>EnumAvisImplique.java<|end_file_name|><|fim▁begin|>//
// Ce fichier a été généré par l'implémentation de référence JavaTM Architecture for XML Binding (JAXB), v2.2.11
// Voir <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Toute modification apportée à ce fichier sera perdue lors de la recompilation du schéma source.
// Généré le : 2017.09.18 à 11:31:40 AM CEST
//
package fr.gouv.api.Boamp_v230;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Classe Java pour EnumAvisImplique complex type.
*
* <p>Le fragment de schéma suivant indique le contenu attendu figurant dans cette classe.
*
* <pre>
* <complexType name="EnumAvisImplique">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <choice>
* <element name="MARCHE_PUBLIC" type="{http://www.w3.org/2001/XMLSchema}anyType"/>
* <element name="ACCORD_CADRE" type="{http://www.w3.org/2001/XMLSchema}anyType"/>
* <element name="SAD" type="{http://www.w3.org/2001/XMLSchema}anyType"/>
* </choice>
* </restriction>
* </complexContent>
* </complexType><|fim▁hole|> * </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "EnumAvisImplique", propOrder = {
"marchepublic",
"accordcadre",
"sad"
})
public class EnumAvisImplique {
@XmlElement(name = "MARCHE_PUBLIC")
protected Object marchepublic;
@XmlElement(name = "ACCORD_CADRE")
protected Object accordcadre;
@XmlElement(name = "SAD")
protected Object sad;
/**
* Obtient la valeur de la propriété marchepublic.
*
* @return
* possible object is
* {@link Object }
*
*/
public Object getMARCHEPUBLIC() {
return marchepublic;
}
/**
* Définit la valeur de la propriété marchepublic.
*
* @param value
* allowed object is
* {@link Object }
*
*/
public void setMARCHEPUBLIC(Object value) {
this.marchepublic = value;
}
/**
* Obtient la valeur de la propriété accordcadre.
*
* @return
* possible object is
* {@link Object }
*
*/
public Object getACCORDCADRE() {
return accordcadre;
}
/**
* Définit la valeur de la propriété accordcadre.
*
* @param value
* allowed object is
* {@link Object }
*
*/
public void setACCORDCADRE(Object value) {
this.accordcadre = value;
}
/**
* Obtient la valeur de la propriété sad.
*
* @return
* possible object is
* {@link Object }
*
*/
public Object getSAD() {
return sad;
}
/**
* Définit la valeur de la propriété sad.
*
* @param value
* allowed object is
* {@link Object }
*
*/
public void setSAD(Object value) {
this.sad = value;
}
}<|fim▁end|> | |
<|file_name|>draw.rs<|end_file_name|><|fim▁begin|>use std::ptr;
use std::mem;
use BufferViewExt;
use BufferViewSliceExt;
use ProgramExt;
use DrawError;
use UniformsExt;
use context::Context;
use ContextExt;
use QueryExt;
use TransformFeedbackSessionExt;
use fbo::{self, ValidatedAttachments};
use sync;
use uniforms::Uniforms;
use {Program, GlObject, ToGlEnum};
use index::{self, IndicesSource, PrimitiveType};
use vertex::{MultiVerticesSource, VerticesSource, TransformFeedbackSession};
use vertex_array_object::VertexAttributesSystem;
use draw_parameters::DrawParameters;
use draw_parameters::{BlendingFunction, BackfaceCullingMode};
use draw_parameters::{DepthTest, PolygonMode, StencilTest};
use draw_parameters::{SamplesQueryParam, TransformFeedbackPrimitivesWrittenQuery};
use draw_parameters::{PrimitivesGeneratedQuery, TimeElapsedQuery, ConditionalRendering};
use draw_parameters::{Smooth};
use Rect;
use libc;
use {gl, context, draw_parameters};
use version::Version;
use version::Api;
/// Draws everything.
pub fn draw<'a, U, V>(context: &Context, framebuffer: Option<&ValidatedAttachments>,
vertex_buffers: V, indices: IndicesSource,
program: &Program, uniforms: &U, draw_parameters: &DrawParameters,
dimensions: (u32, u32)) -> Result<(), DrawError>
where U: Uniforms, V: MultiVerticesSource<'a>
{
try!(draw_parameters::validate(context, draw_parameters));
// this contains the list of fences that will need to be fulfilled after the draw command
// has started
let mut fences = Vec::with_capacity(0);
// handling tessellation
let vertices_per_patch = match indices.get_primitives_type() {
index::PrimitiveType::Patches { vertices_per_patch } => {
if let Some(max) = context.capabilities().max_patch_vertices {
if vertices_per_patch == 0 || vertices_per_patch as gl::types::GLint > max {
return Err(DrawError::UnsupportedVerticesPerPatch);
}
} else {
return Err(DrawError::TessellationNotSupported);
}
// TODO: programs created from binaries have the wrong value
// for `has_tessellation_shaders`
/*if !program.has_tessellation_shaders() { // TODO:
panic!("Default tessellation level is not supported yet");
}*/
Some(vertices_per_patch)
},
_ => {<|fim▁hole|> /*if program.has_tessellation_shaders() {
return Err(DrawError::TessellationWithoutPatches);
}*/
None
},
};
// starting the state changes
let mut ctxt = context.make_current();
// handling vertices source
let (vertices_count, instances_count) = {
let index_buffer = match indices {
IndicesSource::IndexBuffer { buffer, .. } => Some(buffer),
IndicesSource::MultidrawArray { .. } => None,
IndicesSource::NoIndices { .. } => None,
};
// object that is used to build the bindings
let mut binder = VertexAttributesSystem::start(&mut ctxt, program, index_buffer);
// number of vertices in the vertices sources, or `None` if there is a mismatch
let mut vertices_count: Option<usize> = None;
// number of instances to draw
let mut instances_count: Option<usize> = None;
for src in vertex_buffers.iter() {
match src {
VerticesSource::VertexBuffer(buffer, format, per_instance) => {
// TODO: assert!(buffer.get_elements_size() == total_size(format));
if let Some(fence) = buffer.add_fence() {
fences.push(fence);
}
binder = binder.add(&buffer, format, if per_instance { Some(1) } else { None });
},
_ => {}
}
match src {
VerticesSource::VertexBuffer(ref buffer, _, false) => {
if let Some(curr) = vertices_count {
if curr != buffer.get_elements_count() {
vertices_count = None;
break;
}
} else {
vertices_count = Some(buffer.get_elements_count());
}
},
VerticesSource::VertexBuffer(ref buffer, _, true) => {
if let Some(curr) = instances_count {
if curr != buffer.get_elements_count() {
return Err(DrawError::InstancesCountMismatch);
}
} else {
instances_count = Some(buffer.get_elements_count());
}
},
VerticesSource::Marker { len, per_instance } if !per_instance => {
if let Some(curr) = vertices_count {
if curr != len {
vertices_count = None;
break;
}
} else {
vertices_count = Some(len);
}
},
VerticesSource::Marker { len, per_instance } if per_instance => {
if let Some(curr) = instances_count {
if curr != len {
return Err(DrawError::InstancesCountMismatch);
}
} else {
instances_count = Some(len);
}
},
_ => ()
}
}
binder.bind();
(vertices_count, instances_count)
};
// binding the FBO to draw upon
{
let fbo_id = fbo::FramebuffersContainer::get_framebuffer_for_drawing(&mut ctxt, framebuffer);
fbo::bind_framebuffer(&mut ctxt, fbo_id, true, false);
};
// binding the program and uniforms
program.use_program(&mut ctxt);
try!(uniforms.bind_uniforms(&mut ctxt, program, &mut fences));
// sync-ing draw_parameters
unsafe {
sync_depth(&mut ctxt, draw_parameters.depth_test, draw_parameters.depth_write,
draw_parameters.depth_range);
sync_stencil(&mut ctxt, &draw_parameters);
sync_blending(&mut ctxt, draw_parameters.blending_function);
sync_color_mask(&mut ctxt, draw_parameters.color_mask);
sync_line_width(&mut ctxt, draw_parameters.line_width);
sync_point_size(&mut ctxt, draw_parameters.point_size);
sync_polygon_mode(&mut ctxt, draw_parameters.backface_culling, draw_parameters.polygon_mode);
sync_multisampling(&mut ctxt, draw_parameters.multisampling);
sync_dithering(&mut ctxt, draw_parameters.dithering);
sync_viewport_scissor(&mut ctxt, draw_parameters.viewport, draw_parameters.scissor,
dimensions);
sync_rasterizer_discard(&mut ctxt, draw_parameters.draw_primitives);
sync_vertices_per_patch(&mut ctxt, vertices_per_patch);
try!(sync_queries(&mut ctxt, draw_parameters.samples_passed_query,
draw_parameters.time_elapsed_query,
draw_parameters.primitives_generated_query,
draw_parameters.transform_feedback_primitives_written_query));
sync_conditional_render(&mut ctxt, draw_parameters.condition);
try!(sync_smooth(&mut ctxt, draw_parameters.smooth, indices.get_primitives_type()));
// TODO: make sure that the program is the right one
// TODO: changing the current transform feedback requires pausing/unbinding before changing the program
if let Some(ref tf) = draw_parameters.transform_feedback {
tf.bind(&mut ctxt, indices.get_primitives_type());
} else {
TransformFeedbackSession::unbind(&mut ctxt);
}
if !program.has_srgb_output() {
if ctxt.version >= &Version(Api::Gl, 3, 0) || ctxt.extensions.gl_arb_framebuffer_srgb ||
ctxt.extensions.gl_ext_framebuffer_srgb || ctxt.extensions.gl_ext_srgb_write_control
{
if !ctxt.state.enabled_framebuffer_srgb {
ctxt.gl.Enable(gl::FRAMEBUFFER_SRGB);
ctxt.state.enabled_framebuffer_srgb = true;
}
}
}
}
// drawing
{
match &indices {
&IndicesSource::IndexBuffer { ref buffer, data_type, primitives } => {
let ptr: *const u8 = ptr::null_mut();
let ptr = unsafe { ptr.offset(buffer.get_offset_bytes() as isize) };
if let Some(fence) = buffer.add_fence() {
fences.push(fence);
}
unsafe {
if let Some(instances_count) = instances_count {
ctxt.gl.DrawElementsInstanced(primitives.to_glenum(),
buffer.get_elements_count() as gl::types::GLsizei,
data_type.to_glenum(),
ptr as *const libc::c_void,
instances_count as gl::types::GLsizei);
} else {
ctxt.gl.DrawElements(primitives.to_glenum(),
buffer.get_elements_count() as gl::types::GLsizei,
data_type.to_glenum(),
ptr as *const libc::c_void);
}
}
},
&IndicesSource::MultidrawArray { ref buffer, primitives } => {
let ptr: *const u8 = ptr::null_mut();
let ptr = unsafe { ptr.offset(buffer.get_offset_bytes() as isize) };
if let Some(fence) = buffer.add_fence() {
fences.push(fence);
}
unsafe {
buffer.prepare_and_bind_for_draw_indirect(&mut ctxt);
ctxt.gl.MultiDrawArraysIndirect(primitives.to_glenum(), ptr as *const _,
buffer.get_elements_count() as gl::types::GLsizei,
0);
}
},
&IndicesSource::NoIndices { primitives } => {
let vertices_count = match vertices_count {
Some(c) => c,
None => return Err(DrawError::VerticesSourcesLengthMismatch)
};
unsafe {
if let Some(instances_count) = instances_count {
ctxt.gl.DrawArraysInstanced(primitives.to_glenum(), 0,
vertices_count as gl::types::GLsizei,
instances_count as gl::types::GLsizei);
} else {
ctxt.gl.DrawArrays(primitives.to_glenum(), 0,
vertices_count as gl::types::GLsizei);
}
}
},
};
};
ctxt.state.next_draw_call_id += 1;
// fulfilling the fences
for fence in fences.into_iter() {
let mut new_fence = Some(unsafe {
sync::new_linear_sync_fence_if_supported(&mut ctxt)
}.unwrap());
mem::swap(&mut new_fence, &mut *fence.borrow_mut());
if let Some(new_fence) = new_fence {
unsafe { sync::destroy_linear_sync_fence(&mut ctxt, new_fence) };
}
}
Ok(())
}
// Synchronizes the depth-related GL state (test function, write mask, depth
// range) with the requested draw parameters. Every GL call is guarded by the
// cached state in `ctxt.state` so redundant calls are skipped.
fn sync_depth(ctxt: &mut context::CommandContext, depth_test: DepthTest, depth_write: bool,
              depth_range: (f32, f32))
{
    // depth test
    match depth_test {
        // `Overwrite` means every fragment passes; this is expressed by
        // disabling GL_DEPTH_TEST entirely rather than using GL_ALWAYS.
        DepthTest::Overwrite => unsafe {
            if ctxt.state.enabled_depth_test {
                ctxt.gl.Disable(gl::DEPTH_TEST);
                ctxt.state.enabled_depth_test = false;
            }
        },
        // Any other variant maps to a glDepthFunc comparison; update the
        // function first, then make sure the test is actually enabled.
        depth_function => unsafe {
            let depth_function = depth_function.to_glenum();
            if ctxt.state.depth_func != depth_function {
                ctxt.gl.DepthFunc(depth_function);
                ctxt.state.depth_func = depth_function;
            }
            if !ctxt.state.enabled_depth_test {
                ctxt.gl.Enable(gl::DEPTH_TEST);
                ctxt.state.enabled_depth_test = true;
            }
        }
    }
    // depth mask: whether rendering writes into the depth buffer
    if depth_write != ctxt.state.depth_mask {
        unsafe {
            ctxt.gl.DepthMask(if depth_write { gl::TRUE } else { gl::FALSE });
        }
        ctxt.state.depth_mask = depth_write;
    }
    // depth range: glDepthRange takes doubles, the parameters are f32
    if depth_range != ctxt.state.depth_range {
        unsafe {
            ctxt.gl.DepthRange(depth_range.0 as f64, depth_range.1 as f64);
        }
        ctxt.state.depth_range = depth_range;
    }
}
// Synchronizes the face-separate stencil state (test function, reference
// value, read mask, write mask and operations) with `params`, then enables or
// disables GL_STENCIL_TEST depending on whether any setting can have an
// observable effect. Clockwise-winding parameters map to GL_BACK and
// counter-clockwise ones to GL_FRONT (glFrontFace is left at its GL_CCW
// default elsewhere in this module).
// TODO: optimize me
fn sync_stencil(ctxt: &mut context::CommandContext, params: &DrawParameters) {
    // Translate the high-level StencilTest into (comparison glenum, read mask)
    // for each winding order.
    let (test_cw, read_mask_cw) = match params.stencil_test_clockwise {
        StencilTest::AlwaysPass => (gl::ALWAYS, 0),
        StencilTest::AlwaysFail => (gl::NEVER, 0),
        StencilTest::IfLess { mask } => (gl::LESS, mask),
        StencilTest::IfLessOrEqual { mask } => (gl::LEQUAL, mask),
        StencilTest::IfMore { mask } => (gl::GREATER, mask),
        StencilTest::IfMoreOrEqual { mask } => (gl::GEQUAL, mask),
        StencilTest::IfEqual { mask } => (gl::EQUAL, mask),
        StencilTest::IfNotEqual { mask } => (gl::NOTEQUAL, mask),
    };
    let (test_ccw, read_mask_ccw) = match params.stencil_test_counter_clockwise {
        StencilTest::AlwaysPass => (gl::ALWAYS, 0),
        StencilTest::AlwaysFail => (gl::NEVER, 0),
        StencilTest::IfLess { mask } => (gl::LESS, mask),
        StencilTest::IfLessOrEqual { mask } => (gl::LEQUAL, mask),
        StencilTest::IfMore { mask } => (gl::GREATER, mask),
        StencilTest::IfMoreOrEqual { mask } => (gl::GEQUAL, mask),
        StencilTest::IfEqual { mask } => (gl::EQUAL, mask),
        StencilTest::IfNotEqual { mask } => (gl::NOTEQUAL, mask),
    };
    if ctxt.state.stencil_func_back != (test_cw, params.stencil_reference_value_clockwise, read_mask_cw) {
        unsafe { ctxt.gl.StencilFuncSeparate(gl::BACK, test_cw, params.stencil_reference_value_clockwise, read_mask_cw) };
        ctxt.state.stencil_func_back = (test_cw, params.stencil_reference_value_clockwise, read_mask_cw);
    }
    if ctxt.state.stencil_func_front != (test_ccw, params.stencil_reference_value_counter_clockwise, read_mask_ccw) {
        // BUGFIX: this call previously passed the *clockwise* test, reference
        // value and read mask (test_cw / ..._clockwise / read_mask_cw), so the
        // GL front-face state never matched the counter-clockwise tuple that is
        // compared above and cached below.
        unsafe { ctxt.gl.StencilFuncSeparate(gl::FRONT, test_ccw, params.stencil_reference_value_counter_clockwise, read_mask_ccw) };
        ctxt.state.stencil_func_front = (test_ccw, params.stencil_reference_value_counter_clockwise, read_mask_ccw);
    }
    if ctxt.state.stencil_mask_back != params.stencil_write_mask_clockwise {
        unsafe { ctxt.gl.StencilMaskSeparate(gl::BACK, params.stencil_write_mask_clockwise) };
        ctxt.state.stencil_mask_back = params.stencil_write_mask_clockwise;
    }
    // NOTE(review): the front-face write mask below reuses the *clockwise*
    // mask; if `DrawParameters` also exposes a counter-clockwise write mask,
    // it should very likely be used here instead — confirm against the struct
    // definition before changing.
    if ctxt.state.stencil_mask_front != params.stencil_write_mask_clockwise {
        unsafe { ctxt.gl.StencilMaskSeparate(gl::FRONT, params.stencil_write_mask_clockwise) };
        ctxt.state.stencil_mask_front = params.stencil_write_mask_clockwise;
    }
    // Stencil operations (stencil-fail / depth-fail / depth-pass) per face.
    let op_back = (params.stencil_fail_operation_clockwise.to_glenum(),
                   params.stencil_pass_depth_fail_operation_clockwise.to_glenum(),
                   params.stencil_depth_pass_operation_clockwise.to_glenum());
    if ctxt.state.stencil_op_back != op_back {
        unsafe { ctxt.gl.StencilOpSeparate(gl::BACK, op_back.0, op_back.1, op_back.2) };
        ctxt.state.stencil_op_back = op_back;
    }
    let op_front = (params.stencil_fail_operation_counter_clockwise.to_glenum(),
                    params.stencil_pass_depth_fail_operation_counter_clockwise.to_glenum(),
                    params.stencil_depth_pass_operation_counter_clockwise.to_glenum());
    if ctxt.state.stencil_op_front != op_front {
        unsafe { ctxt.gl.StencilOpSeparate(gl::FRONT, op_front.0, op_front.1, op_front.2) };
        ctxt.state.stencil_op_front = op_front;
    }
    // Only enable the stencil test when some comparison or operation can
    // actually do something.
    let enable_stencil = test_cw != gl::ALWAYS || test_ccw != gl::ALWAYS ||
                         op_back.0 != gl::KEEP || op_front.0 != gl::KEEP;
    if ctxt.state.enabled_stencil_test != enable_stencil {
        if enable_stencil {
            unsafe { ctxt.gl.Enable(gl::STENCIL_TEST) };
        } else {
            unsafe { ctxt.gl.Disable(gl::STENCIL_TEST) };
        }
        ctxt.state.enabled_stencil_test = enable_stencil;
    }
}
// Synchronizes the blending state with the requested `BlendingFunction`.
// Equations that take source/destination factors hand them back through
// `blend_factors` and are applied with glBlendFunc at the end; Min/Max and
// AlwaysReplace take no factors.
fn sync_blending(ctxt: &mut context::CommandContext, blending_function: Option<BlendingFunction>) {
    let blend_factors = match blending_function {
        // AlwaysReplace == no blending at all: just disable GL_BLEND.
        Some(BlendingFunction::AlwaysReplace) => unsafe {
            if ctxt.state.enabled_blend {
                ctxt.gl.Disable(gl::BLEND);
                ctxt.state.enabled_blend = false;
            }
            None
        },
        // GL_MIN/GL_MAX ignore the blend factors entirely.
        Some(BlendingFunction::Min) => unsafe {
            if ctxt.state.blend_equation != gl::MIN {
                ctxt.gl.BlendEquation(gl::MIN);
                ctxt.state.blend_equation = gl::MIN;
            }
            if !ctxt.state.enabled_blend {
                ctxt.gl.Enable(gl::BLEND);
                ctxt.state.enabled_blend = true;
            }
            None
        },
        Some(BlendingFunction::Max) => unsafe {
            if ctxt.state.blend_equation != gl::MAX {
                ctxt.gl.BlendEquation(gl::MAX);
                ctxt.state.blend_equation = gl::MAX;
            }
            if !ctxt.state.enabled_blend {
                ctxt.gl.Enable(gl::BLEND);
                ctxt.state.enabled_blend = true;
            }
            None
        },
        // The three factor-based equations: set the equation, enable blending,
        // and forward the (source, destination) factors for glBlendFunc below.
        Some(BlendingFunction::Addition { source, destination }) => unsafe {
            if ctxt.state.blend_equation != gl::FUNC_ADD {
                ctxt.gl.BlendEquation(gl::FUNC_ADD);
                ctxt.state.blend_equation = gl::FUNC_ADD;
            }
            if !ctxt.state.enabled_blend {
                ctxt.gl.Enable(gl::BLEND);
                ctxt.state.enabled_blend = true;
            }
            Some((source, destination))
        },
        Some(BlendingFunction::Subtraction { source, destination }) => unsafe {
            if ctxt.state.blend_equation != gl::FUNC_SUBTRACT {
                ctxt.gl.BlendEquation(gl::FUNC_SUBTRACT);
                ctxt.state.blend_equation = gl::FUNC_SUBTRACT;
            }
            if !ctxt.state.enabled_blend {
                ctxt.gl.Enable(gl::BLEND);
                ctxt.state.enabled_blend = true;
            }
            Some((source, destination))
        },
        Some(BlendingFunction::ReverseSubtraction { source, destination }) => unsafe {
            if ctxt.state.blend_equation != gl::FUNC_REVERSE_SUBTRACT {
                ctxt.gl.BlendEquation(gl::FUNC_REVERSE_SUBTRACT);
                ctxt.state.blend_equation = gl::FUNC_REVERSE_SUBTRACT;
            }
            if !ctxt.state.enabled_blend {
                ctxt.gl.Enable(gl::BLEND);
                ctxt.state.enabled_blend = true;
            }
            Some((source, destination))
        },
        // `None` (no blending requested): blend state is left as-is.
        _ => None
    };
    // Apply the factors collected above, again skipping redundant calls.
    if let Some((source, destination)) = blend_factors {
        let source = source.to_glenum();
        let destination = destination.to_glenum();
        if ctxt.state.blend_func != (source, destination) {
            unsafe { ctxt.gl.BlendFunc(source, destination) };
            ctxt.state.blend_func = (source, destination);
        }
    };
}
// Applies the (R, G, B, A) color write mask, issuing glColorMask only when the
// cached state differs from the requested one.
fn sync_color_mask(ctxt: &mut context::CommandContext, mask: (bool, bool, bool, bool)) {
    // Normalize the Rust bools to the 0/1 representation stored in the state
    // cache so the comparison below uses the same form as glColorMask.
    let (r, g, b, a) = mask;
    let new_mask = (
        if r { 1 } else { 0 },
        if g { 1 } else { 0 },
        if b { 1 } else { 0 },
        if a { 1 } else { 0 },
    );
    if ctxt.state.color_mask != new_mask {
        unsafe {
            ctxt.gl.ColorMask(new_mask.0, new_mask.1, new_mask.2, new_mask.3);
        }
        ctxt.state.color_mask = new_mask;
    }
}
// Lazily updates glLineWidth; `None` leaves the current line width untouched.
fn sync_line_width(ctxt: &mut context::CommandContext, line_width: Option<f32>) {
    match line_width {
        // Only touch GL when a width is requested and it differs from the cache.
        Some(width) if ctxt.state.line_width != width => unsafe {
            ctxt.gl.LineWidth(width);
            ctxt.state.line_width = width;
        },
        _ => (),
    }
}
// Lazily updates glPointSize; `None` leaves the current point size untouched.
fn sync_point_size(ctxt: &mut context::CommandContext, point_size: Option<f32>) {
    match point_size {
        // Only touch GL when a size is requested and it differs from the cache.
        Some(size) if ctxt.state.point_size != size => unsafe {
            ctxt.gl.PointSize(size);
            ctxt.state.point_size = size;
        },
        _ => (),
    }
}
// Synchronizes back-face culling and the polygon rasterization mode.
// note: we never change the value of `glFrontFace`, whose default is GL_CCW;
// that's why `CullClockWise` maps to `GL_BACK` for example.
fn sync_polygon_mode(ctxt: &mut context::CommandContext, backface_culling: BackfaceCullingMode,
                     polygon_mode: PolygonMode)
{
    // Map the culling mode to the face to cull; `None` disables culling.
    let culled_face = match backface_culling {
        BackfaceCullingMode::CullingDisabled => None,
        BackfaceCullingMode::CullCounterClockWise => Some(gl::FRONT),
        BackfaceCullingMode::CullClockWise => Some(gl::BACK),
    };
    match culled_face {
        None => unsafe {
            if ctxt.state.enabled_cull_face {
                ctxt.gl.Disable(gl::CULL_FACE);
                ctxt.state.enabled_cull_face = false;
            }
        },
        Some(face) => unsafe {
            if !ctxt.state.enabled_cull_face {
                ctxt.gl.Enable(gl::CULL_FACE);
                ctxt.state.enabled_cull_face = true;
            }
            if ctxt.state.cull_face != face {
                ctxt.gl.CullFace(face);
                ctxt.state.cull_face = face;
            }
        },
    }
    // polygon mode (fill/line/point), always applied to both faces
    unsafe {
        let polygon_mode = polygon_mode.to_glenum();
        if ctxt.state.polygon_mode != polygon_mode {
            ctxt.gl.PolygonMode(gl::FRONT_AND_BACK, polygon_mode);
            ctxt.state.polygon_mode = polygon_mode;
        }
    }
}
// Enables or disables GL_MULTISAMPLE to match the requested flag, doing
// nothing when the cached state already agrees.
fn sync_multisampling(ctxt: &mut context::CommandContext, multisampling: bool) {
    if ctxt.state.enabled_multisample == multisampling {
        return;
    }
    unsafe {
        if multisampling {
            ctxt.gl.Enable(gl::MULTISAMPLE);
        } else {
            ctxt.gl.Disable(gl::MULTISAMPLE);
        }
    }
    ctxt.state.enabled_multisample = multisampling;
}
// Enables or disables GL_DITHER to match the requested flag, doing nothing
// when the cached state already agrees.
fn sync_dithering(ctxt: &mut context::CommandContext, dithering: bool) {
    if ctxt.state.enabled_dither == dithering {
        return;
    }
    unsafe {
        if dithering {
            ctxt.gl.Enable(gl::DITHER);
        } else {
            ctxt.gl.Disable(gl::DITHER);
        }
    }
    ctxt.state.enabled_dither = dithering;
}
// Applies the viewport and scissor settings. When no explicit viewport is
// given, the whole surface is used. Dimensions are validated against the
// implementation's maximum viewport size; redundant GL calls are skipped
// through the state cache.
fn sync_viewport_scissor(ctxt: &mut context::CommandContext, viewport: Option<Rect>,
                         scissor: Option<Rect>, surface_dimensions: (u32, u32))
{
    // Compute the target viewport rectangle, then apply it lazily.
    let new_viewport = match viewport {
        Some(rect) => {
            assert!(rect.width <= ctxt.capabilities.max_viewport_dims.0 as u32,
                    "Viewport dimensions are too large");
            assert!(rect.height <= ctxt.capabilities.max_viewport_dims.1 as u32,
                    "Viewport dimensions are too large");
            (rect.left as gl::types::GLint, rect.bottom as gl::types::GLint,
             rect.width as gl::types::GLsizei, rect.height as gl::types::GLsizei)
        },
        None => {
            assert!(surface_dimensions.0 <= ctxt.capabilities.max_viewport_dims.0 as u32,
                    "Viewport dimensions are too large");
            assert!(surface_dimensions.1 <= ctxt.capabilities.max_viewport_dims.1 as u32,
                    "Viewport dimensions are too large");
            (0, 0, surface_dimensions.0 as gl::types::GLsizei,
             surface_dimensions.1 as gl::types::GLsizei)
        },
    };
    if ctxt.state.viewport != Some(new_viewport) {
        unsafe { ctxt.gl.Viewport(new_viewport.0, new_viewport.1, new_viewport.2, new_viewport.3); }
        ctxt.state.viewport = Some(new_viewport);
    }
    // Scissor: update and enable when requested, otherwise ensure the test is off.
    match scissor {
        Some(rect) => {
            let new_scissor = (rect.left as gl::types::GLint, rect.bottom as gl::types::GLint,
                               rect.width as gl::types::GLsizei,
                               rect.height as gl::types::GLsizei);
            unsafe {
                if ctxt.state.scissor != Some(new_scissor) {
                    ctxt.gl.Scissor(new_scissor.0, new_scissor.1, new_scissor.2, new_scissor.3);
                    ctxt.state.scissor = Some(new_scissor);
                }
                if !ctxt.state.enabled_scissor_test {
                    ctxt.gl.Enable(gl::SCISSOR_TEST);
                    ctxt.state.enabled_scissor_test = true;
                }
            }
        },
        None => unsafe {
            if ctxt.state.enabled_scissor_test {
                ctxt.gl.Disable(gl::SCISSOR_TEST);
                ctxt.state.enabled_scissor_test = false;
            }
        },
    }
}
// Synchronizes GL_RASTERIZER_DISCARD: primitives should be rasterized exactly
// when `draw_primitives` is true, so the cached "enabled" flag must equal the
// negation of `draw_primitives`. When the two are equal, the state is stale
// and needs flipping.
fn sync_rasterizer_discard(ctxt: &mut context::CommandContext, draw_primitives: bool) {
    if ctxt.state.enabled_rasterizer_discard != draw_primitives {
        // Cached state already matches the request; nothing to do.
        return;
    }
    // Pick the core capability or the EXT fallback; callers must have verified
    // that one of the two is available (hence unreachable!).
    let capability = if ctxt.version >= &Version(Api::Gl, 3, 0) {
        gl::RASTERIZER_DISCARD
    } else if ctxt.extensions.gl_ext_transform_feedback {
        gl::RASTERIZER_DISCARD_EXT
    } else {
        unreachable!();
    };
    if draw_primitives {
        unsafe { ctxt.gl.Disable(capability); }
        ctxt.state.enabled_rasterizer_discard = false;
    } else {
        unsafe { ctxt.gl.Enable(capability); }
        ctxt.state.enabled_rasterizer_discard = true;
    }
}
// Updates GL_PATCH_VERTICES for tessellation; `None` leaves the current value
// untouched. Only issues the GL call when the cached value differs.
unsafe fn sync_vertices_per_patch(ctxt: &mut context::CommandContext, vertices_per_patch: Option<u16>) {
    let requested = match vertices_per_patch {
        Some(v) => v as gl::types::GLint,
        None => return,
    };
    if ctxt.state.patch_patch_vertices != requested {
        ctxt.gl.PatchParameteri(gl::PATCH_VERTICES, requested);
        ctxt.state.patch_patch_vertices = requested;
    }
}
// Starts/stops GL query objects so that exactly the queries requested by the
// draw parameters are active. Returns `DrawError::WrongQueryOperation` when a
// requested query slot would have to restart a query object that was already
// used (GL forbids resuming a finished query).
fn sync_queries(ctxt: &mut context::CommandContext,
                samples_passed_query: Option<SamplesQueryParam>,
                time_elapsed_query: Option<&TimeElapsedQuery>,
                primitives_generated_query: Option<&PrimitivesGeneratedQuery>,
                transform_feedback_primitives_written_query:
                    Option<&TransformFeedbackPrimitivesWrittenQuery>)
                -> Result<(), DrawError>
{
    // FIXME: doesn't use ARB/EXT variants
    if let Some(spq) = samples_passed_query {
        let (ty, id, unused) = match spq {
            SamplesQueryParam::SamplesPassedQuery(q) => (q.get_type(), q.get_id(), q.is_unused()),
            SamplesQueryParam::AnySamplesPassedQuery(q) => (q.get_type(), q.get_id(), q.is_unused()),
        };
        // The three sample-counting targets are mutually exclusive; before
        // beginning one of them, end whichever of the other two is running.
        if ty == gl::SAMPLES_PASSED {
            if ctxt.state.any_samples_passed_query != 0 {
                ctxt.state.any_samples_passed_query = 0;
                unsafe { ctxt.gl.EndQuery(gl::ANY_SAMPLES_PASSED); }
            }
            if ctxt.state.any_samples_passed_conservative_query != 0 {
                ctxt.state.any_samples_passed_conservative_query = 0;
                unsafe { ctxt.gl.EndQuery(gl::ANY_SAMPLES_PASSED_CONSERVATIVE); }
            }
            // Switch to the requested query object if a different one (or none)
            // is currently bound to this target.
            if ctxt.state.samples_passed_query != id {
                if !unused {
                    // Restarting an already-used query would discard its result.
                    return Err(DrawError::WrongQueryOperation);
                }
                if ctxt.state.samples_passed_query != 0 {
                    unsafe { ctxt.gl.EndQuery(gl::SAMPLES_PASSED); }
                }
                unsafe { ctxt.gl.BeginQuery(gl::SAMPLES_PASSED, id); }
                ctxt.state.samples_passed_query = id;
                match spq {
                    SamplesQueryParam::SamplesPassedQuery(q) => q.set_used(),
                    SamplesQueryParam::AnySamplesPassedQuery(q) => q.set_used(),
                };
            }
        } else if ty == gl::ANY_SAMPLES_PASSED {
            // Same dance as above with ANY_SAMPLES_PASSED as the active target.
            if ctxt.state.samples_passed_query != 0 {
                ctxt.state.samples_passed_query = 0;
                unsafe { ctxt.gl.EndQuery(gl::SAMPLES_PASSED); }
            }
            if ctxt.state.any_samples_passed_conservative_query != 0 {
                ctxt.state.any_samples_passed_conservative_query = 0;
                unsafe { ctxt.gl.EndQuery(gl::ANY_SAMPLES_PASSED_CONSERVATIVE); }
            }
            if ctxt.state.any_samples_passed_query != id {
                if !unused {
                    return Err(DrawError::WrongQueryOperation);
                }
                if ctxt.state.any_samples_passed_query != 0 {
                    unsafe { ctxt.gl.EndQuery(gl::ANY_SAMPLES_PASSED); }
                }
                unsafe { ctxt.gl.BeginQuery(gl::ANY_SAMPLES_PASSED, id); }
                ctxt.state.any_samples_passed_query = id;
                match spq {
                    SamplesQueryParam::SamplesPassedQuery(q) => q.set_used(),
                    SamplesQueryParam::AnySamplesPassedQuery(q) => q.set_used(),
                };
            }
        } else if ty == gl::ANY_SAMPLES_PASSED_CONSERVATIVE {
            // Same dance with the conservative variant as the active target.
            if ctxt.state.samples_passed_query != 0 {
                ctxt.state.samples_passed_query = 0;
                unsafe { ctxt.gl.EndQuery(gl::SAMPLES_PASSED); }
            }
            if ctxt.state.any_samples_passed_query != 0 {
                ctxt.state.any_samples_passed_query = 0;
                unsafe { ctxt.gl.EndQuery(gl::ANY_SAMPLES_PASSED); }
            }
            if ctxt.state.any_samples_passed_conservative_query != id {
                if !unused {
                    return Err(DrawError::WrongQueryOperation);
                }
                if ctxt.state.any_samples_passed_conservative_query != 0 {
                    unsafe { ctxt.gl.EndQuery(gl::ANY_SAMPLES_PASSED_CONSERVATIVE); }
                }
                unsafe { ctxt.gl.BeginQuery(gl::ANY_SAMPLES_PASSED_CONSERVATIVE, id); }
                ctxt.state.any_samples_passed_conservative_query = id;
                match spq {
                    SamplesQueryParam::SamplesPassedQuery(q) => q.set_used(),
                    SamplesQueryParam::AnySamplesPassedQuery(q) => q.set_used(),
                };
            }
        } else {
            // get_type() can only return one of the three targets above.
            unreachable!();
        }
    } else {
        // No samples query requested: end any of the three that is running.
        if ctxt.state.samples_passed_query != 0 {
            ctxt.state.samples_passed_query = 0;
            unsafe { ctxt.gl.EndQuery(gl::SAMPLES_PASSED); }
        }
        if ctxt.state.any_samples_passed_query != 0 {
            ctxt.state.any_samples_passed_query = 0;
            unsafe { ctxt.gl.EndQuery(gl::ANY_SAMPLES_PASSED); }
        }
        if ctxt.state.any_samples_passed_conservative_query != 0 {
            ctxt.state.any_samples_passed_conservative_query = 0;
            unsafe { ctxt.gl.EndQuery(gl::ANY_SAMPLES_PASSED_CONSERVATIVE); }
        }
    }
    // Helper for the three single-target queries below: compares the cached
    // query id in `ctxt.state.$state` against the requested `Option<&Query>`
    // (the parameter shares the field's name) and begins/ends the GL query
    // accordingly, with the same "already used" protection as above.
    macro_rules! test {
        ($state:ident, $new:expr, $glenum:expr) => {
            match (&mut ctxt.state.$state, $state) {
                (&mut 0, None) => (),
                (&mut existing, Some(new)) if existing == new.get_id() => (),
                (existing, Some(new)) => {
                    if !new.is_unused() {
                        return Err(DrawError::WrongQueryOperation);
                    }
                    new.set_used();
                    *existing = new.get_id();
                    unsafe { ctxt.gl.BeginQuery($glenum, *existing); }
                },
                (existing, None) => {
                    *existing = 0;
                    unsafe { ctxt.gl.EndQuery($glenum); }
                }
            }
        }
    }
    test!(time_elapsed_query, time_elapsed_query, gl::TIME_ELAPSED);
    test!(primitives_generated_query, primitives_generated_query, gl::PRIMITIVES_GENERATED);
    test!(transform_feedback_primitives_written_query, transform_feedback_primitives_written_query,
          gl::TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN);
    Ok(())
}
// Synchronizes conditional rendering: begins, ends or switches the
// glBeginConditionalRender state so that it matches `condition`. Supports the
// core (GL 3.0+) entry points and the NV extension fallback.
fn sync_conditional_render(ctxt: &mut context::CommandContext,
                           condition: Option<ConditionalRendering>)
{
    // Maps the (wait, per_region) flags to the corresponding GL mode enum.
    fn get_mode(wait: bool, per_region: bool) -> gl::types::GLenum {
        match (wait, per_region) {
            (true, true) => gl::QUERY_BY_REGION_WAIT,
            (true, false) => gl::QUERY_WAIT,
            (false, true) => gl::QUERY_BY_REGION_NO_WAIT,
            (false, false) => gl::QUERY_NO_WAIT,
        }
    }
    // Extracts the GL query object id from either samples-query variant.
    fn get_id(q: &SamplesQueryParam) -> gl::types::GLuint {
        match q {
            &SamplesQueryParam::SamplesPassedQuery(ref q) => q.get_id(),
            &SamplesQueryParam::AnySamplesPassedQuery(ref q) => q.get_id(),
        }
    }
    // FIXME: check whether the query is binded to a query slot
    match (&mut ctxt.state.conditional_render, condition) {
        // Nothing active, nothing requested.
        (&mut None, None) => (),
        // Active but no longer requested: end conditional rendering.
        (cur @ &mut Some(_), None) => {
            if ctxt.version >= &Version(Api::Gl, 3, 0) {
                unsafe { ctxt.gl.EndConditionalRender() };
            } else if ctxt.extensions.gl_nv_conditional_render {
                unsafe { ctxt.gl.EndConditionalRenderNV() };
            } else {
                unreachable!();
            }
            *cur = None;
        },
        // Not active but requested: begin conditional rendering.
        (cur @ &mut None, Some(ConditionalRendering { query, wait, per_region })) => {
            let mode = get_mode(wait, per_region);
            let id = get_id(&query);
            if ctxt.version >= &Version(Api::Gl, 3, 0) {
                unsafe { ctxt.gl.BeginConditionalRender(id, mode) };
            } else if ctxt.extensions.gl_nv_conditional_render {
                unsafe { ctxt.gl.BeginConditionalRenderNV(id, mode) };
            } else {
                unreachable!();
            }
            *cur = Some((id, mode));
        },
        // Active and requested: restart only if the query or mode effectively changed.
        (&mut Some((ref mut id, ref mut mode)), Some(ConditionalRendering { query, wait, per_region })) => {
            let new_mode = get_mode(wait, per_region);
            let new_id = get_id(&query);
            // determining whether we have to change the mode
            // if the new mode is "no_wait" but the old mode is "wait", we don't need to change it
            // (a "wait" mode already satisfies the weaker "no_wait" request)
            let no_mode_change = match (new_mode, *mode) {
                (a, b) if a == b => true,
                (gl::QUERY_NO_WAIT, gl::QUERY_WAIT) => true,
                (gl::QUERY_BY_REGION_NO_WAIT, gl::QUERY_BY_REGION_WAIT) => true,
                _ => false,
            };
            if !no_mode_change || new_id != *id {
                if ctxt.version >= &Version(Api::Gl, 3, 0) {
                    unsafe {
                        ctxt.gl.EndConditionalRender();
                        ctxt.gl.BeginConditionalRender(new_id, new_mode);
                    }
                } else if ctxt.extensions.gl_nv_conditional_render {
                    unsafe {
                        ctxt.gl.EndConditionalRenderNV();
                        ctxt.gl.BeginConditionalRenderNV(new_id, new_mode);
                    }
                } else {
                    unreachable!();
                }
            }
            *id = new_id;
            *mode = new_mode;
        },
    }
}
// Synchronizes line/polygon smoothing (antialiasing hints) depending on the
// primitive type being drawn. Returns `SmoothingNotSupported` on OpenGL ES
// (the version check below only passes on desktop GL) and for point
// primitives, which have no smoothing hint.
// NOTE: the closing brace of this function sits on the next line of the file.
fn sync_smooth(ctxt: &mut context::CommandContext,
               smooth: Option<Smooth>,
               primitive_type: PrimitiveType) -> Result<(), DrawError> {
    if let Some(smooth) = smooth {
        // check if smoothing is supported, it isn't on OpenGL ES
        if !(ctxt.version >= &Version(Api::Gl, 1, 0)) {
            return Err(DrawError::SmoothingNotSupported);
        }
        let hint = smooth.to_glenum();
        match primitive_type {
            // point primitives cannot be smoothed
            PrimitiveType::Points =>
                return Err(DrawError::SmoothingNotSupported),
            // line primitives: GL_LINE_SMOOTH + its hint
            PrimitiveType::LinesList | PrimitiveType::LinesListAdjacency |
            PrimitiveType::LineStrip | PrimitiveType::LineStripAdjacency |
            PrimitiveType::LineLoop => unsafe {
                if !ctxt.state.enabled_line_smooth {
                    ctxt.state.enabled_line_smooth = true;
                    ctxt.gl.Enable(gl::LINE_SMOOTH);
                }
                // smooth.0 caches the line hint, smooth.1 the polygon hint
                if ctxt.state.smooth.0 != hint {
                    ctxt.state.smooth.0 = hint;
                    ctxt.gl.Hint(gl::LINE_SMOOTH_HINT, hint);
                }
            },
            // everything else is a polygon primitive: GL_POLYGON_SMOOTH
            _ => unsafe {
                if !ctxt.state.enabled_polygon_smooth {
                    ctxt.state.enabled_polygon_smooth = true;
                    ctxt.gl.Enable(gl::POLYGON_SMOOTH);
                }
                if ctxt.state.smooth.1 != hint {
                    ctxt.state.smooth.1 = hint;
                    ctxt.gl.Hint(gl::POLYGON_SMOOTH_HINT, hint);
                }
            }
        }
    }
    else {
        // No smoothing requested: disable whichever smoothing mode applies to
        // this primitive type (points never had one).
        match primitive_type {
            PrimitiveType::Points => (),
            PrimitiveType::LinesList | PrimitiveType::LinesListAdjacency |
            PrimitiveType::LineStrip | PrimitiveType::LineStripAdjacency |
            PrimitiveType::LineLoop => unsafe {
                if ctxt.state.enabled_line_smooth {
                    ctxt.state.enabled_line_smooth = false;
                    ctxt.gl.Disable(gl::LINE_SMOOTH);
                }
            },
            _ => unsafe {
                if ctxt.state.enabled_polygon_smooth {
                    ctxt.state.enabled_polygon_smooth = false;
                    ctxt.gl.Disable(gl::POLYGON_SMOOTH);
                }
            }
        }
    }
    Ok(())
}<|fim▁end|> | // TODO: programs created from binaries have the wrong value
// for `has_tessellation_shaders` |
<|file_name|>test_model.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pkg_resources
import unittest
from datetime import datetime, timedelta
from pylons import tmpl_context as c
from tg import config
import mock
from alluratest.controller import setup_basic_test, setup_global_objects, setup_trove_categories
from allura.tests import decorators as td
from allura.model import User, Project, TroveCategory
from allura.lib import helpers as h
from allura import model as M
from forgeuserstats.model import stats as USM
# important to be distinct from 'test' which ForgeGit uses, so that the
# tests can run in parallel and not clobber each other
test_project_with_repo = 'test2'
with_git = td.with_tool(test_project_with_repo, 'Git',
'src-git', 'Git', type='git')
class TestUserStats(unittest.TestCase):
def setUp(self):
    """Bootstrap the test environment and bind a known user to the request context."""
    setup_basic_test()
    setup_global_objects()
    # Stats are tracked per user; use a dedicated test account so counters start clean.
    self.user = User.by_username('test-user-2')
    c.user = self.user
def test_init_values(self):
    """A freshly created user must start with every statistic zeroed out,
    both in the all-time totals and in the last-month aggregates."""
    stats = self.user.stats

    assert stats.tot_logins_count == 0
    assert stats.getLastMonthLogins() == 0

    # Artifact counters: all-time and last-month views are both empty.
    for artifacts in (stats.getArtifacts(), stats.getLastMonthArtifacts()):
        assert artifacts['created'] == 0
        assert artifacts['modified'] == 0

    # Ticket counters, including the average solving time which is
    # undefined (None) when no ticket has ever been solved.
    for tickets in (stats.getTickets(), stats.getLastMonthTickets()):
        assert tickets['assigned'] == 0
        assert tickets['solved'] == 0
        assert tickets['revoked'] == 0
        assert tickets['averagesolvingtime'] is None

    # Commit counters.
    for commits in (stats.getCommits(), stats.getLastMonthCommits()):
        assert commits['number'] == 0
        assert commits['lines'] == 0
@td.with_user_project('test-user-2')
def test_create_artifact_stats(self):
    """Creating artifacts must bump the 'created' counters (total, per-type,
    per-category, last-month) while leaving every 'modified' counter alone."""
    setup_trove_categories()
    p = Project.query.get(shortname='u/test-user-2')
    topic = TroveCategory.query.get(shortname='scientific')

    # Snapshot all counters so assertions below are relative deltas.
    init_lm_art = self.user.stats.getLastMonthArtifacts()
    init_art = self.user.stats.getArtifacts()
    init_art_wiki = self.user.stats.getArtifacts(art_type='Wiki')
    init_art_by_type = self.user.stats.getArtifactsByType()
    init_lm_art_by_type = self.user.stats.getLastMonthArtifactsByType()
    init_art_sci = self.user.stats.getArtifacts(category=topic._id)

    # An artifact created "now" counts in both totals and last-month stats.
    self.user.stats.addNewArtifact('Wiki', datetime.utcnow(), p)
    lm_art = self.user.stats.getLastMonthArtifacts()
    artifacts = self.user.stats.getArtifacts()
    art_wiki = self.user.stats.getArtifacts(art_type='Wiki')
    art_by_type = self.user.stats.getArtifactsByType()
    lm_art_by_type = self.user.stats.getLastMonthArtifactsByType()
    assert lm_art['created'] == init_lm_art['created'] + 1
    assert lm_art['modified'] == init_lm_art['modified']
    assert artifacts['created'] == init_art['created'] + 1
    assert artifacts['modified'] == init_art['modified']
    assert art_wiki['created'] == init_art_wiki['created'] + 1
    assert art_wiki['modified'] == init_art_wiki['modified']
    assert art_by_type['Wiki']['created'] == init_art_by_type['Wiki']['created'] + 1
    assert art_by_type['Wiki']['modified'] == init_art_by_type['Wiki']['modified']
    assert lm_art_by_type['Wiki']['created'] == init_lm_art_by_type['Wiki']['created'] + 1
    assert lm_art_by_type['Wiki']['modified'] == init_lm_art_by_type['Wiki']['modified']

    # An artifact dated more than a month ago only counts in the totals.
    # In that case, last month stats should not be changed
    new_date = datetime.utcnow() + timedelta(-32)
    self.user.stats.addNewArtifact('Wiki', new_date, p)
    lm_art = self.user.stats.getLastMonthArtifacts()
    artifacts = self.user.stats.getArtifacts()
    art_wiki = self.user.stats.getArtifacts(art_type='Wiki')
    art_by_type = self.user.stats.getArtifactsByType()
    lm_art_by_type = self.user.stats.getLastMonthArtifactsByType()
    assert lm_art['created'] == init_lm_art['created'] + 1
    assert lm_art['modified'] == init_lm_art['modified']
    assert artifacts['created'] == init_art['created'] + 2
    assert artifacts['modified'] == init_art['modified']
    assert art_wiki['created'] == init_art_wiki['created'] + 2
    assert art_wiki['modified'] == init_art_wiki['modified']
    assert art_by_type['Wiki']['created'] == init_art_by_type['Wiki']['created'] + 2
    assert art_by_type['Wiki']['modified'] == init_art_by_type['Wiki']['modified']
    assert lm_art_by_type['Wiki']['created'] == init_lm_art_by_type['Wiki']['created'] + 1
    assert lm_art_by_type['Wiki']['modified'] == init_lm_art_by_type['Wiki']['modified']

    # Tagging the project with a trove topic makes the new artifact count
    # in the per-category stats too.
    p.trove_topic = [topic._id]
    self.user.stats.addNewArtifact('Wiki', datetime.utcnow(), p)
    lm_art = self.user.stats.getLastMonthArtifacts()
    artifacts = self.user.stats.getArtifacts()
    art_wiki = self.user.stats.getArtifacts(art_type='Wiki')
    art_by_type = self.user.stats.getArtifactsByType()
    lm_art_by_type = self.user.stats.getLastMonthArtifactsByType()
    art_sci = self.user.stats.getArtifacts(category=topic._id)
    art_by_cat = self.user.stats.getArtifactsByCategory(detailed=True)
    assert lm_art['created'] == init_lm_art['created'] + 2
    assert lm_art['modified'] == init_lm_art['modified']
    assert artifacts['created'] == init_art['created'] + 3
    assert artifacts['modified'] == init_art['modified']
    assert art_wiki['created'] == init_art_wiki['created'] + 3
    assert art_wiki['modified'] == init_art_wiki['modified']
    assert art_by_type['Wiki']['created'] == init_art_by_type['Wiki']['created'] + 3
    assert art_by_type['Wiki']['modified'] == init_art_by_type['Wiki']['modified']
    assert lm_art_by_type['Wiki']['created'] == init_lm_art_by_type['Wiki']['created'] + 2
    assert lm_art_by_type['Wiki']['modified'] == init_lm_art_by_type['Wiki']['modified']
    assert art_sci['created'] == init_art_sci['created'] + 1
    assert art_sci['modified'] == init_art_sci['modified']
    assert dict(messagetype='Wiki', created=1,
                modified=0) in art_by_cat[topic]
    # The non-detailed view aggregates per-category counters into one dict.
    art_by_cat = self.user.stats.getArtifactsByCategory(detailed=False)
    assert art_by_cat[topic]['created'] == 1 and art_by_cat[
        topic]['modified'] == 0
@td.with_user_project('test-user-2')
def test_modify_artifact_stats(self):
    """Modifying artifacts must bump the 'modified' counters (total, per-type,
    per-category, last-month) while leaving every 'created' counter alone."""
    setup_trove_categories()
    p = Project.query.get(shortname='u/test-user-2')
    topic = TroveCategory.query.get(shortname='scientific')

    # Snapshot all counters so assertions below are relative deltas.
    init_lm_art = self.user.stats.getLastMonthArtifacts()
    init_art = self.user.stats.getArtifacts()
    init_art_wiki = self.user.stats.getArtifacts(art_type='Wiki')
    init_art_by_type = self.user.stats.getArtifactsByType()
    init_lm_art_by_type = self.user.stats.getLastMonthArtifactsByType()
    init_art_sci = self.user.stats.getArtifacts(category=topic._id)

    # A modification dated "now" counts in both totals and last-month stats.
    self.user.stats.addModifiedArtifact('Wiki', datetime.utcnow(), p)
    lm_art = self.user.stats.getLastMonthArtifacts()
    artifacts = self.user.stats.getArtifacts()
    art_wiki = self.user.stats.getArtifacts(art_type='Wiki')
    art_by_type = self.user.stats.getArtifactsByType()
    lm_art_by_type = self.user.stats.getLastMonthArtifactsByType()
    assert lm_art['created'] == init_lm_art['created']
    assert lm_art['modified'] == init_lm_art['modified'] + 1
    assert artifacts['created'] == init_art['created']
    assert artifacts['modified'] == init_art['modified'] + 1
    assert art_wiki['created'] == init_art_wiki['created']
    assert art_wiki['modified'] == init_art_wiki['modified'] + 1
    assert art_by_type['Wiki']['created'] == init_art_by_type['Wiki']['created']
    assert art_by_type['Wiki']['modified'] == init_art_by_type['Wiki']['modified'] + 1
    assert lm_art_by_type['Wiki']['created'] == init_lm_art_by_type['Wiki']['created']
    assert lm_art_by_type['Wiki']['modified'] == init_lm_art_by_type['Wiki']['modified'] + 1

    # A modification dated more than a month ago only counts in the totals.
    # In that case, last month stats should not be changed
    new_date = datetime.utcnow() + timedelta(-32)
    self.user.stats.addModifiedArtifact('Wiki', new_date, p)
    lm_art = self.user.stats.getLastMonthArtifacts()
    artifacts = self.user.stats.getArtifacts()
    art_wiki = self.user.stats.getArtifacts(art_type='Wiki')
    art_by_type = self.user.stats.getArtifactsByType()
    lm_art_by_type = self.user.stats.getLastMonthArtifactsByType()
    assert lm_art['created'] == init_lm_art['created']
    assert lm_art['modified'] == init_lm_art['modified'] + 1
    assert artifacts['created'] == init_art['created']
    assert artifacts['modified'] == init_art['modified'] + 2
    assert art_wiki['created'] == init_art_wiki['created']
    assert art_wiki['modified'] == init_art_wiki['modified'] + 2
    assert art_by_type['Wiki']['created'] == init_art_by_type['Wiki']['created']
    assert art_by_type['Wiki']['modified'] == init_art_by_type['Wiki']['modified'] + 2
    assert lm_art_by_type['Wiki']['created'] == init_lm_art_by_type['Wiki']['created']
    assert lm_art_by_type['Wiki']['modified'] == init_lm_art_by_type['Wiki']['modified'] + 1

    # Tagging the project with a trove topic makes the modification count
    # in the per-category stats too.
    p.trove_topic = [topic._id]
    self.user.stats.addModifiedArtifact('Wiki', datetime.utcnow(), p)
    lm_art = self.user.stats.getLastMonthArtifacts()
    artifacts = self.user.stats.getArtifacts()
    art_wiki = self.user.stats.getArtifacts(art_type='Wiki')
    art_by_type = self.user.stats.getArtifactsByType()
    lm_art_by_type = self.user.stats.getLastMonthArtifactsByType()
    art_sci = self.user.stats.getArtifacts(category=topic._id)
    art_by_cat = self.user.stats.getArtifactsByCategory(detailed=True)
    assert lm_art['created'] == init_lm_art['created']
    assert lm_art['modified'] == init_lm_art['modified'] + 2
    assert artifacts['created'] == init_art['created']
    assert artifacts['modified'] == init_art['modified'] + 3
    assert art_wiki['created'] == init_art_wiki['created']
    assert art_wiki['modified'] == init_art_wiki['modified'] + 3
    assert art_by_type['Wiki']['created'] == init_art_by_type['Wiki']['created']
    assert art_by_type['Wiki']['modified'] == init_art_by_type['Wiki']['modified'] + 3
    assert lm_art_by_type['Wiki']['created'] == init_lm_art_by_type['Wiki']['created']
    assert lm_art_by_type['Wiki']['modified'] == init_lm_art_by_type['Wiki']['modified'] + 2
    assert art_sci['created'] == init_art_sci['created']
    assert art_sci['modified'] == init_art_sci['modified'] + 1
    assert dict(messagetype='Wiki', created=0,
                modified=1) in art_by_cat[topic]
    # The non-detailed view aggregates per-category counters into one dict.
    art_by_cat = self.user.stats.getArtifactsByCategory(detailed=False)
    assert art_by_cat[topic]['created'] == 0 and art_by_cat[
        topic]['modified'] == 1
@td.with_user_project('test-user-2')
def test_ticket_stats(self):
setup_trove_categories()<|fim▁hole|>
init_lm_tickets_art = self.user.stats.getLastMonthArtifacts(
art_type='Ticket')
init_tickets_art = self.user.stats.getArtifacts(art_type='Ticket')
init_tickets_sci_art = self.user.stats.getArtifacts(category=topic._id)
init_tickets = self.user.stats.getTickets()
init_lm_tickets = self.user.stats.getLastMonthTickets()
self.user.stats.addNewArtifact('Ticket', create_time, p)
lm_tickets_art = self.user.stats.getLastMonthArtifacts(
art_type='Ticket')
tickets_art = self.user.stats.getArtifacts(art_type='Ticket')
tickets_sci_art = self.user.stats.getArtifacts(category=topic._id)
assert lm_tickets_art['created'] == init_lm_tickets_art['created'] + 1
assert lm_tickets_art['modified'] == init_lm_tickets_art['modified']
assert tickets_art['created'] == init_tickets_art['created'] + 1
assert tickets_art['modified'] == init_tickets_art['modified']
assert tickets_sci_art['created'] == tickets_sci_art['created']
assert tickets_sci_art['modified'] == tickets_sci_art['modified']
p.trove_topic = [topic._id]
self.user.stats.addAssignedTicket(create_time, p)
tickets = self.user.stats.getTickets()
lm_tickets = self.user.stats.getLastMonthTickets()
assert tickets['assigned'] == init_tickets['assigned'] + 1
assert tickets['revoked'] == init_tickets['revoked']
assert tickets['solved'] == init_tickets['solved']
assert tickets['averagesolvingtime'] is None
assert lm_tickets['assigned'] == init_lm_tickets['assigned'] + 1
assert lm_tickets['revoked'] == init_lm_tickets['revoked']
assert lm_tickets['solved'] == init_lm_tickets['solved']
assert lm_tickets['averagesolvingtime'] is None
self.user.stats.addRevokedTicket(create_time + timedelta(-32), p)
tickets = self.user.stats.getTickets()
assert tickets['assigned'] == init_tickets['assigned'] + 1
assert tickets['revoked'] == init_tickets['revoked'] + 1
assert tickets['solved'] == init_tickets['solved']
assert tickets['averagesolvingtime'] is None
assert lm_tickets['assigned'] == init_lm_tickets['assigned'] + 1
assert lm_tickets['revoked'] == init_lm_tickets['revoked']
assert lm_tickets['solved'] == init_lm_tickets['solved']
assert lm_tickets['averagesolvingtime'] is None
self.user.stats.addClosedTicket(
create_time, create_time + timedelta(1), p)
tickets = self.user.stats.getTickets()
lm_tickets = self.user.stats.getLastMonthTickets()
assert tickets['assigned'] == init_tickets['assigned'] + 1
assert tickets['revoked'] == init_tickets['revoked'] + 1
assert tickets['solved'] == init_tickets['solved'] + 1
solving_time = dict(seconds=0, minutes=0, days=1, hours=0)
assert tickets['averagesolvingtime'] == solving_time
assert lm_tickets['assigned'] == init_lm_tickets['assigned'] + 1
assert lm_tickets['revoked'] == init_lm_tickets['revoked']
assert lm_tickets['solved'] == init_lm_tickets['solved'] + 1
assert lm_tickets['averagesolvingtime'] == solving_time
p.trove_topic = []
self.user.stats.addClosedTicket(
create_time, create_time + timedelta(3), p)
tickets = self.user.stats.getTickets()
lm_tickets = self.user.stats.getLastMonthTickets()
solving_time = dict(seconds=0, minutes=0, days=2, hours=0)
assert tickets['assigned'] == init_tickets['assigned'] + 1
assert tickets['revoked'] == init_tickets['revoked'] + 1
assert tickets['solved'] == init_tickets['solved'] + 2
assert tickets['averagesolvingtime'] == solving_time
assert lm_tickets['assigned'] == init_lm_tickets['assigned'] + 1
assert lm_tickets['revoked'] == init_lm_tickets['revoked']
assert lm_tickets['solved'] == init_lm_tickets['solved'] + 2
assert lm_tickets['averagesolvingtime'] == solving_time
by_cat = self.user.stats.getTicketsByCategory()
lm_by_cat = self.user.stats.getLastMonthTicketsByCategory()
solving_time = dict(days=1, hours=0, minutes=0, seconds=0)
assert by_cat[topic]['assigned'] == 1
assert by_cat[topic]['revoked'] == 1
assert by_cat[topic]['solved'] == 1
assert by_cat[topic]['averagesolvingtime'] == solving_time
assert lm_by_cat[topic]['assigned'] == 1
assert lm_by_cat[topic]['revoked'] == 0
assert lm_by_cat[topic]['solved'] == 1
assert lm_by_cat[topic]['averagesolvingtime'] == solving_time
@with_git
@td.with_user_project('test-user-2')
def test_commit_stats(self):
setup_trove_categories()
p = Project.query.get(shortname='u/test-user-2')
topic = TroveCategory.query.get(shortname='scientific')
commit_time = datetime.utcnow() + timedelta(-1)
self.user.set_password('testpassword')
addr = M.EmailAddress.upsert('rcopeland@geek.net')
self.user.claim_address('rcopeland@geek.net')
repo_dir = pkg_resources.resource_filename(
'forgeuserstats', 'tests/data')
c.app.repo.fs_path = repo_dir
c.app.repo.name = 'testgit.git'
repo = c.app.repo
repo.refresh()
commit = repo.commit('HEAD')
init_commits = self.user.stats.getCommits()
assert init_commits['number'] == 4
init_lmcommits = self.user.stats.getLastMonthCommits()
assert init_lmcommits['number'] == 4
p.trove_topic = [topic._id]
self.user.stats.addCommit(commit, datetime.utcnow(), p)
commits = self.user.stats.getCommits()
assert commits['number'] == init_commits['number'] + 1
assert commits['lines'] == init_commits['lines'] + 1
lmcommits = self.user.stats.getLastMonthCommits()
assert lmcommits['number'] == init_lmcommits['number'] + 1
assert lmcommits['lines'] == init_lmcommits['lines'] + 1
by_cat = self.user.stats.getCommitsByCategory()
assert by_cat[topic]['number'] == 1
assert by_cat[topic]['lines'] == 1
lm_by_cat = self.user.stats.getLastMonthCommitsByCategory()
assert lm_by_cat[topic]['number'] == 1
assert lm_by_cat[topic]['lines'] == 1
self.user.stats.addCommit(
commit, datetime.utcnow() + timedelta(-40), p)
commits = self.user.stats.getCommits()
assert commits['number'] == init_commits['number'] + 2
assert commits['lines'] == init_commits['lines'] + 2
lmcommits = self.user.stats.getLastMonthCommits()
assert lmcommits['number'] == init_lmcommits['number'] + 1
assert lmcommits['lines'] == init_lmcommits['lines'] + 1
by_cat = self.user.stats.getCommitsByCategory()
assert by_cat[topic]['number'] == 2
assert by_cat[topic]['lines'] == 2
lm_by_cat = self.user.stats.getLastMonthCommitsByCategory()
assert lm_by_cat[topic]['number'] == 1
assert lm_by_cat[topic]['lines'] == 1
@td.with_user_project('test-user-2')
def test_login_stats(self):
init_logins = self.user.stats.tot_logins_count
init_lm_logins = self.user.stats.getLastMonthLogins()
login_datetime = datetime.utcnow()
self.user.stats.addLogin(login_datetime)
logins = self.user.stats.tot_logins_count
lm_logins = self.user.stats.getLastMonthLogins()
assert logins == init_logins + 1
assert lm_logins == init_lm_logins + 1
assert abs(self.user.stats.last_login -
login_datetime) < timedelta(seconds=1)
self.user.stats.addLogin(datetime.utcnow() + timedelta(-32))
logins = self.user.stats.tot_logins_count
lm_logins = self.user.stats.getLastMonthLogins()
assert logins == init_logins + 2
assert lm_logins == init_lm_logins + 1
assert abs(self.user.stats.last_login -
login_datetime) < timedelta(seconds=1)
def test_start_date(self):
stats = USM.UserStats(registration_date=datetime(2012, 04, 01))
self.assertEqual(stats.start_date, datetime(2012, 04, 01))
with h.push_config(config, **{'userstats.start_date': '2013-04-01'}):
self.assertEqual(stats.start_date, datetime(2013, 04, 01))
with h.push_config(config, **{'userstats.start_date': '2011-04-01'}):
self.assertEqual(stats.start_date, datetime(2012, 04, 01))
@mock.patch('allura.model.stats.difflib.unified_diff')
def test_count_loc(self, unified_diff):
stats = USM.UserStats()
newcommit = mock.Mock(
parent_ids=['deadbeef'],
diffs=mock.Mock(
changed=[mock.MagicMock()],
copied=[mock.MagicMock()],
added=[mock.MagicMock()],
),
)
unified_diff.return_value = ['+++', '---', '+line']
newcommit.tree.get_blob_by_path.return_value = mock.MagicMock()
newcommit.tree.get_blob_by_path.return_value.__iter__.return_value = [
'one']
newcommit.repo.commit(
).tree.get_blob_by_path.return_value = mock.MagicMock()
newcommit.repo.commit().tree.get_blob_by_path.return_value.__iter__.return_value = [
'two']
commit_datetime = datetime.utcnow()
project = mock.Mock(
trove_topic=[],
trove_language=[],
)
stats.addCommit(newcommit, commit_datetime, project)
self.assertEqual(stats.general[0].commits[0],
{'lines': 3, 'number': 1, 'language': None})
unified_diff.reset_mock()
with h.push_config(config, **{'userstats.count_lines_of_code': 'false'}):
stats.addCommit(newcommit, commit_datetime, project)
self.assertEqual(stats.general[0].commits[0],
{'lines': 3, 'number': 2, 'language': None})
unified_diff.assert_not_called()<|fim▁end|> |
p = Project.query.get(shortname='u/test-user-2')
topic = TroveCategory.query.get(shortname='scientific')
create_time = datetime.utcnow() + timedelta(-5) |
<|file_name|>ServerHealthService.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.serverhealth;
import com.thoughtworks.go.config.CruiseConfig;
import com.thoughtworks.go.config.CruiseConfigProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Service;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
@Service
public class ServerHealthService implements ApplicationContextAware {
private static final Logger LOG = LoggerFactory.getLogger(ServerHealthService.class);
private HashMap<ServerHealthState, Set<String>> pipelinesWithErrors;
private Map<HealthStateType, ServerHealthState> serverHealth;
private ApplicationContext applicationContext;
public ServerHealthService() {
this.serverHealth = new ConcurrentHashMap<>();
this.pipelinesWithErrors = new HashMap<>();
}
public void removeByScope(HealthStateScope scope) {
for (HealthStateType healthStateType : entryKeys()) {
if (healthStateType.isSameScope(scope)) {
serverHealth.remove(healthStateType);
}
}
}
private Set<HealthStateType> entryKeys() {<|fim▁hole|> }
public List<ServerHealthState> filterByScope(HealthStateScope scope) {
List<ServerHealthState> filtered = new ArrayList<>();
for (Map.Entry<HealthStateType, ServerHealthState> entry : sortedEntries()) {
HealthStateType type = entry.getKey();
if (type.isSameScope(scope)) {
filtered.add(entry.getValue());
}
}
return filtered;
}
public HealthStateType update(ServerHealthState serverHealthState) {
HealthStateType type = serverHealthState.getType();
if (serverHealthState.getLogLevel() == HealthStateLevel.OK) {
if (serverHealth.containsKey(type)) {
serverHealth.remove(type);
}
return null;
} else {
serverHealth.put(type, serverHealthState);
return type;
}
}
// called from spring timer
public synchronized void onTimer() {
CruiseConfig currentConfig = applicationContext.getBean(CruiseConfigProvider.class).getCurrentConfig();
purgeStaleHealthMessages(currentConfig);
LOG.debug("Recomputing material to pipeline mappings.");
HashMap<ServerHealthState, Set<String>> erroredPipelines = new HashMap<>();
for (Map.Entry<HealthStateType, ServerHealthState> entry : serverHealth.entrySet()) {
erroredPipelines.put(entry.getValue(), entry.getValue().getPipelineNames(currentConfig));
}
pipelinesWithErrors = erroredPipelines;
LOG.debug("Done recomputing material to pipeline mappings.");
}
public Set<String> getPipelinesWithErrors(ServerHealthState serverHealthState) {
return pipelinesWithErrors.get(serverHealthState);
}
void purgeStaleHealthMessages(CruiseConfig cruiseConfig) {
removeMessagesForElementsNoLongerInConfig(cruiseConfig);
removeExpiredMessages();
}
@Deprecated(forRemoval = true) // Remove once we get rid of SpringJUnitTestRunner
public void removeAllLogs() {
serverHealth.clear();
}
private void removeMessagesForElementsNoLongerInConfig(CruiseConfig cruiseConfig) {
for (HealthStateType type : entryKeys()) {
if (type.isRemovedFromConfig(cruiseConfig)) {
this.removeByScope(type);
}
}
}
private void removeExpiredMessages() {
for (Map.Entry<HealthStateType, ServerHealthState> entry : new HashSet<>(serverHealth.entrySet())) {
ServerHealthState value = entry.getValue();
if (value.hasExpired()) {
serverHealth.remove(entry.getKey());
}
}
}
private void removeByScope(HealthStateType type) {
removeByScope(type.getScope());
}
public ServerHealthStates logs() {
ArrayList<ServerHealthState> logs = new ArrayList<>();
for (Map.Entry<HealthStateType, ServerHealthState> entry : sortedEntries()) {
logs.add(entry.getValue());
}
return new ServerHealthStates(logs);
}
private List<Map.Entry<HealthStateType, ServerHealthState>> sortedEntries() {
List<Map.Entry<HealthStateType, ServerHealthState>> entries = new ArrayList<>(serverHealth.entrySet());
entries.sort(Comparator.comparing(Map.Entry::getKey));
return entries;
}
public String getLogsAsText() {
StringBuilder text = new StringBuilder();
for (ServerHealthState state : logs()) {
text.append(state.getDescription());
text.append("\n\t");
text.append(state.getMessage());
text.append("\n");
}
return text.toString();
}
public boolean containsError(HealthStateType type, HealthStateLevel level) {
ServerHealthStates allLogs = logs();
for (ServerHealthState log : allLogs) {
if (log.getType().equals(type) && log.getLogLevel() == level) {
return true;
}
}
return false;
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
}<|fim▁end|> | return new HashSet<>(serverHealth.keySet()); |
<|file_name|>Comments.java<|end_file_name|><|fim▁begin|>package com.tale.model;
import com.blade.jdbc.annotation.Table;
import java.io.Serializable;
//
@Table(name = "t_comments", pk = "coid")
public class Comments implements Serializable {
private static final long serialVersionUID = 1L;
// comment表主键
private Integer coid;
// post表主键,关联字段
private Integer cid;
// 评论生成时的GMT unix时间戳
private Integer created;
// 评论作者
private String author;
// 评论所属用户id
private Integer author_id;
// 评论所属内容作者id
private Integer owner_id;
// 评论者邮件
private String mail;
// 评论者网址
private String url;
// 评论者ip地址
private String ip;
// 评论者客户端
private String agent;
// 评论内容
private String content;
// 评论类型
private String type;
// 评论状态
private String status;
// 父级评论
private Integer parent;
public Comments() {
}
public Integer getCoid() {
return coid;
}
public void setCoid(Integer coid) {
this.coid = coid;
}
public Integer getCid() {
return cid;
}
public void setCid(Integer cid) {
this.cid = cid;
}
public Integer getCreated() {
return created;
}
public void setCreated(Integer created) {
this.created = created;
}
public String getAuthor() {
return author;
}
public void setAuthor(String author) {
this.author = author;
}
public String getMail() {
return mail;
}
public void setMail(String mail) {
this.mail = mail;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getIp() {
return ip;
}
public void setIp(String ip) {
this.ip = ip;
}
<|fim▁hole|> }
public void setAgent(String agent) {
this.agent = agent;
}
public String getContent() {
return content;
}
public void setContent(String content) {
this.content = content;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public Integer getParent() {
return parent;
}
public void setParent(Integer parent) {
this.parent = parent;
}
public Integer getAuthor_id() {
return author_id;
}
public void setAuthor_id(Integer author_id) {
this.author_id = author_id;
}
public Integer getOwner_id() {
return owner_id;
}
public void setOwner_id(Integer owner_id) {
this.owner_id = owner_id;
}
}<|fim▁end|> | public String getAgent() {
return agent; |
<|file_name|>colorPkg.go<|end_file_name|><|fim▁begin|>package processing
import "github.com/gopherjs/gopherjs/js"
////////////////////////////////////////////////////////////
// COLOR
//Creating and reading:
func Color(params ...interface{}) *js.Object {
switch len(params) {
case 1:
return pG.Call("color", params[0])
case 2:
return pG.Call("color", params[0], params[1])
case 3:
return pG.Call("color", params[0], params[1], params[2])
case 4:
return pG.Call("color", params[0], params[1], params[2], params[3])
default:
println("Error in Color function (1)")
return nil
}
}
func Alpha(color *js.Object) int {
return pG.Call("alpha", color).Int()
}
func Blue(color *js.Object) int {
return pG.Call("blue", color).Int()
}
func Brightness(color *js.Object) int {
return pG.Call("brightness", color).Int()
}
func Green(color *js.Object) int {
return pG.Call("green", color).Int()
}
func Hue(color *js.Object) int {
return pG.Call("hue", color).Int()
}
func LerpColor(from interface{}, to interface{}, amt float64) *js.Object {
return pG.Call("lerpColor", from, to, amt)
}
func Lightness(color *js.Object) int {
return pG.Call("lightness", color).Int()
}
func Red(color *js.Object) int {
return pG.Call("red", color).Int()
}
func Saturation(color *js.Object) int {
return pG.Call("saturation", color).Int()
}
//Setting:
func Background(values ...interface{}) {
switch len(values) {
case 1:
pG.Call("background", values[0])
break
case 2:
pG.Call("background", values[0].(int), values[1].(int))
break
case 3:
pG.Call("background", values[0].(int), values[1].(int), values[2].(int))<|fim▁hole|>
default:
println("Error in background function (2)...")
return
}
}
func Clear() {
pG.Call("clear")
}
func ColorMode(mode string, maxValues ...int) {
switch len(maxValues) {
case 0:
pG.Call("colorMode", mode)
break
case 1:
pG.Call("colorMode", mode, maxValues[0])
break
case 2:
pG.Call("colorMode", mode, maxValues[0], maxValues[1])
break
case 3:
pG.Call("colorMode", mode, maxValues[0], maxValues[1], maxValues[2])
break
case 4:
pG.Call("colorMode", mode, maxValues[0], maxValues[1], maxValues[2], maxValues[3])
break
default:
println("Error in colorMode (1)")
return
}
}
func Fill(firstValue interface{}, extraValues ...float64) {
switch len(extraValues) {
case 0:
/* Darwin!
typeOfFirstValue := reflect.TypeOf(firstValue).Name()
if typeOfFirstValue == reflect.Int.String() || typeOfFirstValue == reflect.Float64.String(){
pG.Call("fill", firstValue.(float64))
}
*/
pG.Call("fill", firstValue)
break
case 1:
pG.Call("fill", firstValue, extraValues[0])
break
case 2:
pG.Call("fill", firstValue, extraValues[0], extraValues[1])
break
case 3:
pG.Call("fill", firstValue, extraValues[0], extraValues[1], extraValues[2])
break
}
}
func NoFill() {
pG.Call("noFill")
}
func NoStroke() {
pG.Call("noStroke")
}
func Stroke(firstValue interface{}, extraValues ...int) {
switch len(extraValues) {
case 0:
pG.Call("stroke", firstValue)
break
case 1:
pG.Call("stroke", firstValue, extraValues[0])
break
case 2:
pG.Call("stroke", firstValue, extraValues[0], extraValues[1])
break
case 3:
pG.Call("stroke", firstValue, extraValues[0], extraValues[1], extraValues[2])
break
}
}
func StrokeWeight(weight int) {
pG.Call("strokeWeight", weight)
}<|fim▁end|> | break |
<|file_name|>test_tag_show.py<|end_file_name|><|fim▁begin|>import pytest
from cli_config.tag import tag
from utility.nix_error import NixError
def test_tag_show_no_tag(capsys):
with pytest.raises(SystemExit) as _excinfo:
tag.tag("nixconfig", ["show"])
_out, _err = capsys.readouterr()
assert "2" in str(_excinfo.value), "Exception doesn't contain expected string"
assert len(_out) == 0, "StdOut should be empty, contains: {}".format(_out)
assert "the following arguments are required: tag" in _err, "StdErr doesn't contain expected string"
def test_tag_show_invalid_tag(capsys):
with pytest.raises(NixError) as _excinfo:
tag.tag("nixconfig", ["show", "badtag"])<|fim▁hole|>
assert "Unknown tag: badtag" in str(_excinfo.value)
assert len(_out) == 0, "StdOut should be empty, contains: {}".format(_out)
assert len(_err) is 0, "StdErr should be empty, contains: {}".format(_err)
def test_tag_show_good_tag(capsys):
tag.tag("nixconfig", ["show", "tag1"])
_out, _err = capsys.readouterr()
assert "script1" in _out, "'script1' should be in output"
assert "script2" in _out, "'script2' should be in output"
assert "script3" not in _out, "'script2' should be in output"
assert len(_err) is 0, "StdErr should be empty, contains: {}".format(_err)<|fim▁end|> |
_out, _err = capsys.readouterr() |
<|file_name|>compose.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { IconBaseProps } from 'react-icon-base';<|fim▁hole|><|fim▁end|> | declare class IoCompose extends React.Component<IconBaseProps> { }
export = IoCompose; |
<|file_name|>bgjob.py<|end_file_name|><|fim▁begin|># This script is an example of how you can run blender from the command line (in background mode with no interface)
# to automate tasks, in this example it creates a text object, camera and light, then renders and/or saves it.
# This example also shows how you can parse command line options to python scripts.
#
# Example usage for this test.
# blender -b -P $HOME/background_job.py -- --text="Hello World" --render="/tmp/hello" --save="/tmp/hello.blend"
# [Ivana:] note that /tmp can be replace by the full path to PWD - ./ does not work
#
# Notice all python args are after the '--' argument.
import Blender
import bpy
def example_function(body_text, save_path, render_path):
sce= bpy.data.scenes.active
txt_data= bpy.data.curves.new('MyText', 'Text3d')
# Text Object
txt_ob = sce.objects.new(txt_data) # add the data to the scene as an object
txt_data.setText(body_text) # set the body text to the command line arg given
txt_data.setAlignment(Blender.Text3d.MIDDLE)# center text
# Camera
cam_data= bpy.data.cameras.new('MyCam') # create new camera data
cam_ob= sce.objects.new(cam_data) # add the camera data to the scene (creating a new object)
sce.objects.camera= cam_ob # set the active camera
cam_ob.loc= 0,0,10
# Lamp
lamp_data= bpy.data.lamps.new('MyLamp')
lamp_ob= sce.objects.new(lamp_data)
lamp_ob.loc= 2,2,5
if save_path:
try:
f= open(save_path, 'w')
f.close()
ok= True
except:
print 'Cannot save to path "%s"' % save_path
ok= False
if ok:
Blender.Save(save_path, 1)
if render_path:
render= sce.render
render.extensions= True
render.renderPath = render_path
#[Ivana:] don't know how to change the format
#render.setImageType(PNG)
render.sFrame= 1
render.eFrame= 1
render.renderAnim()
import sys # to get command line args
import optparse # to parse options for us and print a nice help message
script_name= 'background_job.py'
def main():
# get the args passed to blender after "--", all of which are ignored by blender specifically
# so python may receive its own arguments
argv= sys.argv
if '--' not in argv:
argv = [] # as if no args are passed
else:
argv = argv[argv.index('--')+1: ] # get all args after "--"
# When --help or no args are given, print this help
usage_text = 'Run blender in background mode with this script:'
usage_text += ' blender -b -P ' + script_name + ' -- [options]'
parser = optparse.OptionParser(usage = usage_text)
# Example background utility, add some text and renders or saves it (with options)
# Possible types are: string, int, long, choice, float and complex.
parser.add_option('-t', '--text', dest='body_text', help='This text will be used to render an image', type='string')<|fim▁hole|> options, args = parser.parse_args(argv) # In this example we wont use the args
if not argv:
parser.print_help()
return
if not options.body_text:
print 'Error: --text="some string" argument not given, aborting.'
parser.print_help()
return
# Run the example function
example_function(options.body_text, options.save_path, options.render_path)
print 'batch job finished, exiting'
if __name__ == '__main__':
main()<|fim▁end|> |
parser.add_option('-s', '--save', dest='save_path', help='Save the generated file to the specified path', metavar='FILE')
parser.add_option('-r', '--render', dest='render_path', help='Render an image to the specified path', metavar='FILE')
|
<|file_name|>CCPointExtension.js<|end_file_name|><|fim▁begin|>/****************************************************************************
Copyright (c) 2008-2010 Ricardo Quesada
Copyright (c) 2011-2012 cocos2d-x.org
Copyright (c) 2013-2014 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
/**
* <p>cc.Point extensions based on Chipmunk's cpVect file.<br />
* These extensions work both with cc.Point</p>
*
* <p>The "ccp" prefix means: "CoCos2d Point"</p>
*
* <p> //Examples:<br />
* - cc.pAdd( cc.p(1,1), cc.p(2,2) ); // preferred cocos2d way<br />
* - cc.pAdd( cc.p(1,1), cc.p(2,2) ); // also ok but more verbose<br />
* - cc.pAdd( cc.cpv(1,1), cc.cpv(2,2) ); // mixing chipmunk and cocos2d (avoid)</p>
*/
/**
* smallest such that 1.0+FLT_EPSILON != 1.0
* @constant
* @type Number
*/
cc.POINT_EPSILON = parseFloat('1.192092896e-07F');
/**
* Returns opposite of point.
* @param {cc.Point} point
* @return {cc.Point}
*/
cc.pNeg = function (point) {
return cc.p(-point.x, -point.y);
};
/**
* Calculates sum of two points.
* @param {cc.Point} v1
* @param {cc.Point} v2
* @return {cc.Point}
*/
cc.pAdd = function (v1, v2) {
return cc.p(v1.x + v2.x, v1.y + v2.y);
};
/**
* Calculates difference of two points.
* @param {cc.Point} v1
* @param {cc.Point} v2
* @return {cc.Point}
*/
cc.pSub = function (v1, v2) {
return cc.p(v1.x - v2.x, v1.y - v2.y);
};
/**
* Returns point multiplied by given factor.
* @param {cc.Point} point
* @param {Number} floatVar
* @return {cc.Point}
*/
cc.pMult = function (point, floatVar) {
return cc.p(point.x * floatVar, point.y * floatVar);
};
/**
* Calculates midpoint between two points.
* @param {cc.Point} v1
* @param {cc.Point} v2
* @return {cc.pMult}
*/
cc.pMidpoint = function (v1, v2) {
return cc.pMult(cc.pAdd(v1, v2), 0.5);
};
/**
* Calculates dot product of two points.
* @param {cc.Point} v1
* @param {cc.Point} v2
* @return {Number}
*/
cc.pDot = function (v1, v2) {
return v1.x * v2.x + v1.y * v2.y;
};
/**
* Calculates cross product of two points.
* @param {cc.Point} v1
* @param {cc.Point} v2
* @return {Number}
*/
cc.pCross = function (v1, v2) {
return v1.x * v2.y - v1.y * v2.x;
};
/**
* Calculates perpendicular of v, rotated 90 degrees counter-clockwise -- cross(v, perp(v)) >= 0
* @param {cc.Point} point
* @return {cc.Point}
*/
cc.pPerp = function (point) {
return cc.p(-point.y, point.x);
};
/**
* Calculates perpendicular of v, rotated 90 degrees clockwise -- cross(v, rperp(v)) <= 0
* @param {cc.Point} point
* @return {cc.Point}
*/
cc.pRPerp = function (point) {
return cc.p(point.y, -point.x);
};
/**
* Calculates the projection of v1 over v2.
* @param {cc.Point} v1
* @param {cc.Point} v2
* @return {cc.pMult}
*/
cc.pProject = function (v1, v2) {
return cc.pMult(v2, cc.pDot(v1, v2) / cc.pDot(v2, v2));
};
/**
* Rotates two points.
* @param {cc.Point} v1
* @param {cc.Point} v2
* @return {cc.Point}
*/
cc.pRotate = function (v1, v2) {
return cc.p(v1.x * v2.x - v1.y * v2.y, v1.x * v2.y + v1.y * v2.x);
};
/**
* Unrotates two points.
* @param {cc.Point} v1
* @param {cc.Point} v2
* @return {cc.Point}
*/
cc.pUnrotate = function (v1, v2) {
return cc.p(v1.x * v2.x + v1.y * v2.y, v1.y * v2.x - v1.x * v2.y);
};
/**
* Calculates the square length of a cc.Point (not calling sqrt() )
* @param {cc.Point} v
*@return {Number}
*/
cc.pLengthSQ = function (v) {
return cc.pDot(v, v);
};
/**
* Calculates the square distance between two points (not calling sqrt() )
* @param {cc.Point} point1
* @param {cc.Point} point2
* @return {Number}
*/
cc.pDistanceSQ = function(point1, point2){
return cc.pLengthSQ(cc.pSub(point1,point2));
};
/**
* Calculates distance between point an origin
* @param {cc.Point} v
* @return {Number}
*/
cc.pLength = function (v) {
return Math.sqrt(cc.pLengthSQ(v));
};
/**
* Calculates the distance between two points
* @param {cc.Point} v1
* @param {cc.Point} v2
* @return {Number}
*/
cc.pDistance = function (v1, v2) {
return cc.pLength(cc.pSub(v1, v2));
};
/**
* Returns point multiplied to a length of 1.
* @param {cc.Point} v
* @return {cc.Point}
*/
cc.pNormalize = function (v) {
var n = cc.pLength(v);
return n === 0 ? cc.p(v) : cc.pMult(v, 1.0 / n);
};
/**
* Converts radians to a normalized vector.
* @param {Number} a
* @return {cc.Point}
*/
cc.pForAngle = function (a) {
return cc.p(Math.cos(a), Math.sin(a));
};
/**
* Converts a vector to radians.
* @param {cc.Point} v
* @return {Number}
*/
cc.pToAngle = function (v) {
return Math.atan2(v.y, v.x);
};
/**
* Clamp a value between from and to.
* @param {Number} value
* @param {Number} min_inclusive
* @param {Number} max_inclusive
* @return {Number}
*/
cc.clampf = function (value, min_inclusive, max_inclusive) {
if (min_inclusive > max_inclusive) {
var temp = min_inclusive;
min_inclusive = max_inclusive;
max_inclusive = temp;
}
return value < min_inclusive ? min_inclusive : value < max_inclusive ? value : max_inclusive;
};
/**
* Clamp a point between from and to.
* @param {Point} p
* @param {Number} min_inclusive
* @param {Number} max_inclusive
* @return {cc.Point}
*/
cc.pClamp = function (p, min_inclusive, max_inclusive) {
return cc.p(cc.clampf(p.x, min_inclusive.x, max_inclusive.x), cc.clampf(p.y, min_inclusive.y, max_inclusive.y));
};
/**
* Quickly convert cc.Size to a cc.Point<|fim▁hole|> */
cc.pFromSize = function (s) {
return cc.p(s.width, s.height);
};
/**
* Run a math operation function on each point component <br />
* Math.abs, Math.fllor, Math.ceil, Math.round.
* @param {cc.Point} p
* @param {Function} opFunc
* @return {cc.Point}
* @example
* //For example: let's try to take the floor of x,y
* var p = cc.pCompOp(cc.p(10,10),Math.abs);
*/
cc.pCompOp = function (p, opFunc) {
return cc.p(opFunc(p.x), opFunc(p.y));
};
/**
* Linear Interpolation between two points a and b
* alpha == 0 ? a
* alpha == 1 ? b
* otherwise a value between a..b
* @param {cc.Point} a
* @param {cc.Point} b
* @param {Number} alpha
* @return {cc.pAdd}
*/
cc.pLerp = function (a, b, alpha) {
return cc.pAdd(cc.pMult(a, 1 - alpha), cc.pMult(b, alpha));
};
/**
* @param {cc.Point} a
* @param {cc.Point} b
* @param {Number} variance
* @return {Boolean} if points have fuzzy equality which means equal with some degree of variance.
*/
cc.pFuzzyEqual = function (a, b, variance) {
if (a.x - variance <= b.x && b.x <= a.x + variance) {
if (a.y - variance <= b.y && b.y <= a.y + variance)
return true;
}
return false;
};
/**
* Multiplies a nd b components, a.x*b.x, a.y*b.y
* @param {cc.Point} a
* @param {cc.Point} b
* @return {cc.Point}
*/
cc.pCompMult = function (a, b) {
return cc.p(a.x * b.x, a.y * b.y);
};
/**
* @param {cc.Point} a
* @param {cc.Point} b
* @return {Number} the signed angle in radians between two vector directions
*/
cc.pAngleSigned = function (a, b) {
var a2 = cc.pNormalize(a);
var b2 = cc.pNormalize(b);
var angle = Math.atan2(a2.x * b2.y - a2.y * b2.x, cc.pDot(a2, b2));
if (Math.abs(angle) < cc.POINT_EPSILON)
return 0.0;
return angle;
};
/**
* @param {cc.Point} a
* @param {cc.Point} b
* @return {Number} the angle in radians between two vector directions
*/
cc.pAngle = function (a, b) {
var angle = Math.acos(cc.pDot(cc.pNormalize(a), cc.pNormalize(b)));
if (Math.abs(angle) < cc.POINT_EPSILON) return 0.0;
return angle;
};
/**
* Rotates a point counter clockwise by the angle around a pivot
* @param {cc.Point} v v is the point to rotate
* @param {cc.Point} pivot pivot is the pivot, naturally
* @param {Number} angle angle is the angle of rotation cw in radians
* @return {cc.Point} the rotated point
*/
cc.pRotateByAngle = function (v, pivot, angle) {
var r = cc.pSub(v, pivot);
var cosa = Math.cos(angle), sina = Math.sin(angle);
var t = r.x;
r.x = t * cosa - r.y * sina + pivot.x;
r.y = t * sina + r.y * cosa + pivot.y;
return r;
};
/**
* A general line-line intersection test
* indicating successful intersection of a line<br />
* note that to truly test intersection for segments we have to make<br />
* sure that s & t lie within [0..1] and for rays, make sure s & t > 0<br />
* the hit point is p3 + t * (p4 - p3);<br />
* the hit point also is p1 + s * (p2 - p1);
* @param {cc.Point} A A is the startpoint for the first line P1 = (p1 - p2).
* @param {cc.Point} B B is the endpoint for the first line P1 = (p1 - p2).
* @param {cc.Point} C C is the startpoint for the second line P2 = (p3 - p4).
* @param {cc.Point} D D is the endpoint for the second line P2 = (p3 - p4).
* @param {cc.Point} retP retP.x is the range for a hitpoint in P1 (pa = p1 + s*(p2 - p1)), <br />
* retP.y is the range for a hitpoint in P3 (pa = p2 + t*(p4 - p3)).
* @return {Boolean}
*/
cc.pLineIntersect = function (A, B, C, D, retP) {
if ((A.x === B.x && A.y === B.y) || (C.x === D.x && C.y === D.y)) {
return false;
}
var BAx = B.x - A.x;
var BAy = B.y - A.y;
var DCx = D.x - C.x;
var DCy = D.y - C.y;
var ACx = A.x - C.x;
var ACy = A.y - C.y;
var denom = DCy * BAx - DCx * BAy;
retP.x = DCx * ACy - DCy * ACx;
retP.y = BAx * ACy - BAy * ACx;
if (denom === 0) {
if (retP.x === 0 || retP.y === 0) {
// Lines incident
return true;
}
// Lines parallel and not incident
return false;
}
retP.x = retP.x / denom;
retP.y = retP.y / denom;
return true;
};
/**
* ccpSegmentIntersect return YES if Segment A-B intersects with segment C-D.
* @param {cc.Point} A
* @param {cc.Point} B
* @param {cc.Point} C
* @param {cc.Point} D
* @return {Boolean}
*/
cc.pSegmentIntersect = function (A, B, C, D) {
var retP = cc.p(0, 0);
if (cc.pLineIntersect(A, B, C, D, retP))
if (retP.x >= 0.0 && retP.x <= 1.0 && retP.y >= 0.0 && retP.y <= 1.0)
return true;
return false;
};
/**
* ccpIntersectPoint return the intersection point of line A-B, C-D
* @param {cc.Point} A
* @param {cc.Point} B
* @param {cc.Point} C
* @param {cc.Point} D
* @return {cc.Point}
*/
cc.pIntersectPoint = function (A, B, C, D) {
var retP = cc.p(0, 0);
if (cc.pLineIntersect(A, B, C, D, retP)) {
// Point of intersection
var P = cc.p(0, 0);
P.x = A.x + retP.x * (B.x - A.x);
P.y = A.y + retP.x * (B.y - A.y);
return P;
}
return cc.p(0,0);
};
/**
* check to see if both points are equal
* @param {cc.Point} A A ccp a
* @param {cc.Point} B B ccp b to be compared
* @return {Boolean} the true if both ccp are same
*/
cc.pSameAs = function (A, B) {
if ((A != null) && (B != null)) {
return (A.x === B.x && A.y === B.y);
}
return false;
};
// High Perfomance In Place Operationrs ---------------------------------------
/**
* sets the position of the point to 0
* @param {cc.Point} v
*/
cc.pZeroIn = function(v) {
v.x = 0;
v.y = 0;
};
/**
* copies the position of one point to another
* @param {cc.Point} v1
* @param {cc.Point} v2
*/
cc.pIn = function(v1, v2) {
v1.x = v2.x;
v1.y = v2.y;
};
/**
* multiplies the point with the given factor (inplace)
* @param {cc.Point} point
* @param {Number} floatVar
*/
cc.pMultIn = function(point, floatVar) {
point.x *= floatVar;
point.y *= floatVar;
};
/**
* subtracts one point from another (inplace)
* @param {cc.Point} v1
* @param {cc.Point} v2
*/
cc.pSubIn = function(v1, v2) {
v1.x -= v2.x;
v1.y -= v2.y;
};
/**
* adds one point to another (inplace)
* @param {cc.Point} v1
* @param {cc.point} v2
*/
cc.pAddIn = function(v1, v2) {
v1.x += v2.x;
v1.y += v2.y;
};
/**
* normalizes the point (inplace)
* @param {cc.Point} v
*/
cc.pNormalizeIn = function(v) {
cc.pMultIn(v, 1.0 / Math.sqrt(v.x * v.x + v.y * v.y));
};<|fim▁end|> | * @param {cc.Size} s
* @return {cc.Point} |
<|file_name|>RecognitionRelayServlet.java<|end_file_name|><|fim▁begin|>/**
*
*/
package org.sylvani.io.voice.http;
import java.io.IOException;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.emf.common.util.EList;
import org.eclipse.smarthome.model.sitemap.Sitemap;
import org.eclipse.smarthome.model.sitemap.SitemapProvider;
import org.eclipse.smarthome.model.sitemap.Widget;
import org.eclipse.smarthome.ui.items.ItemUIRegistry;
/**
* Varianten
* - Simple and Stupid HAL9000
* - Volltext
* - Tagged Analyse
*
* @author hkuhn
*
*/
public class RecognitionRelayServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
private ItemUIRegistry itemUIRegistry;
private List<SitemapProvider> sitemaps;
public RecognitionRelayServlet(ItemUIRegistry itemUIRegistry, List<SitemapProvider> sitemaps) {
this.itemUIRegistry = itemUIRegistry;
this.sitemaps = sitemaps;
}
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
super.doPost(req, resp);
}
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
for (SitemapProvider sitemapProvider : sitemaps) {
for (String sitemapName : sitemapProvider.getSitemapNames()) {
System.out.println("sitemap " + sitemapName);
<|fim▁hole|> System.out.println("#Widget " + widget.getLabel() + " " + widget.getItem());
}
}
}
}
}<|fim▁end|> | Sitemap sitemap = sitemapProvider.getSitemap(sitemapName);
EList<Widget> list = sitemap.getChildren();
for (Widget widget : list) { |
<|file_name|>stereobm.cpp<|end_file_name|><|fim▁begin|>//M*//////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000, Intel Corporation, all rights reserved.
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
/****************************************************************************************\
* Very fast SAD-based (Sum-of-Absolute-Diffrences) stereo correspondence algorithm. *
* Contributed by Kurt Konolige *
\****************************************************************************************/
#include "precomp.hpp"
#include <stdio.h>
#include <limits>
#include "opencl_kernels_calib3d.hpp"
namespace cv
{
struct StereoBMParams
{
StereoBMParams(int _numDisparities=64, int _SADWindowSize=21)
{
preFilterType = StereoBM::PREFILTER_XSOBEL;
preFilterSize = 9;
preFilterCap = 31;
SADWindowSize = _SADWindowSize;
minDisparity = 0;
numDisparities = _numDisparities > 0 ? _numDisparities : 64;
textureThreshold = 10;
uniquenessRatio = 15;
speckleRange = speckleWindowSize = 0;
roi1 = roi2 = Rect(0,0,0,0);
disp12MaxDiff = -1;
dispType = CV_16S;
}
int preFilterType;
int preFilterSize;
int preFilterCap;
int SADWindowSize;
int minDisparity;
int numDisparities;
int textureThreshold;
int uniquenessRatio;
int speckleRange;
int speckleWindowSize;
Rect roi1, roi2;
int disp12MaxDiff;
int dispType;
};
static bool ocl_prefilter_norm(InputArray _input, OutputArray _output, int winsize, int prefilterCap)
{
ocl::Kernel k("prefilter_norm", ocl::calib3d::stereobm_oclsrc, cv::format("-D WSZ=%d", winsize));
if(k.empty())
return false;
int scale_g = winsize*winsize/8, scale_s = (1024 + scale_g)/(scale_g*2);
scale_g *= scale_s;
UMat input = _input.getUMat(), output;
_output.create(input.size(), input.type());
output = _output.getUMat();
size_t globalThreads[3] = { input.cols, input.rows, 1 };
k.args(ocl::KernelArg::PtrReadOnly(input), ocl::KernelArg::PtrWriteOnly(output), input.rows, input.cols,
prefilterCap, scale_g, scale_s);
return k.run(2, globalThreads, NULL, false);
}
static void prefilterNorm( const Mat& src, Mat& dst, int winsize, int ftzero, uchar* buf )
{
int x, y, wsz2 = winsize/2;
int* vsum = (int*)alignPtr(buf + (wsz2 + 1)*sizeof(vsum[0]), 32);
int scale_g = winsize*winsize/8, scale_s = (1024 + scale_g)/(scale_g*2);
const int OFS = 256*5, TABSZ = OFS*2 + 256;
uchar tab[TABSZ];
const uchar* sptr = src.ptr();
int srcstep = (int)src.step;
Size size = src.size();
scale_g *= scale_s;
for( x = 0; x < TABSZ; x++ )
tab[x] = (uchar)(x - OFS < -ftzero ? 0 : x - OFS > ftzero ? ftzero*2 : x - OFS + ftzero);
for( x = 0; x < size.width; x++ )
vsum[x] = (ushort)(sptr[x]*(wsz2 + 2));
for( y = 1; y < wsz2; y++ )
{
for( x = 0; x < size.width; x++ )
vsum[x] = (ushort)(vsum[x] + sptr[srcstep*y + x]);
}
for( y = 0; y < size.height; y++ )
{
const uchar* top = sptr + srcstep*MAX(y-wsz2-1,0);
const uchar* bottom = sptr + srcstep*MIN(y+wsz2,size.height-1);
const uchar* prev = sptr + srcstep*MAX(y-1,0);
const uchar* curr = sptr + srcstep*y;
const uchar* next = sptr + srcstep*MIN(y+1,size.height-1);
uchar* dptr = dst.ptr<uchar>(y);
for( x = 0; x < size.width; x++ )
vsum[x] = (ushort)(vsum[x] + bottom[x] - top[x]);
for( x = 0; x <= wsz2; x++ )
{
vsum[-x-1] = vsum[0];
vsum[size.width+x] = vsum[size.width-1];
}
int sum = vsum[0]*(wsz2 + 1);
for( x = 1; x <= wsz2; x++ )
sum += vsum[x];
int val = ((curr[0]*5 + curr[1] + prev[0] + next[0])*scale_g - sum*scale_s) >> 10;
dptr[0] = tab[val + OFS];
for( x = 1; x < size.width-1; x++ )
{
sum += vsum[x+wsz2] - vsum[x-wsz2-1];
val = ((curr[x]*4 + curr[x-1] + curr[x+1] + prev[x] + next[x])*scale_g - sum*scale_s) >> 10;
dptr[x] = tab[val + OFS];
}
sum += vsum[x+wsz2] - vsum[x-wsz2-1];
val = ((curr[x]*5 + curr[x-1] + prev[x] + next[x])*scale_g - sum*scale_s) >> 10;
dptr[x] = tab[val + OFS];
}
}
static bool ocl_prefilter_xsobel(InputArray _input, OutputArray _output, int prefilterCap)
{
ocl::Kernel k("prefilter_xsobel", ocl::calib3d::stereobm_oclsrc);
if(k.empty())
return false;
UMat input = _input.getUMat(), output;
_output.create(input.size(), input.type());
output = _output.getUMat();
size_t globalThreads[3] = { input.cols, input.rows, 1 };
k.args(ocl::KernelArg::PtrReadOnly(input), ocl::KernelArg::PtrWriteOnly(output), input.rows, input.cols, prefilterCap);
return k.run(2, globalThreads, NULL, false);
}
static void
prefilterXSobel( const Mat& src, Mat& dst, int ftzero )
{
int x, y;
const int OFS = 256*4, TABSZ = OFS*2 + 256;
uchar tab[TABSZ];
Size size = src.size();
for( x = 0; x < TABSZ; x++ )
tab[x] = (uchar)(x - OFS < -ftzero ? 0 : x - OFS > ftzero ? ftzero*2 : x - OFS + ftzero);
uchar val0 = tab[0 + OFS];
#if CV_SSE2
volatile bool useSIMD = checkHardwareSupport(CV_CPU_SSE2);
#endif
for( y = 0; y < size.height-1; y += 2 )
{
const uchar* srow1 = src.ptr<uchar>(y);
const uchar* srow0 = y > 0 ? srow1 - src.step : size.height > 1 ? srow1 + src.step : srow1;
const uchar* srow2 = y < size.height-1 ? srow1 + src.step : size.height > 1 ? srow1 - src.step : srow1;
const uchar* srow3 = y < size.height-2 ? srow1 + src.step*2 : srow1;
uchar* dptr0 = dst.ptr<uchar>(y);
uchar* dptr1 = dptr0 + dst.step;
dptr0[0] = dptr0[size.width-1] = dptr1[0] = dptr1[size.width-1] = val0;
x = 1;
#if CV_SSE2
if( useSIMD )
{
__m128i z = _mm_setzero_si128(), ftz = _mm_set1_epi16((short)ftzero),
ftz2 = _mm_set1_epi8(cv::saturate_cast<uchar>(ftzero*2));
for( ; x <= size.width-9; x += 8 )
{
__m128i c0 = _mm_unpacklo_epi8(_mm_loadl_epi64((__m128i*)(srow0 + x - 1)), z);
__m128i c1 = _mm_unpacklo_epi8(_mm_loadl_epi64((__m128i*)(srow1 + x - 1)), z);
__m128i d0 = _mm_unpacklo_epi8(_mm_loadl_epi64((__m128i*)(srow0 + x + 1)), z);
__m128i d1 = _mm_unpacklo_epi8(_mm_loadl_epi64((__m128i*)(srow1 + x + 1)), z);
d0 = _mm_sub_epi16(d0, c0);
d1 = _mm_sub_epi16(d1, c1);
__m128i c2 = _mm_unpacklo_epi8(_mm_loadl_epi64((__m128i*)(srow2 + x - 1)), z);
__m128i c3 = _mm_unpacklo_epi8(_mm_loadl_epi64((__m128i*)(srow3 + x - 1)), z);
__m128i d2 = _mm_unpacklo_epi8(_mm_loadl_epi64((__m128i*)(srow2 + x + 1)), z);
__m128i d3 = _mm_unpacklo_epi8(_mm_loadl_epi64((__m128i*)(srow3 + x + 1)), z);
d2 = _mm_sub_epi16(d2, c2);
d3 = _mm_sub_epi16(d3, c3);
__m128i v0 = _mm_add_epi16(d0, _mm_add_epi16(d2, _mm_add_epi16(d1, d1)));
__m128i v1 = _mm_add_epi16(d1, _mm_add_epi16(d3, _mm_add_epi16(d2, d2)));
v0 = _mm_packus_epi16(_mm_add_epi16(v0, ftz), _mm_add_epi16(v1, ftz));
v0 = _mm_min_epu8(v0, ftz2);
_mm_storel_epi64((__m128i*)(dptr0 + x), v0);
_mm_storel_epi64((__m128i*)(dptr1 + x), _mm_unpackhi_epi64(v0, v0));
}
}
#endif
for( ; x < size.width-1; x++ )
{
int d0 = srow0[x+1] - srow0[x-1], d1 = srow1[x+1] - srow1[x-1],
d2 = srow2[x+1] - srow2[x-1], d3 = srow3[x+1] - srow3[x-1];
int v0 = tab[d0 + d1*2 + d2 + OFS];
int v1 = tab[d1 + d2*2 + d3 + OFS];
dptr0[x] = (uchar)v0;
dptr1[x] = (uchar)v1;
}
}
for( ; y < size.height; y++ )
{
uchar* dptr = dst.ptr<uchar>(y);
for( x = 0; x < size.width; x++ )
dptr[x] = val0;
}
}
static const int DISPARITY_SHIFT = 4;
#if CV_SSE2
static void findStereoCorrespondenceBM_SSE2( const Mat& left, const Mat& right,
Mat& disp, Mat& cost, StereoBMParams& state,
uchar* buf, int _dy0, int _dy1 )
{
const int ALIGN = 16;
int x, y, d;
int wsz = state.SADWindowSize, wsz2 = wsz/2;
int dy0 = MIN(_dy0, wsz2+1), dy1 = MIN(_dy1, wsz2+1);
int ndisp = state.numDisparities;
int mindisp = state.minDisparity;
int lofs = MAX(ndisp - 1 + mindisp, 0);
int rofs = -MIN(ndisp - 1 + mindisp, 0);
int width = left.cols, height = left.rows;
int width1 = width - rofs - ndisp + 1;
int ftzero = state.preFilterCap;
int textureThreshold = state.textureThreshold;
int uniquenessRatio = state.uniquenessRatio;
short FILTERED = (short)((mindisp - 1) << DISPARITY_SHIFT);
ushort *sad, *hsad0, *hsad, *hsad_sub;
int *htext;
uchar *cbuf0, *cbuf;
const uchar* lptr0 = left.ptr() + lofs;
const uchar* rptr0 = right.ptr() + rofs;
const uchar *lptr, *lptr_sub, *rptr;
short* dptr = disp.ptr<short>();
int sstep = (int)left.step;
int dstep = (int)(disp.step/sizeof(dptr[0]));
int cstep = (height + dy0 + dy1)*ndisp;
short costbuf = 0;
int coststep = cost.data ? (int)(cost.step/sizeof(costbuf)) : 0;
const int TABSZ = 256;
uchar tab[TABSZ];
const __m128i d0_8 = _mm_setr_epi16(0,1,2,3,4,5,6,7), dd_8 = _mm_set1_epi16(8);
sad = (ushort*)alignPtr(buf + sizeof(sad[0]), ALIGN);
hsad0 = (ushort*)alignPtr(sad + ndisp + 1 + dy0*ndisp, ALIGN);
htext = (int*)alignPtr((int*)(hsad0 + (height+dy1)*ndisp) + wsz2 + 2, ALIGN);
cbuf0 = (uchar*)alignPtr((uchar*)(htext + height + wsz2 + 2) + dy0*ndisp, ALIGN);
for( x = 0; x < TABSZ; x++ )
tab[x] = (uchar)std::abs(x - ftzero);
// initialize buffers
memset( hsad0 - dy0*ndisp, 0, (height + dy0 + dy1)*ndisp*sizeof(hsad0[0]) );
memset( htext - wsz2 - 1, 0, (height + wsz + 1)*sizeof(htext[0]) );
for( x = -wsz2-1; x < wsz2; x++ )
{
hsad = hsad0 - dy0*ndisp; cbuf = cbuf0 + (x + wsz2 + 1)*cstep - dy0*ndisp;
lptr = lptr0 + MIN(MAX(x, -lofs), width-lofs-1) - dy0*sstep;
rptr = rptr0 + MIN(MAX(x, -rofs), width-rofs-1) - dy0*sstep;
for( y = -dy0; y < height + dy1; y++, hsad += ndisp, cbuf += ndisp, lptr += sstep, rptr += sstep )
{
int lval = lptr[0];
__m128i lv = _mm_set1_epi8((char)lval), z = _mm_setzero_si128();
for( d = 0; d < ndisp; d += 16 )
{
__m128i rv = _mm_loadu_si128((const __m128i*)(rptr + d));
__m128i hsad_l = _mm_load_si128((__m128i*)(hsad + d));
__m128i hsad_h = _mm_load_si128((__m128i*)(hsad + d + 8));
__m128i diff = _mm_adds_epu8(_mm_subs_epu8(lv, rv), _mm_subs_epu8(rv, lv));
_mm_store_si128((__m128i*)(cbuf + d), diff);
hsad_l = _mm_add_epi16(hsad_l, _mm_unpacklo_epi8(diff,z));
hsad_h = _mm_add_epi16(hsad_h, _mm_unpackhi_epi8(diff,z));
_mm_store_si128((__m128i*)(hsad + d), hsad_l);
_mm_store_si128((__m128i*)(hsad + d + 8), hsad_h);
}
htext[y] += tab[lval];
}
}
// initialize the left and right borders of the disparity map
for( y = 0; y < height; y++ )
{
for( x = 0; x < lofs; x++ )
dptr[y*dstep + x] = FILTERED;
for( x = lofs + width1; x < width; x++ )
dptr[y*dstep + x] = FILTERED;
}
dptr += lofs;
for( x = 0; x < width1; x++, dptr++ )
{
short* costptr = cost.data ? cost.ptr<short>() + lofs + x : &costbuf;
int x0 = x - wsz2 - 1, x1 = x + wsz2;
const uchar* cbuf_sub = cbuf0 + ((x0 + wsz2 + 1) % (wsz + 1))*cstep - dy0*ndisp;
cbuf = cbuf0 + ((x1 + wsz2 + 1) % (wsz + 1))*cstep - dy0*ndisp;
hsad = hsad0 - dy0*ndisp;
lptr_sub = lptr0 + MIN(MAX(x0, -lofs), width-1-lofs) - dy0*sstep;
lptr = lptr0 + MIN(MAX(x1, -lofs), width-1-lofs) - dy0*sstep;
rptr = rptr0 + MIN(MAX(x1, -rofs), width-1-rofs) - dy0*sstep;
for( y = -dy0; y < height + dy1; y++, cbuf += ndisp, cbuf_sub += ndisp,
hsad += ndisp, lptr += sstep, lptr_sub += sstep, rptr += sstep )
{
int lval = lptr[0];
__m128i lv = _mm_set1_epi8((char)lval), z = _mm_setzero_si128();
for( d = 0; d < ndisp; d += 16 )
{
__m128i rv = _mm_loadu_si128((const __m128i*)(rptr + d));
__m128i hsad_l = _mm_load_si128((__m128i*)(hsad + d));
__m128i hsad_h = _mm_load_si128((__m128i*)(hsad + d + 8));
__m128i cbs = _mm_load_si128((const __m128i*)(cbuf_sub + d));
__m128i diff = _mm_adds_epu8(_mm_subs_epu8(lv, rv), _mm_subs_epu8(rv, lv));
__m128i diff_h = _mm_sub_epi16(_mm_unpackhi_epi8(diff, z), _mm_unpackhi_epi8(cbs, z));
_mm_store_si128((__m128i*)(cbuf + d), diff);
diff = _mm_sub_epi16(_mm_unpacklo_epi8(diff, z), _mm_unpacklo_epi8(cbs, z));
hsad_h = _mm_add_epi16(hsad_h, diff_h);
hsad_l = _mm_add_epi16(hsad_l, diff);
_mm_store_si128((__m128i*)(hsad + d), hsad_l);
_mm_store_si128((__m128i*)(hsad + d + 8), hsad_h);
}
htext[y] += tab[lval] - tab[lptr_sub[0]];
}
// fill borders
for( y = dy1; y <= wsz2; y++ )
htext[height+y] = htext[height+dy1-1];
for( y = -wsz2-1; y < -dy0; y++ )
htext[y] = htext[-dy0];
// initialize sums
for( d = 0; d < ndisp; d++ )
sad[d] = (ushort)(hsad0[d-ndisp*dy0]*(wsz2 + 2 - dy0));
hsad = hsad0 + (1 - dy0)*ndisp;
for( y = 1 - dy0; y < wsz2; y++, hsad += ndisp )
for( d = 0; d < ndisp; d += 16 )
{
__m128i s0 = _mm_load_si128((__m128i*)(sad + d));
__m128i s1 = _mm_load_si128((__m128i*)(sad + d + 8));
__m128i t0 = _mm_load_si128((__m128i*)(hsad + d));
__m128i t1 = _mm_load_si128((__m128i*)(hsad + d + 8));
s0 = _mm_add_epi16(s0, t0);
s1 = _mm_add_epi16(s1, t1);
_mm_store_si128((__m128i*)(sad + d), s0);
_mm_store_si128((__m128i*)(sad + d + 8), s1);
}
int tsum = 0;
for( y = -wsz2-1; y < wsz2; y++ )
tsum += htext[y];
// finally, start the real processing
for( y = 0; y < height; y++ )
{
int minsad = INT_MAX, mind = -1;
hsad = hsad0 + MIN(y + wsz2, height+dy1-1)*ndisp;
hsad_sub = hsad0 + MAX(y - wsz2 - 1, -dy0)*ndisp;
__m128i minsad8 = _mm_set1_epi16(SHRT_MAX);
__m128i mind8 = _mm_set1_epi16(0), d8 = d0_8, mask;
for( d = 0; d < ndisp; d += 16 )
{
__m128i u0 = _mm_load_si128((__m128i*)(hsad_sub + d));
__m128i u1 = _mm_load_si128((__m128i*)(hsad + d));
__m128i v0 = _mm_load_si128((__m128i*)(hsad_sub + d + 8));
__m128i v1 = _mm_load_si128((__m128i*)(hsad + d + 8));
__m128i usad8 = _mm_load_si128((__m128i*)(sad + d));
__m128i vsad8 = _mm_load_si128((__m128i*)(sad + d + 8));
u1 = _mm_sub_epi16(u1, u0);
v1 = _mm_sub_epi16(v1, v0);
usad8 = _mm_add_epi16(usad8, u1);
vsad8 = _mm_add_epi16(vsad8, v1);
mask = _mm_cmpgt_epi16(minsad8, usad8);
minsad8 = _mm_min_epi16(minsad8, usad8);
mind8 = _mm_max_epi16(mind8, _mm_and_si128(mask, d8));
_mm_store_si128((__m128i*)(sad + d), usad8);
_mm_store_si128((__m128i*)(sad + d + 8), vsad8);
mask = _mm_cmpgt_epi16(minsad8, vsad8);
minsad8 = _mm_min_epi16(minsad8, vsad8);
d8 = _mm_add_epi16(d8, dd_8);
mind8 = _mm_max_epi16(mind8, _mm_and_si128(mask, d8));
d8 = _mm_add_epi16(d8, dd_8);
}
tsum += htext[y + wsz2] - htext[y - wsz2 - 1];
if( tsum < textureThreshold )
{
dptr[y*dstep] = FILTERED;
continue;
}
ushort CV_DECL_ALIGNED(16) minsad_buf[8], mind_buf[8];
_mm_store_si128((__m128i*)minsad_buf, minsad8);
_mm_store_si128((__m128i*)mind_buf, mind8);
for( d = 0; d < 8; d++ )
if(minsad > (int)minsad_buf[d] || (minsad == (int)minsad_buf[d] && mind > mind_buf[d]))
{
minsad = minsad_buf[d];
mind = mind_buf[d];
}
if( uniquenessRatio > 0 )
{
int thresh = minsad + (minsad * uniquenessRatio/100);<|fim▁hole|> d8 = _mm_sub_epi16(d0_8, dd_16);
for( d = 0; d < ndisp; d += 16 )
{
__m128i usad8 = _mm_load_si128((__m128i*)(sad + d));
__m128i vsad8 = _mm_load_si128((__m128i*)(sad + d + 8));
mask = _mm_cmpgt_epi16( thresh8, _mm_min_epi16(usad8,vsad8));
d8 = _mm_add_epi16(d8, dd_16);
if( !_mm_movemask_epi8(mask) )
continue;
mask = _mm_cmpgt_epi16( thresh8, usad8);
mask = _mm_and_si128(mask, _mm_or_si128(_mm_cmpgt_epi16(d1,d8), _mm_cmpgt_epi16(d8,d2)));
if( _mm_movemask_epi8(mask) )
break;
__m128i t8 = _mm_add_epi16(d8, dd_8);
mask = _mm_cmpgt_epi16( thresh8, vsad8);
mask = _mm_and_si128(mask, _mm_or_si128(_mm_cmpgt_epi16(d1,t8), _mm_cmpgt_epi16(t8,d2)));
if( _mm_movemask_epi8(mask) )
break;
}
if( d < ndisp )
{
dptr[y*dstep] = FILTERED;
continue;
}
}
if( 0 < mind && mind < ndisp - 1 )
{
int p = sad[mind+1], n = sad[mind-1];
d = p + n - 2*sad[mind] + std::abs(p - n);
dptr[y*dstep] = (short)(((ndisp - mind - 1 + mindisp)*256 + (d != 0 ? (p-n)*256/d : 0) + 15) >> 4);
}
else
dptr[y*dstep] = (short)((ndisp - mind - 1 + mindisp)*16);
costptr[y*coststep] = sad[mind];
}
}
}
#endif
static void
findStereoCorrespondenceBM( const Mat& left, const Mat& right,
Mat& disp, Mat& cost, const StereoBMParams& state,
uchar* buf, int _dy0, int _dy1 )
{
const int ALIGN = 16;
int x, y, d;
int wsz = state.SADWindowSize, wsz2 = wsz/2;
int dy0 = MIN(_dy0, wsz2+1), dy1 = MIN(_dy1, wsz2+1);
int ndisp = state.numDisparities;
int mindisp = state.minDisparity;
int lofs = MAX(ndisp - 1 + mindisp, 0);
int rofs = -MIN(ndisp - 1 + mindisp, 0);
int width = left.cols, height = left.rows;
int width1 = width - rofs - ndisp + 1;
int ftzero = state.preFilterCap;
int textureThreshold = state.textureThreshold;
int uniquenessRatio = state.uniquenessRatio;
short FILTERED = (short)((mindisp - 1) << DISPARITY_SHIFT);
int *sad, *hsad0, *hsad, *hsad_sub, *htext;
uchar *cbuf0, *cbuf;
const uchar* lptr0 = left.ptr() + lofs;
const uchar* rptr0 = right.ptr() + rofs;
const uchar *lptr, *lptr_sub, *rptr;
short* dptr = disp.ptr<short>();
int sstep = (int)left.step;
int dstep = (int)(disp.step/sizeof(dptr[0]));
int cstep = (height+dy0+dy1)*ndisp;
int costbuf = 0;
int coststep = cost.data ? (int)(cost.step/sizeof(costbuf)) : 0;
const int TABSZ = 256;
uchar tab[TABSZ];
sad = (int*)alignPtr(buf + sizeof(sad[0]), ALIGN);
hsad0 = (int*)alignPtr(sad + ndisp + 1 + dy0*ndisp, ALIGN);
htext = (int*)alignPtr((int*)(hsad0 + (height+dy1)*ndisp) + wsz2 + 2, ALIGN);
cbuf0 = (uchar*)alignPtr((uchar*)(htext + height + wsz2 + 2) + dy0*ndisp, ALIGN);
for( x = 0; x < TABSZ; x++ )
tab[x] = (uchar)std::abs(x - ftzero);
// initialize buffers
memset( hsad0 - dy0*ndisp, 0, (height + dy0 + dy1)*ndisp*sizeof(hsad0[0]) );
memset( htext - wsz2 - 1, 0, (height + wsz + 1)*sizeof(htext[0]) );
for( x = -wsz2-1; x < wsz2; x++ )
{
hsad = hsad0 - dy0*ndisp; cbuf = cbuf0 + (x + wsz2 + 1)*cstep - dy0*ndisp;
lptr = lptr0 + std::min(std::max(x, -lofs), width-lofs-1) - dy0*sstep;
rptr = rptr0 + std::min(std::max(x, -rofs), width-rofs-1) - dy0*sstep;
for( y = -dy0; y < height + dy1; y++, hsad += ndisp, cbuf += ndisp, lptr += sstep, rptr += sstep )
{
int lval = lptr[0];
for( d = 0; d < ndisp; d++ )
{
int diff = std::abs(lval - rptr[d]);
cbuf[d] = (uchar)diff;
hsad[d] = (int)(hsad[d] + diff);
}
htext[y] += tab[lval];
}
}
// initialize the left and right borders of the disparity map
for( y = 0; y < height; y++ )
{
for( x = 0; x < lofs; x++ )
dptr[y*dstep + x] = FILTERED;
for( x = lofs + width1; x < width; x++ )
dptr[y*dstep + x] = FILTERED;
}
dptr += lofs;
for( x = 0; x < width1; x++, dptr++ )
{
int* costptr = cost.data ? cost.ptr<int>() + lofs + x : &costbuf;
int x0 = x - wsz2 - 1, x1 = x + wsz2;
const uchar* cbuf_sub = cbuf0 + ((x0 + wsz2 + 1) % (wsz + 1))*cstep - dy0*ndisp;
cbuf = cbuf0 + ((x1 + wsz2 + 1) % (wsz + 1))*cstep - dy0*ndisp;
hsad = hsad0 - dy0*ndisp;
lptr_sub = lptr0 + MIN(MAX(x0, -lofs), width-1-lofs) - dy0*sstep;
lptr = lptr0 + MIN(MAX(x1, -lofs), width-1-lofs) - dy0*sstep;
rptr = rptr0 + MIN(MAX(x1, -rofs), width-1-rofs) - dy0*sstep;
for( y = -dy0; y < height + dy1; y++, cbuf += ndisp, cbuf_sub += ndisp,
hsad += ndisp, lptr += sstep, lptr_sub += sstep, rptr += sstep )
{
int lval = lptr[0];
for( d = 0; d < ndisp; d++ )
{
int diff = std::abs(lval - rptr[d]);
cbuf[d] = (uchar)diff;
hsad[d] = hsad[d] + diff - cbuf_sub[d];
}
htext[y] += tab[lval] - tab[lptr_sub[0]];
}
// fill borders
for( y = dy1; y <= wsz2; y++ )
htext[height+y] = htext[height+dy1-1];
for( y = -wsz2-1; y < -dy0; y++ )
htext[y] = htext[-dy0];
// initialize sums
for( d = 0; d < ndisp; d++ )
sad[d] = (int)(hsad0[d-ndisp*dy0]*(wsz2 + 2 - dy0));
hsad = hsad0 + (1 - dy0)*ndisp;
for( y = 1 - dy0; y < wsz2; y++, hsad += ndisp )
for( d = 0; d < ndisp; d++ )
sad[d] = (int)(sad[d] + hsad[d]);
int tsum = 0;
for( y = -wsz2-1; y < wsz2; y++ )
tsum += htext[y];
// finally, start the real processing
for( y = 0; y < height; y++ )
{
int minsad = INT_MAX, mind = -1;
hsad = hsad0 + MIN(y + wsz2, height+dy1-1)*ndisp;
hsad_sub = hsad0 + MAX(y - wsz2 - 1, -dy0)*ndisp;
for( d = 0; d < ndisp; d++ )
{
int currsad = sad[d] + hsad[d] - hsad_sub[d];
sad[d] = currsad;
if( currsad < minsad )
{
minsad = currsad;
mind = d;
}
}
tsum += htext[y + wsz2] - htext[y - wsz2 - 1];
if( tsum < textureThreshold )
{
dptr[y*dstep] = FILTERED;
continue;
}
if( uniquenessRatio > 0 )
{
int thresh = minsad + (minsad * uniquenessRatio/100);
for( d = 0; d < ndisp; d++ )
{
if( (d < mind-1 || d > mind+1) && sad[d] <= thresh)
break;
}
if( d < ndisp )
{
dptr[y*dstep] = FILTERED;
continue;
}
}
{
sad[-1] = sad[1];
sad[ndisp] = sad[ndisp-2];
int p = sad[mind+1], n = sad[mind-1];
d = p + n - 2*sad[mind] + std::abs(p - n);
dptr[y*dstep] = (short)(((ndisp - mind - 1 + mindisp)*256 + (d != 0 ? (p-n)*256/d : 0) + 15) >> 4);
costptr[y*coststep] = sad[mind];
}
}
}
}
static bool ocl_prefiltering(InputArray left0, InputArray right0, OutputArray left, OutputArray right, StereoBMParams* state)
{
if( state->preFilterType == StereoBM::PREFILTER_NORMALIZED_RESPONSE )
{
if(!ocl_prefilter_norm( left0, left, state->preFilterSize, state->preFilterCap))
return false;
if(!ocl_prefilter_norm( right0, right, state->preFilterSize, state->preFilterCap))
return false;
}
else
{
if(!ocl_prefilter_xsobel( left0, left, state->preFilterCap ))
return false;
if(!ocl_prefilter_xsobel( right0, right, state->preFilterCap))
return false;
}
return true;
}
struct PrefilterInvoker : public ParallelLoopBody
{
PrefilterInvoker(const Mat& left0, const Mat& right0, Mat& left, Mat& right,
uchar* buf0, uchar* buf1, StereoBMParams* _state)
{
imgs0[0] = &left0; imgs0[1] = &right0;
imgs[0] = &left; imgs[1] = &right;
buf[0] = buf0; buf[1] = buf1;
state = _state;
}
void operator()( const Range& range ) const
{
for( int i = range.start; i < range.end; i++ )
{
if( state->preFilterType == StereoBM::PREFILTER_NORMALIZED_RESPONSE )
prefilterNorm( *imgs0[i], *imgs[i], state->preFilterSize, state->preFilterCap, buf[i] );
else
prefilterXSobel( *imgs0[i], *imgs[i], state->preFilterCap );
}
}
const Mat* imgs0[2];
Mat* imgs[2];
uchar* buf[2];
StereoBMParams* state;
};
static bool ocl_stereobm( InputArray _left, InputArray _right,
OutputArray _disp, StereoBMParams* state)
{
int ndisp = state->numDisparities;
int mindisp = state->minDisparity;
int wsz = state->SADWindowSize;
int wsz2 = wsz/2;
ocl::Device devDef = ocl::Device::getDefault();
int sizeX = devDef.isIntel() ? 32 : std::max(11, 27 - devDef.maxComputeUnits()),
sizeY = sizeX - 1,
N = ndisp * 2;
cv::String opt = cv::format("-D DEFINE_KERNEL_STEREOBM -D MIN_DISP=%d -D NUM_DISP=%d"
" -D BLOCK_SIZE_X=%d -D BLOCK_SIZE_Y=%d -D WSZ=%d",
mindisp, ndisp,
sizeX, sizeY, wsz);
ocl::Kernel k("stereoBM", ocl::calib3d::stereobm_oclsrc, opt);
if(k.empty())
return false;
UMat left = _left.getUMat(), right = _right.getUMat();
int cols = left.cols, rows = left.rows;
_disp.create(_left.size(), CV_16S);
_disp.setTo((mindisp - 1) << 4);
Rect roi = Rect(Point(wsz2 + mindisp + ndisp - 1, wsz2), Point(cols-wsz2-mindisp, rows-wsz2) );
UMat disp = (_disp.getUMat())(roi);
int globalX = (disp.cols + sizeX - 1) / sizeX,
globalY = (disp.rows + sizeY - 1) / sizeY;
size_t globalThreads[3] = {N, globalX, globalY};
size_t localThreads[3] = {N, 1, 1};
int idx = 0;
idx = k.set(idx, ocl::KernelArg::PtrReadOnly(left));
idx = k.set(idx, ocl::KernelArg::PtrReadOnly(right));
idx = k.set(idx, ocl::KernelArg::WriteOnlyNoSize(disp));
idx = k.set(idx, rows);
idx = k.set(idx, cols);
idx = k.set(idx, state->textureThreshold);
idx = k.set(idx, state->uniquenessRatio);
return k.run(3, globalThreads, localThreads, false);
}
struct FindStereoCorrespInvoker : public ParallelLoopBody
{
FindStereoCorrespInvoker( const Mat& _left, const Mat& _right,
Mat& _disp, StereoBMParams* _state,
int _nstripes, size_t _stripeBufSize,
bool _useShorts, Rect _validDisparityRect,
Mat& _slidingSumBuf, Mat& _cost )
{
left = &_left; right = &_right;
disp = &_disp; state = _state;
nstripes = _nstripes; stripeBufSize = _stripeBufSize;
useShorts = _useShorts;
validDisparityRect = _validDisparityRect;
slidingSumBuf = &_slidingSumBuf;
cost = &_cost;
}
void operator()( const Range& range ) const
{
int cols = left->cols, rows = left->rows;
int _row0 = std::min(cvRound(range.start * rows / nstripes), rows);
int _row1 = std::min(cvRound(range.end * rows / nstripes), rows);
uchar *ptr = slidingSumBuf->ptr() + range.start * stripeBufSize;
int FILTERED = (state->minDisparity - 1)*16;
Rect roi = validDisparityRect & Rect(0, _row0, cols, _row1 - _row0);
if( roi.height == 0 )
return;
int row0 = roi.y;
int row1 = roi.y + roi.height;
Mat part;
if( row0 > _row0 )
{
part = disp->rowRange(_row0, row0);
part = Scalar::all(FILTERED);
}
if( _row1 > row1 )
{
part = disp->rowRange(row1, _row1);
part = Scalar::all(FILTERED);
}
Mat left_i = left->rowRange(row0, row1);
Mat right_i = right->rowRange(row0, row1);
Mat disp_i = disp->rowRange(row0, row1);
Mat cost_i = state->disp12MaxDiff >= 0 ? cost->rowRange(row0, row1) : Mat();
#if CV_SSE2
if( useShorts )
findStereoCorrespondenceBM_SSE2( left_i, right_i, disp_i, cost_i, *state, ptr, row0, rows - row1 );
else
#endif
findStereoCorrespondenceBM( left_i, right_i, disp_i, cost_i, *state, ptr, row0, rows - row1 );
if( state->disp12MaxDiff >= 0 )
validateDisparity( disp_i, cost_i, state->minDisparity, state->numDisparities, state->disp12MaxDiff );
if( roi.x > 0 )
{
part = disp_i.colRange(0, roi.x);
part = Scalar::all(FILTERED);
}
if( roi.x + roi.width < cols )
{
part = disp_i.colRange(roi.x + roi.width, cols);
part = Scalar::all(FILTERED);
}
}
protected:
const Mat *left, *right;
Mat* disp, *slidingSumBuf, *cost;
StereoBMParams *state;
int nstripes;
size_t stripeBufSize;
bool useShorts;
Rect validDisparityRect;
};
class StereoBMImpl : public StereoBM
{
public:
StereoBMImpl()
{
params = StereoBMParams();
}
StereoBMImpl( int _numDisparities, int _SADWindowSize )
{
params = StereoBMParams(_numDisparities, _SADWindowSize);
}
void compute( InputArray leftarr, InputArray rightarr, OutputArray disparr )
{
int dtype = disparr.fixedType() ? disparr.type() : params.dispType;
Size leftsize = leftarr.size();
if (leftarr.size() != rightarr.size())
CV_Error( Error::StsUnmatchedSizes, "All the images must have the same size" );
if (leftarr.type() != CV_8UC1 || rightarr.type() != CV_8UC1)
CV_Error( Error::StsUnsupportedFormat, "Both input images must have CV_8UC1" );
if (dtype != CV_16SC1 && dtype != CV_32FC1)
CV_Error( Error::StsUnsupportedFormat, "Disparity image must have CV_16SC1 or CV_32FC1 format" );
if( params.preFilterType != PREFILTER_NORMALIZED_RESPONSE &&
params.preFilterType != PREFILTER_XSOBEL )
CV_Error( Error::StsOutOfRange, "preFilterType must be = CV_STEREO_BM_NORMALIZED_RESPONSE" );
if( params.preFilterSize < 5 || params.preFilterSize > 255 || params.preFilterSize % 2 == 0 )
CV_Error( Error::StsOutOfRange, "preFilterSize must be odd and be within 5..255" );
if( params.preFilterCap < 1 || params.preFilterCap > 63 )
CV_Error( Error::StsOutOfRange, "preFilterCap must be within 1..63" );
if( params.SADWindowSize < 5 || params.SADWindowSize > 255 || params.SADWindowSize % 2 == 0 ||
params.SADWindowSize >= std::min(leftsize.width, leftsize.height) )
CV_Error( Error::StsOutOfRange, "SADWindowSize must be odd, be within 5..255 and be not larger than image width or height" );
if( params.numDisparities <= 0 || params.numDisparities % 16 != 0 )
CV_Error( Error::StsOutOfRange, "numDisparities must be positive and divisble by 16" );
if( params.textureThreshold < 0 )
CV_Error( Error::StsOutOfRange, "texture threshold must be non-negative" );
if( params.uniquenessRatio < 0 )
CV_Error( Error::StsOutOfRange, "uniqueness ratio must be non-negative" );
int FILTERED = (params.minDisparity - 1) << DISPARITY_SHIFT;
if(ocl::useOpenCL() && disparr.isUMat() && params.textureThreshold == 0)
{
UMat left, right;
if(ocl_prefiltering(leftarr, rightarr, left, right, ¶ms))
{
if(ocl_stereobm(left, right, disparr, ¶ms))
{
if( params.speckleRange >= 0 && params.speckleWindowSize > 0 )
filterSpeckles(disparr.getMat(), FILTERED, params.speckleWindowSize, params.speckleRange, slidingSumBuf);
if (dtype == CV_32F)
disparr.getUMat().convertTo(disparr, CV_32FC1, 1./(1 << DISPARITY_SHIFT), 0);
CV_IMPL_ADD(CV_IMPL_OCL);
return;
}
}
}
Mat left0 = leftarr.getMat(), right0 = rightarr.getMat();
disparr.create(left0.size(), dtype);
Mat disp0 = disparr.getMat();
preFilteredImg0.create( left0.size(), CV_8U );
preFilteredImg1.create( left0.size(), CV_8U );
cost.create( left0.size(), CV_16S );
Mat left = preFilteredImg0, right = preFilteredImg1;
int mindisp = params.minDisparity;
int ndisp = params.numDisparities;
int width = left0.cols;
int height = left0.rows;
int lofs = std::max(ndisp - 1 + mindisp, 0);
int rofs = -std::min(ndisp - 1 + mindisp, 0);
int width1 = width - rofs - ndisp + 1;
if( lofs >= width || rofs >= width || width1 < 1 )
{
disp0 = Scalar::all( FILTERED * ( disp0.type() < CV_32F ? 1 : 1./(1 << DISPARITY_SHIFT) ) );
return;
}
Mat disp = disp0;
if( dtype == CV_32F )
{
dispbuf.create(disp0.size(), CV_16S);
disp = dispbuf;
}
int wsz = params.SADWindowSize;
int bufSize0 = (int)((ndisp + 2)*sizeof(int));
bufSize0 += (int)((height+wsz+2)*ndisp*sizeof(int));
bufSize0 += (int)((height + wsz + 2)*sizeof(int));
bufSize0 += (int)((height+wsz+2)*ndisp*(wsz+2)*sizeof(uchar) + 256);
int bufSize1 = (int)((width + params.preFilterSize + 2) * sizeof(int) + 256);
int bufSize2 = 0;
if( params.speckleRange >= 0 && params.speckleWindowSize > 0 )
bufSize2 = width*height*(sizeof(Point_<short>) + sizeof(int) + sizeof(uchar));
#if CV_SSE2
bool useShorts = params.preFilterCap <= 31 && params.SADWindowSize <= 21 && checkHardwareSupport(CV_CPU_SSE2);
#else
const bool useShorts = false;
#endif
const double SAD_overhead_coeff = 10.0;
double N0 = 8000000 / (useShorts ? 1 : 4); // approx tbb's min number instructions reasonable for one thread
double maxStripeSize = std::min(std::max(N0 / (width * ndisp), (wsz-1) * SAD_overhead_coeff), (double)height);
int nstripes = cvCeil(height / maxStripeSize);
int bufSize = std::max(bufSize0 * nstripes, std::max(bufSize1 * 2, bufSize2));
if( slidingSumBuf.cols < bufSize )
slidingSumBuf.create( 1, bufSize, CV_8U );
uchar *_buf = slidingSumBuf.ptr();
parallel_for_(Range(0, 2), PrefilterInvoker(left0, right0, left, right, _buf, _buf + bufSize1, ¶ms), 1);
Rect validDisparityRect(0, 0, width, height), R1 = params.roi1, R2 = params.roi2;
validDisparityRect = getValidDisparityROI(R1.area() > 0 ? Rect(0, 0, width, height) : validDisparityRect,
R2.area() > 0 ? Rect(0, 0, width, height) : validDisparityRect,
params.minDisparity, params.numDisparities,
params.SADWindowSize);
parallel_for_(Range(0, nstripes),
FindStereoCorrespInvoker(left, right, disp, ¶ms, nstripes,
bufSize0, useShorts, validDisparityRect,
slidingSumBuf, cost));
if( params.speckleRange >= 0 && params.speckleWindowSize > 0 )
filterSpeckles(disp, FILTERED, params.speckleWindowSize, params.speckleRange, slidingSumBuf);
if (disp0.data != disp.data)
disp.convertTo(disp0, disp0.type(), 1./(1 << DISPARITY_SHIFT), 0);
}
AlgorithmInfo* info() const { return 0; }
int getMinDisparity() const { return params.minDisparity; }
void setMinDisparity(int minDisparity) { params.minDisparity = minDisparity; }
int getNumDisparities() const { return params.numDisparities; }
void setNumDisparities(int numDisparities) { params.numDisparities = numDisparities; }
int getBlockSize() const { return params.SADWindowSize; }
void setBlockSize(int blockSize) { params.SADWindowSize = blockSize; }
int getSpeckleWindowSize() const { return params.speckleWindowSize; }
void setSpeckleWindowSize(int speckleWindowSize) { params.speckleWindowSize = speckleWindowSize; }
int getSpeckleRange() const { return params.speckleRange; }
void setSpeckleRange(int speckleRange) { params.speckleRange = speckleRange; }
int getDisp12MaxDiff() const { return params.disp12MaxDiff; }
void setDisp12MaxDiff(int disp12MaxDiff) { params.disp12MaxDiff = disp12MaxDiff; }
int getPreFilterType() const { return params.preFilterType; }
void setPreFilterType(int preFilterType) { params.preFilterType = preFilterType; }
int getPreFilterSize() const { return params.preFilterSize; }
void setPreFilterSize(int preFilterSize) { params.preFilterSize = preFilterSize; }
int getPreFilterCap() const { return params.preFilterCap; }
void setPreFilterCap(int preFilterCap) { params.preFilterCap = preFilterCap; }
int getTextureThreshold() const { return params.textureThreshold; }
void setTextureThreshold(int textureThreshold) { params.textureThreshold = textureThreshold; }
int getUniquenessRatio() const { return params.uniquenessRatio; }
void setUniquenessRatio(int uniquenessRatio) { params.uniquenessRatio = uniquenessRatio; }
int getSmallerBlockSize() const { return 0; }
void setSmallerBlockSize(int) {}
Rect getROI1() const { return params.roi1; }
void setROI1(Rect roi1) { params.roi1 = roi1; }
Rect getROI2() const { return params.roi2; }
void setROI2(Rect roi2) { params.roi2 = roi2; }
void write(FileStorage& fs) const
{
fs << "name" << name_
<< "minDisparity" << params.minDisparity
<< "numDisparities" << params.numDisparities
<< "blockSize" << params.SADWindowSize
<< "speckleWindowSize" << params.speckleWindowSize
<< "speckleRange" << params.speckleRange
<< "disp12MaxDiff" << params.disp12MaxDiff
<< "preFilterType" << params.preFilterType
<< "preFilterSize" << params.preFilterSize
<< "preFilterCap" << params.preFilterCap
<< "textureThreshold" << params.textureThreshold
<< "uniquenessRatio" << params.uniquenessRatio;
}
void read(const FileNode& fn)
{
FileNode n = fn["name"];
CV_Assert( n.isString() && String(n) == name_ );
params.minDisparity = (int)fn["minDisparity"];
params.numDisparities = (int)fn["numDisparities"];
params.SADWindowSize = (int)fn["blockSize"];
params.speckleWindowSize = (int)fn["speckleWindowSize"];
params.speckleRange = (int)fn["speckleRange"];
params.disp12MaxDiff = (int)fn["disp12MaxDiff"];
params.preFilterType = (int)fn["preFilterType"];
params.preFilterSize = (int)fn["preFilterSize"];
params.preFilterCap = (int)fn["preFilterCap"];
params.textureThreshold = (int)fn["textureThreshold"];
params.uniquenessRatio = (int)fn["uniquenessRatio"];
params.roi1 = params.roi2 = Rect();
}
StereoBMParams params;
Mat preFilteredImg0, preFilteredImg1, cost, dispbuf;
Mat slidingSumBuf;
static const char* name_;
};
const char* StereoBMImpl::name_ = "StereoMatcher.BM";
Ptr<StereoBM> StereoBM::create(int _numDisparities, int _SADWindowSize)
{
return makePtr<StereoBMImpl>(_numDisparities, _SADWindowSize);
}
}
/* End of file. */<|fim▁end|> | __m128i thresh8 = _mm_set1_epi16((short)(thresh + 1));
__m128i d1 = _mm_set1_epi16((short)(mind-1)), d2 = _mm_set1_epi16((short)(mind+1));
__m128i dd_16 = _mm_add_epi16(dd_8, dd_8); |
<|file_name|>test_artificial_128_None_Lag1Trend_5__100.py<|end_file_name|><|fim▁begin|>import pyaf.Bench.TS_datasets as tsds<|fim▁hole|>
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 5, transform = "None", sigma = 0.0, exog_count = 100, ar_order = 0);<|fim▁end|> | import tests.artificial.process_artificial_dataset as art
|
<|file_name|>value_export.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2017 Equinor ASA, Norway.
The file 'value_export.c' is part of ERT - Ensemble based Reservoir Tool.
ERT is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ERT is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
for more details.
*/
#include <stdlib.h>
#include <stdbool.h>
#include <stdio.h>
#include <algorithm>
#include <iostream>
#include <fstream>
#include <string>
#include <vector>
#include <ext/json/cJSON.h>
#include <ert/util/util.h>
#include <ert/util/test_util.h>
#include <ert/util/test_work_area.hpp>
#include <ert/enkf/value_export.hpp>
namespace {
// This is super ugly but I could not find any way to do this without using a
// a global variable. Of course the correct thing to do would be to implement
// a new walk_directory that either collects first and return (include
// max_depth, remove callbacks), or behaves like an iterator. But that would
// require quite some efforts, and it's probably not worth it for just one
// test. Especially considering that C++17 includes the filesystem library that
// would make this trivial (but it's a bit unclear what compiler we should take
// as a reference)
std::vector<std::string> storage;
void file_cb(
char const* path,
char const* name,
void *
) {
storage.push_back(std::string(path) + UTIL_PATH_SEP_STRING + name);
}
bool dir_cb (
char const* path,
char const* name,
int,
void *
) {
storage.push_back(std::string(path) + UTIL_PATH_SEP_STRING + name);
return false;
}
std::vector<std::string> directory_tree(std::string const& root) {
storage.clear();
util_walk_directory(
root.c_str(),
file_cb,
nullptr,
dir_cb,
nullptr);
return storage; // this creates a copy of storage
}
} /* unnamed namespace */
void test_create() {
ecl::util::TestArea ta("value_export");
value_export_type * export_value = value_export_alloc( "", "parameters");
test_assert_int_equal( 0 , value_export_size( export_value ));
test_assert_true( value_export_is_instance( export_value ));
value_export_txt( export_value );
test_assert_false( util_file_exists( "parameters.txt" ));
value_export_json( export_value );
test_assert_false( util_file_exists( "parameters.json" ));
value_export_free( export_value );
}
void test_export_json() {
ecl::util::TestArea ta("value_export_json");
value_export_type * export_value = value_export_alloc( "path", "parameters");
util_make_path( "path" );
value_export_append(export_value, "KEY100", "SUBKEY1", 100);
value_export_append(export_value, "KEY200", "SUBKEY2", 200);
value_export_append(export_value, "KEY100", "SUBKEY2", 300);
value_export_append(export_value, "KEY200", "SUBKEY1", 400);
value_export_append(export_value, "KEY200", "SUBKEY3", 500);
test_assert_int_equal( 5 , value_export_size( export_value ));
value_export_json( export_value );
test_assert_true( util_file_exists( "path/parameters.json" ));
std::ifstream f("path/parameters.json");
auto const strJSON = std::string(
std::istreambuf_iterator<char>(f), std::istreambuf_iterator<char>());
cJSON *json = cJSON_Parse(strJSON.c_str());
test_assert_not_NULL(json);
const cJSON * key1 = cJSON_GetObjectItemCaseSensitive(json, "KEY100");
test_assert_true(cJSON_IsObject(key1));
const cJSON * subkey1 = cJSON_GetObjectItemCaseSensitive(key1, "SUBKEY1");
test_assert_true(cJSON_IsNumber(subkey1));
const cJSON * compkey1 = cJSON_GetObjectItemCaseSensitive(json, "KEY100:SUBKEY1");
test_assert_double_equal(compkey1->valuedouble, 100);
// Export again with more values
value_export_append(export_value, "KEY300", "SUBKEY1", 600);
test_assert_int_equal( 6 , value_export_size( export_value ));
value_export_json( export_value );
std::ifstream f2("path/parameters.json");
auto const strJSON2 = std::string(
std::istreambuf_iterator<char>(f2), std::istreambuf_iterator<char>());
test_assert_true(strJSON2.size() > strJSON.size());
auto tree = directory_tree("path");<|fim▁hole|> std::end(tree),
[](std::string const& l, std::string const& r) -> bool {
return l.size() < r.size();
});
test_assert_string_equal(tree[0].c_str(), "path/parameters.json");
test_assert_string_equal(
tree[1].substr(0, 30).c_str(),
"path/parameters.json_backup_20"); // Fix this in 80 years
value_export_free( export_value );
}
void test_export_txt__() {
ecl::util::TestArea ta("export_txt__");
value_export_type * export_value = value_export_alloc( "", "parameters");
value_export_append(export_value, "KEY100", "SUBKEY1", 100);
value_export_append(export_value, "KEY200", "SUBKEY2", 200);
test_assert_int_equal( 2 , value_export_size( export_value ));
value_export_txt( export_value );
value_export_txt__( export_value , "parameters__.txt");
test_assert_true( util_file_exists( "path/parameters__.txt" ));
test_assert_true( util_files_equal( "path/parameters__.txt", "path/parameters.txt"));
value_export_free( export_value );
}
void test_export_txt() {
ecl::util::TestArea ta("export_txt");
value_export_type * export_value = value_export_alloc( "path", "parameters");
util_make_path( "path" );
value_export_append(export_value, "KEY100", "SUBKEY1", 100);
value_export_append(export_value, "KEY200", "SUBKEY2", 200);
test_assert_int_equal( 2 , value_export_size( export_value ));
value_export_txt( export_value );
test_assert_true( util_file_exists( "path/parameters.txt" ));
value_export_txt__( export_value , "path/parameters__.txt");
test_assert_true( util_file_exists( "path/parameters__.txt" ));
test_assert_true( util_files_equal( "path/parameters__.txt", "path/parameters.txt"));
{
FILE * stream = util_fopen("path/parameters.txt", "r");
char key1[100],key2[100], subkey1[100], subkey2[100];
double v1,v2;
fscanf( stream, "%[^:]:%s %lg %[^:]:%s %lg" , key1, subkey1, &v1, key2, subkey2, &v2);
fclose( stream );
test_assert_string_equal( key1, "KEY100");
test_assert_string_equal( subkey1, "SUBKEY1");
test_assert_string_equal( key2, "KEY200");
test_assert_string_equal( subkey2, "SUBKEY2");
test_assert_double_equal( v1, 100 );
test_assert_double_equal( v2, 200 );
}
// Export again with more values
value_export_append(export_value, "KEY300", "SUBKEY1", 600);
test_assert_int_equal( 3 , value_export_size( export_value ));
value_export_txt( export_value );
auto tree = directory_tree("path");
test_assert_size_t_equal(tree.size(), 3); // there is also parameters__.txt
std::sort(
std::begin(tree),
std::end(tree),
[](std::string const& l, std::string const& r) -> bool {
return l.size() < r.size();
});
test_assert_string_equal(tree[0].c_str(), "path/parameters.txt");
test_assert_string_equal(
tree[2].substr(0, 29).c_str(),
"path/parameters.txt_backup_20"); // Fix this in 80 years
test_assert_false(util_files_equal(tree[0].c_str(), tree[1].c_str()));
test_assert_true(util_files_equal(tree[2].c_str(), tree[1].c_str()));
value_export_free( export_value );
}
int main(int argc , char ** argv) {
test_create();
test_export_txt();
test_export_json();
exit(0);
}<|fim▁end|> | test_assert_size_t_equal(tree.size(), 2);
std::sort(
std::begin(tree), |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Original filename: config.py
#
# Author: Tim Brandt
# Email: tbrandt@astro.princeton.edu
# Date: August 2011
#
# Summary: Set configuration parameters to sensible values.
#
import re
from subprocess import *
import multiprocessing
import numpy as np
def config(nframes, framesize):
###################################################################
# Fetch the total amount of physical system memory in bytes.
# This is the second entry on the second line of the standard
# output of the 'free' command.
###################################################################
print "\nGetting system parameters, setting pipeline execution parameters..."
osver = Popen(["uname", "-a"], stdout=PIPE).stdout.read()
if osver.startswith("Linux"):
print "You are running Linux."
elif osver.startswith("Darwin"):
print "You are running Mac OS-X."
else:
print "Your operating system is not recognized."
if osver.startswith("Linux"):
mem = Popen(["free", "-b"], stdout=PIPE).stdout.read()
mem = int(mem.split('\n')[1].split()[1])<|fim▁hole|> elif osver.startswith("Darwin"):
mem = Popen(["vm_stat"], stdout=PIPE).stdout.read().split('\n')
blocksize = re.search('.*size of ([0-9]+) bytes.*', mem[0]).group(1)
totmem = 0.
for line in mem:
if np.any(["Pages free:" in line, "Pages active:" in line,
"Pages inactive:" in line, "Pages speculative:" in line,
"Pages wired down:" in line]):
totmem += float(line.split(':')[1]) * float(blocksize)
mem = int(totmem)
ncpus = multiprocessing.cpu_count()
hostname = Popen("hostname", stdout=PIPE).stdout.read().split()[0]
print "\n You are running on " + hostname + "."
print " You have " + str(mem / 2**20) + " megabytes of memory and " + \
str(ncpus) + " threads available."
datasize = framesize * nframes * 4
print " The dataset consists of " + str(nframes) + " frames, " + \
str(datasize * 100 / mem) + "% of your physical RAM."
storeall = False
if datasize * 100 / mem < 20:
storeall = True
print " --> You have enough RAM to store all data."
print " The pipeline will not need to write all intermediate files."
else:
print " --> You do not have enough RAM to store all data."
print " The pipeline will need to write all intermediate files"
print " and do the reduction in pieces."
return mem, ncpus, storeall<|fim▁end|> | |
<|file_name|>which.py<|end_file_name|><|fim▁begin|># This license covers everything within this project, except for a few pieces<|fim▁hole|># it covers. The few relevant pieces of code are all contained inside these
# directories:
#
# - pwnlib/constants/
# - pwnlib/data/
#
#
# Copyright (c) 2015 Gallopsled and Zach Riggle
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import stat
def which(name, all = False):
"""which(name, flags = os.X_OK, all = False) -> str or str set
Works as the system command ``which``; searches $PATH for ``name`` and
returns a full path if found.
If `all` is :const:`True` the set of all found locations is returned, else
the first occurrence or :const:`None` is returned.
Arguments:
`name` (str): The file to search for.
`all` (bool): Whether to return all locations where `name` was found.
Returns:
If `all` is :const:`True` the set of all locations where `name` was found,
else the first location or :const:`None` if not found.
Example:
>>> which('sh')
'/bin/sh'
"""
# If name is a path, do not attempt to resolve it.
if os.path.sep in name:
return name
isroot = os.getuid() == 0
out = set()
try:
path = os.environ['PATH']
except KeyError:
log.exception('Environment variable $PATH is not set')
for p in path.split(os.pathsep):
p = os.path.join(p, name)
if os.access(p, os.X_OK):
st = os.stat(p)
if not stat.S_ISREG(st.st_mode):
continue
# work around this issue: https://bugs.python.org/issue9311
if isroot and not \
st.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
continue
if all:
out.add(p)
else:
return p
if all:
return out
else:
return None<|fim▁end|> | # of code that we either did not write ourselves or which we derived from code
# that we did not write ourselves. These few pieces have their license specified
# in a header, or by a file called LICENSE.txt, which will explain exactly what |
<|file_name|>_katello.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Eric D Helms <ericdhelms@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: katello
short_description: Manage Katello Resources
deprecated:
removed_in: "2.12"
why: "Replaced by re-designed individual modules living at https://github.com/theforeman/foreman-ansible-modules"
alternative: https://github.com/theforeman/foreman-ansible-modules
description:
- Allows the management of Katello resources inside your Foreman server.
version_added: "2.3"
author:
- Eric D Helms (@ehelms)
requirements:
- nailgun >= 0.28.0
- python >= 2.6
- datetime
options:
server_url:
description:
- URL of Foreman server.
required: true
username:
description:
- Username on Foreman server.
required: true
password:
description:
- Password for user accessing Foreman server.
required: true
entity:
description:
- The Foreman resource that the action will be performed on (e.g. organization, host).
choices:
- repository
- manifest
- repository_set
- sync_plan
- content_view
- lifecycle_environment
- activation_key
- product
required: true
action:
description:
- action associated to the entity resource to set or edit in dictionary format.
- Possible Action in relation to Entitys.
- "sync (available when entity=product or entity=repository)"
- "publish (available when entity=content_view)"
- "promote (available when entity=content_view)"
choices:
- sync
- publish
- promote
required: false
params:
description:
- Parameters associated to the entity resource and action, to set or edit in dictionary format.
- Each choice may be only available with specific entitys and actions.
- "Possible Choices are in the format of param_name ([entry,action,action,...],[entity,..],...)."
- The action "None" means no action specified.
- Possible Params in relation to entity and action.
- "name ([product,sync,None], [repository,sync], [repository_set,None], [sync_plan,None],"
- "[content_view,promote,publish,None], [lifecycle_environment,None], [activation_key,None])"
- "organization ([product,sync,None] ,[repository,sync,None], [repository_set,None], [sync_plan,None], "
- "[content_view,promote,publish,None], [lifecycle_environment,None], [activation_key,None])"
- "content ([manifest,None])"
- "product ([repository,sync,None], [repository_set,None], [sync_plan,None])"
- "basearch ([repository_set,None])"
- "releaserver ([repository_set,None])"
- "sync_date ([sync_plan,None])"
- "interval ([sync_plan,None])"
- "repositories ([content_view,None])"
- "from_environment ([content_view,promote])"
- "to_environment([content_view,promote])"
- "prior ([lifecycle_environment,None])"
- "content_view ([activation_key,None])"
- "lifecycle_environment ([activation_key,None])"
required: true
task_timeout:
description:
- The timeout in seconds to wait for the started Foreman action to finish.
- If the timeout is reached and the Foreman action did not complete, the ansible task fails. However the foreman action does not get canceled.
default: 1000
version_added: "2.7"
required: false
verify_ssl:
description:
- verify the ssl/https connection (e.g for a valid certificate)
default: false
type: bool
required: false
'''
EXAMPLES = '''
---
# Simple Example:
- name: Create Product
katello:
username: admin
password: admin
server_url: https://fakeserver.com
entity: product
params:
name: Centos 7
delegate_to: localhost
# Abstraction Example:
# katello.yml
---
- name: "{{ name }}"
katello:
username: admin
password: admin
server_url: https://fakeserver.com
entity: "{{ entity }}"
params: "{{ params }}"
delegate_to: localhost
# tasks.yml
---
- include: katello.yml
vars:
name: Create Dev Environment
entity: lifecycle_environment
params:
name: Dev
prior: Library
organization: Default Organization
- include: katello.yml
vars:
name: Create Centos Product
entity: product
params:
name: Centos 7
organization: Default Organization
- include: katello.yml
vars:
name: Create 7.2 Repository
entity: repository
params:
name: Centos 7.2
product: Centos 7
organization: Default Organization
content_type: yum
url: http://mirror.centos.org/centos/7/os/x86_64/
- include: katello.yml
vars:
name: Create Centos 7 View
entity: content_view
params:
name: Centos 7 View
organization: Default Organization
repositories:
- name: Centos 7.2
product: Centos 7
- include: katello.yml
vars:
name: Enable RHEL Product
entity: repository_set
params:
name: Red Hat Enterprise Linux 7 Server (RPMs)
product: Red Hat Enterprise Linux Server
organization: Default Organization
basearch: x86_64
releasever: 7
- include: katello.yml
vars:
name: Promote Contentview Environment with longer timout
task_timeout: 10800
entity: content_view
action: promote
params:
name: MyContentView
organization: MyOrganisation
from_environment: Testing
to_environment: Production
# Best Practices
# In Foreman, things can be done in paralell.
# When a conflicting action is already running,
# the task will fail instantly instead of waiting for the already running action to complete.
# So you sould use a "until success" loop to catch this.
- name: Promote Contentview Environment with increased Timeout
katello:
username: ansibleuser
password: supersecret
task_timeout: 10800
entity: content_view
action: promote
params:
name: MyContentView
organization: MyOrganisation
from_environment: Testing
to_environment: Production
register: task_result
until: task_result is success
retries: 9
delay: 120
'''
RETURN = '''# '''
import datetime
import os
import traceback
try:
from nailgun import entities, entity_fields, entity_mixins
from nailgun.config import ServerConfig
HAS_NAILGUN_PACKAGE = True
except Exception:
HAS_NAILGUN_PACKAGE = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
class NailGun(object):
def __init__(self, server, entities, module, task_timeout):
self._server = server
self._entities = entities
self._module = module
entity_mixins.TASK_TIMEOUT = task_timeout
def find_organization(self, name, **params):
org = self._entities.Organization(self._server, name=name, **params)
response = org.search(set(), {'search': 'name={0}'.format(name)})
if len(response) == 1:
return response[0]
else:
self._module.fail_json(msg="No organization found for %s" % name)
def find_lifecycle_environment(self, name, organization):
org = self.find_organization(organization)
lifecycle_env = self._entities.LifecycleEnvironment(self._server, name=name, organization=org)
response = lifecycle_env.search()
if len(response) == 1:
return response[0]
else:
self._module.fail_json(msg="No Lifecycle Found found for %s" % name)
def find_product(self, name, organization):
org = self.find_organization(organization)
product = self._entities.Product(self._server, name=name, organization=org)
response = product.search()
if len(response) == 1:
return response[0]
else:
self._module.fail_json(msg="No Product found for %s" % name)
def find_repository(self, name, product, organization):
product = self.find_product(product, organization)
repository = self._entities.Repository(self._server, name=name, product=product)
repository._fields['organization'] = entity_fields.OneToOneField(entities.Organization)
repository.organization = product.organization
response = repository.search()
if len(response) == 1:
return response[0]
else:
self._module.fail_json(msg="No Repository found for %s" % name)
def find_content_view(self, name, organization):
org = self.find_organization(organization)
content_view = self._entities.ContentView(self._server, name=name, organization=org)
response = content_view.search()
if len(response) == 1:
return response[0]
else:
self._module.fail_json(msg="No Content View found for %s" % name)
def organization(self, params):
name = params['name']
del params['name']
org = self.find_organization(name, **params)
if org:
org = self._entities.Organization(self._server, name=name, id=org.id, **params)
org.update()
else:
org = self._entities.Organization(self._server, name=name, **params)
org.create()
return True
def manifest(self, params):
org = self.find_organization(params['organization'])
params['organization'] = org.id
try:
file = open(os.getcwd() + params['content'], 'r')
content = file.read()
finally:
file.close()
manifest = self._entities.Subscription(self._server)
try:
manifest.upload(
data={'organization_id': org.id},
files={'content': content}
)
return True
except Exception as e:
if "Import is the same as existing data" in e.message:
return False
else:
self._module.fail_json(msg="Manifest import failed with %s" % to_native(e),
exception=traceback.format_exc())
def product(self, params):
org = self.find_organization(params['organization'])
params['organization'] = org.id
product = self._entities.Product(self._server, **params)
response = product.search()
if len(response) == 1:
product.id = response[0].id
product.update()
else:
product.create()
return True
def sync_product(self, params):
org = self.find_organization(params['organization'])
product = self.find_product(params['name'], org.name)
return product.sync()
    def repository(self, params):
        """Create or update a Repository belonging to ``params['product']``.

        NOTE: mutates the caller's ``params`` dict ('product' is replaced by
        an id, 'organization' is deleted). Always returns True.
        """
        product = self.find_product(params['product'], params['organization'])
        params['product'] = product.id
        del params['organization']
        repository = self._entities.Repository(self._server, **params)
        # Graft an 'organization' field onto the entity so search() can be
        # scoped to the product's organization — presumably nailgun's
        # Repository lacks this field by default; TODO confirm.
        repository._fields['organization'] = entity_fields.OneToOneField(entities.Organization)
        repository.organization = product.organization
        response = repository.search()
        if len(response) == 1:
            # Found exactly one match: update it in place.
            repository.id = response[0].id
            repository.update()
        else:
            repository.create()
        return True
def sync_repository(self, params):
org = self.find_organization(params['organization'])
repository = self.find_repository(params['name'], params['product'], org.name)
return repository.sync()
    def repository_set(self, params):
        """Enable a repository from a Red Hat repository set if not enabled.

        NOTE: mutates the caller's ``params`` dict ('product' and
        'organization' are deleted). Returns False when the product lookup
        is falsy, True otherwise (including the already-enabled case).
        """
        product = self.find_product(params['product'], params['organization'])
        del params['product']
        del params['organization']
        if not product:
            return False
        else:
            reposet = self._entities.RepositorySet(self._server, product=product, name=params['name'])
            # Assumes the search matches at least one set — an empty result
            # would raise IndexError here; TODO confirm that is acceptable.
            reposet = reposet.search()[0]
            # The enabled repository's name is the set name (parens stripped)
            # plus basearch and, when given, releasever.
            formatted_name = [params['name'].replace('(', '').replace(')', '')]
            formatted_name.append(params['basearch'])
            if 'releasever' in params:
                formatted_name.append(params['releasever'])
            formatted_name = ' '.join(formatted_name)
            repository = self._entities.Repository(self._server, product=product, name=formatted_name)
            # Graft an 'organization' field so the search can be scoped to
            # the product's organization (same trick as in repository()).
            repository._fields['organization'] = entity_fields.OneToOneField(entities.Organization)
            repository.organization = product.organization
            repository = repository.search()
            if len(repository) == 0:
                # Not enabled yet: enable it for the requested arch/release.
                if 'releasever' in params:
                    reposet.enable(data={'basearch': params['basearch'], 'releasever': params['releasever']})
                else:
                    reposet.enable(data={'basearch': params['basearch']})
        return True
def sync_plan(self, params):
org = self.find_organization(params['organization'])
params['organization'] = org.id
params['sync_date'] = datetime.datetime.strptime(params['sync_date'], "%H:%M")
<|fim▁hole|> self._server,
name=params['name'],
organization=org
)
response = sync_plan.search()
sync_plan.sync_date = params['sync_date']
sync_plan.interval = params['interval']
if len(response) == 1:
sync_plan.id = response[0].id
sync_plan.update()
else:
response = sync_plan.create()
sync_plan.id = response[0].id
if products:
ids = []
for name in products:
product = self.find_product(name, org.name)
ids.append(product.id)
sync_plan.add_products(data={'product_ids': ids})
return True
def content_view(self, params):
org = self.find_organization(params['organization'])
content_view = self._entities.ContentView(self._server, name=params['name'], organization=org)
response = content_view.search()
if len(response) == 1:
content_view.id = response[0].id
content_view.update()
else:
content_view = content_view.create()
if params['repositories']:
repos = []
for repository in params['repositories']:
repository = self.find_repository(repository['name'], repository['product'], org.name)
repos.append(repository)
content_view.repository = repos
content_view.update(['repository'])
def find_content_view_version(self, name, organization, environment):
env = self.find_lifecycle_environment(environment, organization)
content_view = self.find_content_view(name, organization)
content_view_version = self._entities.ContentViewVersion(self._server, content_view=content_view)
response = content_view_version.search(['content_view'], {'environment_id': env.id})
if len(response) == 1:
return response[0]
else:
self._module.fail_json(msg="No Content View version found for %s" % response)
def publish(self, params):
content_view = self.find_content_view(params['name'], params['organization'])
return content_view.publish()
def promote(self, params):
to_environment = self.find_lifecycle_environment(params['to_environment'], params['organization'])
version = self.find_content_view_version(params['name'], params['organization'], params['from_environment'])
data = {'environment_id': to_environment.id}
return version.promote(data=data)
def lifecycle_environment(self, params):
org = self.find_organization(params['organization'])
prior_env = self.find_lifecycle_environment(params['prior'], params['organization'])
lifecycle_env = self._entities.LifecycleEnvironment(self._server, name=params['name'], organization=org, prior=prior_env)
response = lifecycle_env.search()
if len(response) == 1:
lifecycle_env.id = response[0].id
lifecycle_env.update()
else:
lifecycle_env.create()
return True
def activation_key(self, params):
org = self.find_organization(params['organization'])
activation_key = self._entities.ActivationKey(self._server, name=params['name'], organization=org)
response = activation_key.search()
if len(response) == 1:
activation_key.id = response[0].id
activation_key.update()
else:
activation_key.create()
if params['content_view']:
content_view = self.find_content_view(params['content_view'], params['organization'])
lifecycle_environment = self.find_lifecycle_environment(params['lifecycle_environment'], params['organization'])
activation_key.content_view = content_view
activation_key.environment = lifecycle_environment
activation_key.update()
return True
def main():
    """Ansible entry point: connect to the Foreman/Katello server and
    dispatch the requested entity/action to the NailGun wrapper."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True),
            username=dict(type='str', required=True, no_log=True),
            password=dict(type='str', required=True, no_log=True),
            entity=dict(type='str', required=True,
                        choices=['repository', 'manifest', 'repository_set', 'sync_plan',
                                 'content_view', 'lifecycle_environment', 'activation_key', 'product']),
            action=dict(type='str', choices=['sync', 'publish', 'promote']),
            verify_ssl=dict(type='bool', default=False),
            task_timeout=dict(type='int', default=1000),
            params=dict(type='dict', required=True, no_log=True),
        ),
        supports_check_mode=True,
    )
    # Fail early when the nailgun bindings are absent. (The hint previously
    # had an unbalanced parenthesis.)
    if not HAS_NAILGUN_PACKAGE:
        module.fail_json(msg="Missing required nailgun module (check docs or install with: pip install nailgun)")
    server_url = module.params['server_url']
    username = module.params['username']
    password = module.params['password']
    entity = module.params['entity']
    action = module.params['action']
    params = module.params['params']
    verify_ssl = module.params['verify_ssl']
    task_timeout = module.params['task_timeout']
    server = ServerConfig(
        url=server_url,
        auth=(username, password),
        verify=verify_ssl
    )
    ng = NailGun(server, entities, module, task_timeout)
    # Smoke-test the connection and credentials before doing any real work.
    try:
        org = entities.Organization(server)
        org.search()
    except Exception as e:
        module.fail_json(msg="Failed to connect to Foreman server: %s " % e)
    result = False
    # Dispatch: 'action' refines the behavior for a few entities; the
    # default per entity is create-or-update.
    if entity == 'product':
        if action == 'sync':
            result = ng.sync_product(params)
        else:
            result = ng.product(params)
    elif entity == 'repository':
        if action == 'sync':
            result = ng.sync_repository(params)
        else:
            result = ng.repository(params)
    elif entity == 'manifest':
        result = ng.manifest(params)
    elif entity == 'repository_set':
        result = ng.repository_set(params)
    elif entity == 'sync_plan':
        result = ng.sync_plan(params)
    elif entity == 'content_view':
        if action == 'publish':
            result = ng.publish(params)
        elif action == 'promote':
            result = ng.promote(params)
        else:
            result = ng.content_view(params)
    elif entity == 'lifecycle_environment':
        result = ng.lifecycle_environment(params)
    elif entity == 'activation_key':
        result = ng.activation_key(params)
    else:
        module.fail_json(changed=False, result="Unsupported entity supplied")
    module.exit_json(changed=result, result="%s updated" % entity)
if __name__ == '__main__':
main()<|fim▁end|> | products = params['products']
del params['products']
sync_plan = self._entities.SyncPlan( |
<|file_name|>paint_context.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Painting of display lists using Moz2D/Azure.
use azure::azure::AzIntSize;
use azure::azure_hl::{Color, ColorPattern};
use azure::azure_hl::{DrawOptions, DrawSurfaceOptions, DrawTarget, ExtendMode, FilterType};
use azure::azure_hl::{GaussianBlurInput, GradientStop, Filter, LinearGradientPattern};
use azure::azure_hl::{PatternRef, Path, PathBuilder, CompositionOp};
use azure::azure_hl::{GaussianBlurAttribute, StrokeOptions, SurfaceFormat};
use azure::scaled_font::ScaledFont;
use azure::{AZ_CAP_BUTT, AzFloat, struct__AzDrawOptions, struct__AzGlyph};
use azure::{struct__AzGlyphBuffer, struct__AzPoint, AzDrawTargetFillGlyphs};
use display_list::TextOrientation::{SidewaysLeft, SidewaysRight, Upright};
use display_list::{BOX_SHADOW_INFLATION_FACTOR, BorderRadii, ClippingRegion, TextDisplayItem};
use filters;
use font_context::FontContext;
use geom::matrix2d::Matrix2D;
use geom::point::Point2D;
use geom::rect::Rect;
use geom::side_offsets::SideOffsets2D;
use geom::size::Size2D;
use libc::size_t;
use libc::types::common::c99::{uint16_t, uint32_t};
use png::PixelsByColorType;
use servo_net::image::base::Image;
use servo_util::geometry::{Au, MAX_RECT};
use servo_util::opts;
use servo_util::range::Range;
use std::default::Default;
use std::f32;
use std::mem;
use std::num::{Float, FloatMath};
use std::ptr;
use style::computed_values::{border_style, filter, mix_blend_mode};
use std::sync::Arc;
use text::TextRun;
use text::glyph::CharIndex;
/// State threaded through the painting of one display list: the Azure draw
/// target plus the page/screen geometry and clip bookkeeping.
pub struct PaintContext<'a> {
    /// The Azure draw target that all painting operations render into.
    pub draw_target: DrawTarget,
    /// Font context used when rendering text runs.
    pub font_ctx: &'a mut Box<FontContext>,
    /// The rectangle that this context encompasses in page coordinates.
    pub page_rect: Rect<f32>,
    /// The rectangle that this context encompasses in screen coordinates (pixels).
    pub screen_rect: Rect<uint>,
    /// The clipping rect for the stacking context as a whole.
    pub clip_rect: Option<Rect<Au>>,
    /// The current transient clipping region, if any. A "transient clipping region" is the
    /// clipping region used by the last display item. We cache the last value so that we avoid
    /// pushing and popping clipping regions unnecessarily.
    pub transient_clip: Option<ClippingRegion>,
}
/// Identifies which side of a border rectangle is being painted.
#[deriving(Copy)]
enum Direction {
    Top,
    Left,
    Right,
    Bottom
}
/// Dash length multiplier, in units of the border width, for dotted and
/// dashed border styles.
#[deriving(Copy)]
enum DashSize {
    DottedBorder = 1,
    DashedBorder = 3
}
impl<'a> PaintContext<'a> {
    /// Returns a borrow of the underlying Azure draw target.
    pub fn get_draw_target(&self) -> &DrawTarget {
        &self.draw_target
    }
    /// Fills `bounds` with the given solid `color`.
    pub fn draw_solid_color(&self, bounds: &Rect<Au>, color: Color) {
        self.draw_target.make_current();
        self.draw_target.fill_rect(&bounds.to_azure_rect(),
                                   PatternRef::Color(&ColorPattern::new(color)),
                                   None);
    }
    /// Draws all four border segments of `bounds`, honoring per-side widths,
    /// corner radii, colors, and styles.
    pub fn draw_border(&self,
                       bounds: &Rect<Au>,
                       border: &SideOffsets2D<Au>,
                       radius: &BorderRadii<Au>,
                       color: &SideOffsets2D<Color>,
                       style: &SideOffsets2D<border_style::T>) {
        // Convert app units to device pixels once, up front.
        let border = border.to_float_px();
        let radius = radius.to_radii_px();
        self.draw_border_segment(Direction::Top, bounds, &border, &radius, color, style);
        self.draw_border_segment(Direction::Right, bounds, &border, &radius, color, style);
        self.draw_border_segment(Direction::Bottom, bounds, &border, &radius, color, style);
        self.draw_border_segment(Direction::Left, bounds, &border, &radius, color, style);
    }
    /// Draws a styled line occupying `bounds`, reusing the border-segment
    /// machinery with a default (zero) corner radius.
    pub fn draw_line(&self, bounds: &Rect<Au>, color: Color, style: border_style::T) {
        self.draw_target.make_current();
        self.draw_line_segment(bounds, &Default::default(), color, style);
    }
    /// Pushes a rectangular clip covering `bounds` onto the draw target.
    /// Must be balanced by a matching `draw_pop_clip`.
    pub fn draw_push_clip(&self, bounds: &Rect<Au>) {
        let rect = bounds.to_azure_rect();
        let path_builder = self.draw_target.create_path_builder();
        // Build the rectangle as an explicit four-point path.
        let left_top = Point2D(rect.origin.x, rect.origin.y);
        let right_top = Point2D(rect.origin.x + rect.size.width, rect.origin.y);
        let left_bottom = Point2D(rect.origin.x, rect.origin.y + rect.size.height);
        let right_bottom = Point2D(rect.origin.x + rect.size.width,
                                   rect.origin.y + rect.size.height);
        path_builder.move_to(left_top);
        path_builder.line_to(right_top);
        path_builder.line_to(right_bottom);
        path_builder.line_to(left_bottom);
        let path = path_builder.finish();
        self.draw_target.push_clip(&path);
    }
    /// Pops the clip most recently pushed by `draw_push_clip`.
    pub fn draw_pop_clip(&self) {
        self.draw_target.pop_clip();
    }
    /// Draws `image` scaled into `bounds`. Only RGBA8 and K8 pixel formats
    /// are supported; RGB8 and KA8 panic.
    pub fn draw_image(&self, bounds: &Rect<Au>, image: Arc<Box<Image>>) {
        let size = Size2D(image.width as i32, image.height as i32);
        let (pixel_width, pixels, source_format) = match image.pixels {
            PixelsByColorType::RGBA8(ref pixels) => (4, pixels.as_slice(), SurfaceFormat::B8G8R8A8),
            PixelsByColorType::K8(ref pixels) => (1, pixels.as_slice(), SurfaceFormat::A8),
            PixelsByColorType::RGB8(_) => panic!("RGB8 color type not supported"),
            PixelsByColorType::KA8(_) => panic!("KA8 color type not supported"),
        };
        // Row stride in bytes: width times bytes-per-pixel.
        let stride = image.width * pixel_width;
        self.draw_target.make_current();
        let draw_target_ref = &self.draw_target;
        let azure_surface = draw_target_ref.create_source_surface_from_data(pixels,
                                                                            size,
                                                                            stride as i32,
                                                                            source_format);
        // Source is the whole image; destination is the display item bounds,
        // so the surface is scaled with linear filtering.
        let source_rect = Rect(Point2D(0.0, 0.0),
                               Size2D(image.width as AzFloat, image.height as AzFloat));
        let dest_rect = bounds.to_azure_rect();
        let draw_surface_options = DrawSurfaceOptions::new(Filter::Linear, true);
        let draw_options = DrawOptions::new(1.0, 0);
        draw_target_ref.draw_surface(azure_surface,
                                     dest_rect,
                                     source_rect,
                                     draw_surface_options,
                                     draw_options);
    }
    /// Clears the target to fully transparent black using the `Source`
    /// composition op (overwrites destination pixels rather than blending).
    pub fn clear(&self) {
        let pattern = ColorPattern::new(Color::new(0.0, 0.0, 0.0, 0.0));
        // NOTE(review): the rect mixes the page rect's origin with the
        // screen rect's size — looks intentional for tile-local clearing,
        // but confirm.
        let rect = Rect(Point2D(self.page_rect.origin.x as AzFloat,
                                self.page_rect.origin.y as AzFloat),
                        Size2D(self.screen_rect.size.width as AzFloat,
                               self.screen_rect.size.height as AzFloat));
        let mut draw_options = DrawOptions::new(1.0, 0);
        draw_options.set_composition_op(CompositionOp::Source);
        self.draw_target.make_current();
        self.draw_target.fill_rect(&rect, PatternRef::Color(&pattern), Some(&draw_options));
    }
    /// Paints one side of a border, dispatching on that side's CSS border
    /// style to the appropriate specialized drawing routine.
    fn draw_border_segment(&self,
                           direction: Direction,
                           bounds: &Rect<Au>,
                           border: &SideOffsets2D<f32>,
                           radius: &BorderRadii<AzFloat>,
                           color: &SideOffsets2D<Color>,
                           style: &SideOffsets2D<border_style::T>) {
        // Select the style and color belonging to this side.
        let (style_select, color_select) = match direction {
            Direction::Top => (style.top, color.top),
            Direction::Left => (style.left, color.left),
            Direction::Right => (style.right, color.right),
            Direction::Bottom => (style.bottom, color.bottom)
        };
        match style_select {
            border_style::T::none | border_style::T::hidden => {}
            border_style::T::dotted => {
                // FIXME(sammykim): This doesn't work well with dash_pattern and cap_style.
                self.draw_dashed_border_segment(direction,
                                                bounds,
                                                border,
                                                color_select,
                                                DashSize::DottedBorder);
            }
            border_style::T::dashed => {
                self.draw_dashed_border_segment(direction,
                                                bounds,
                                                border,
                                                color_select,
                                                DashSize::DashedBorder);
            }
            border_style::T::solid => {
                self.draw_solid_border_segment(direction, bounds, border, radius, color_select);
            }
            border_style::T::double => {
                self.draw_double_border_segment(direction, bounds, border, radius, color_select);
            }
            border_style::T::groove | border_style::T::ridge => {
                self.draw_groove_ridge_border_segment(direction,
                                                      bounds,
                                                      border,
                                                      radius,
                                                      color_select,
                                                      style_select);
            }
            border_style::T::inset | border_style::T::outset => {
                self.draw_inset_outset_border_segment(direction,
                                                      bounds,
                                                      border,
                                                      radius,
                                                      color_select,
                                                      style_select);
            }
        }
    }
    /// Draws a line as a degenerate border segment: a synthetic border whose
    /// width on every side equals the bounds' width is painted along the
    /// `Right` direction using the requested style.
    fn draw_line_segment(&self,
                         bounds: &Rect<Au>,
                         radius: &BorderRadii<AzFloat>,
                         color: Color,
                         style: border_style::T) {
        let border = SideOffsets2D::new_all_same(bounds.size.width).to_float_px();
        match style {
            border_style::T::none | border_style::T::hidden => {}
            border_style::T::dotted => {
                self.draw_dashed_border_segment(Direction::Right,
                                                bounds,
                                                &border,
                                                color,
                                                DashSize::DottedBorder);
            }
            border_style::T::dashed => {
                self.draw_dashed_border_segment(Direction::Right,
                                                bounds,
                                                &border,
                                                color,
                                                DashSize::DashedBorder);
            }
            border_style::T::solid => {
                self.draw_solid_border_segment(Direction::Right, bounds, &border, radius, color)
            }
            border_style::T::double => {
                self.draw_double_border_segment(Direction::Right, bounds, &border, radius, color)
            }
            border_style::T::groove | border_style::T::ridge => {
                self.draw_groove_ridge_border_segment(Direction::Right,
                                                      bounds,
                                                      &border,
                                                      radius,
                                                      color,
                                                      style);
            }
            border_style::T::inset | border_style::T::outset => {
                self.draw_inset_outset_border_segment(Direction::Right,
                                                      bounds,
                                                      &border,
                                                      radius,
                                                      color,
                                                      style);
            }
        }
    }
    /// Builds the path for one border side (via `create_border_path_segment`)
    /// and fills it with a solid `color`.
    fn draw_border_path(&self,
                        bounds: &Rect<f32>,
                        direction: Direction,
                        border: &SideOffsets2D<f32>,
                        radii: &BorderRadii<AzFloat>,
                        color: Color) {
        let mut path_builder = self.draw_target.create_path_builder();
        self.create_border_path_segment(&mut path_builder,
                                        bounds,
                                        direction,
                                        border,
                                        radii);
        let draw_options = DrawOptions::new(1.0, 0);
        self.draw_target.fill(&path_builder.finish(), &ColorPattern::new(color), &draw_options);
    }
    /// Pushes a rounded-rectangle clip built from `bounds` and `radii`.
    /// Must be balanced by a later `pop_clip` on the draw target.
    fn push_rounded_rect_clip(&self, bounds: &Rect<f32>, radii: &BorderRadii<AzFloat>) {
        let mut path_builder = self.draw_target.create_path_builder();
        self.create_rounded_rect_path(&mut path_builder, bounds, radii);
        self.draw_target.push_clip(&path_builder.finish());
    }
// The following comment is wonderful, and stolen from
// gecko:gfx/thebes/gfxContext.cpp:RoundedRectangle for reference.
//
// It does not currently apply to the code, but will be extremely useful in<|fim▁hole|> // the future when the below TODO is addressed.
//
// TODO(cgaebel): Switch from arcs to beziers for drawing the corners.
// Then, add http://www.subcide.com/experiments/fail-whale/
// to the reftest suite.
//
// ---------------------------------------------------------------
//
// For CW drawing, this looks like:
//
// ...******0** 1 C
// ****
// *** 2
// **
// *
// *
// 3
// *
// *
//
// Where 0, 1, 2, 3 are the control points of the Bezier curve for
// the corner, and C is the actual corner point.
//
// At the start of the loop, the current point is assumed to be
// the point adjacent to the top left corner on the top
// horizontal. Note that corner indices start at the top left and
// continue clockwise, whereas in our loop i = 0 refers to the top
// right corner.
//
// When going CCW, the control points are swapped, and the first
// corner that's drawn is the top left (along with the top segment).
//
// There is considerable latitude in how one chooses the four
// control points for a Bezier curve approximation to an ellipse.
// For the overall path to be continuous and show no corner at the
// endpoints of the arc, points 0 and 3 must be at the ends of the
// straight segments of the rectangle; points 0, 1, and C must be
// collinear; and points 3, 2, and C must also be collinear. This
// leaves only two free parameters: the ratio of the line segments
// 01 and 0C, and the ratio of the line segments 32 and 3C. See
// the following papers for extensive discussion of how to choose
// these ratios:
//
// Dokken, Tor, et al. "Good approximation of circles by
// curvature-continuous Bezier curves." Computer-Aided
// Geometric Design 7(1990) 33--41.
// Goldapp, Michael. "Approximation of circular arcs by cubic
// polynomials." Computer-Aided Geometric Design 8(1991) 227--238.
// Maisonobe, Luc. "Drawing an elliptical arc using polylines,
// quadratic, or cubic Bezier curves."
// http://www.spaceroots.org/documents/ellipse/elliptical-arc.pdf
//
// We follow the approach in section 2 of Goldapp (least-error,
// Hermite-type approximation) and make both ratios equal to
//
// 2 2 + n - sqrt(2n + 28)
// alpha = - * ---------------------
// 3 n - 4
//
// where n = 3( cbrt(sqrt(2)+1) - cbrt(sqrt(2)-1) ).
//
// This is the result of Goldapp's equation (10b) when the angle
// swept out by the arc is pi/2, and the parameter "a-bar" is the
// expression given immediately below equation (21).
//
// Using this value, the maximum radial error for a circle, as a
// fraction of the radius, is on the order of 0.2 x 10^-3.
// Neither Dokken nor Goldapp discusses error for a general
// ellipse; Maisonobe does, but his choice of control points
// follows different constraints, and Goldapp's expression for
// 'alpha' gives much smaller radial error, even for very flat
// ellipses, than Maisonobe's equivalent.
//
// For the various corners and for each axis, the sign of this
// constant changes, or it might be 0 -- it's multiplied by the
// appropriate multiplier from the list before using.
    /// Appends the outline of one border side to `path_builder`.
    ///
    /// Each match arm walks the outer edge of that side, traces the outer
    /// corner arc(s), then walks back along the inner edge tracing the inner
    /// ("elbow") arc(s), producing a closed ring-segment shape that
    /// `draw_border_path` fills.
    #[allow(non_snake_case)]
    fn create_border_path_segment(&self,
                                  path_builder: &mut PathBuilder,
                                  bounds: &Rect<f32>,
                                  direction: Direction,
                                  border: &SideOffsets2D<f32>,
                                  radius: &BorderRadii<AzFloat>) {
        // T = top, B = bottom, L = left, R = right
        let box_TL = bounds.origin;
        let box_TR = box_TL + Point2D(bounds.size.width, 0.0);
        let box_BL = box_TL + Point2D(0.0, bounds.size.height);
        let box_BR = box_TL + Point2D(bounds.size.width, bounds.size.height);
        // Angles for the eight compass points, measured from the right
        // (rad_R = 0) and increasing clockwise in steps of pi/4.
        let rad_R: AzFloat = 0.;
        let rad_BR = rad_R + f32::consts::FRAC_PI_4;
        let rad_B = rad_BR + f32::consts::FRAC_PI_4;
        let rad_BL = rad_B + f32::consts::FRAC_PI_4;
        let rad_L = rad_BL + f32::consts::FRAC_PI_4;
        let rad_TL = rad_L + f32::consts::FRAC_PI_4;
        let rad_T = rad_TL + f32::consts::FRAC_PI_4;
        let rad_TR = rad_T + f32::consts::FRAC_PI_4;
        // Small helpers producing axis-aligned offset vectors.
        fn dx(x: AzFloat) -> Point2D<AzFloat> {
            Point2D(x, 0.)
        }
        fn dy(y: AzFloat) -> Point2D<AzFloat> {
            Point2D(0., y)
        }
        fn dx_if(cond: bool, dx: AzFloat) -> Point2D<AzFloat> {
            Point2D(if cond { dx } else { 0. }, 0.)
        }
        fn dy_if(cond: bool, dy: AzFloat) -> Point2D<AzFloat> {
            Point2D(0., if cond { dy } else { 0. })
        }
        match direction {
            Direction::Top => {
                let edge_TL = box_TL + dx(radius.top_left.max(border.left));
                let edge_TR = box_TR + dx(-radius.top_right.max(border.right));
                let edge_BR = edge_TR + dy(border.top);
                let edge_BL = edge_TL + dy(border.top);
                let corner_TL = edge_TL + dx_if(radius.top_left == 0., -border.left);
                let corner_TR = edge_TR + dx_if(radius.top_right == 0., border.right);
                path_builder.move_to(corner_TL);
                path_builder.line_to(corner_TR);
                if radius.top_right != 0. {
                    // the origin is the center of the arcs we're about to draw.
                    let origin = edge_TR + Point2D((border.right - radius.top_right).max(0.),
                                                   radius.top_right);
                    // the elbow is the inside of the border's curve.
                    let distance_to_elbow = (radius.top_right - border.top).max(0.);
                    path_builder.arc(origin, radius.top_right, rad_T, rad_TR, false);
                    path_builder.arc(origin, distance_to_elbow, rad_TR, rad_T, true);
                }
                path_builder.line_to(edge_BR);
                path_builder.line_to(edge_BL);
                if radius.top_left != 0. {
                    let origin = edge_TL + Point2D(-(border.left - radius.top_left).max(0.),
                                                   radius.top_left);
                    let distance_to_elbow = (radius.top_left - border.top).max(0.);
                    path_builder.arc(origin, distance_to_elbow, rad_T, rad_TL, true);
                    path_builder.arc(origin, radius.top_left, rad_TL, rad_T, false);
                }
            }
            Direction::Left => {
                let edge_TL = box_TL + dy(radius.top_left.max(border.top));
                let edge_BL = box_BL + dy(-radius.bottom_left.max(border.bottom));
                let edge_TR = edge_TL + dx(border.left);
                let edge_BR = edge_BL + dx(border.left);
                let corner_TL = edge_TL + dy_if(radius.top_left == 0., -border.top);
                let corner_BL = edge_BL + dy_if(radius.bottom_left == 0., border.bottom);
                path_builder.move_to(corner_BL);
                path_builder.line_to(corner_TL);
                if radius.top_left != 0. {
                    let origin = edge_TL + Point2D(radius.top_left,
                                                   -(border.top - radius.top_left).max(0.));
                    let distance_to_elbow = (radius.top_left - border.left).max(0.);
                    path_builder.arc(origin, radius.top_left, rad_L, rad_TL, false);
                    path_builder.arc(origin, distance_to_elbow, rad_TL, rad_L, true);
                }
                path_builder.line_to(edge_TR);
                path_builder.line_to(edge_BR);
                if radius.bottom_left != 0. {
                    let origin = edge_BL +
                        Point2D(radius.bottom_left,
                                (border.bottom - radius.bottom_left).max(0.));
                    let distance_to_elbow = (radius.bottom_left - border.left).max(0.);
                    path_builder.arc(origin, distance_to_elbow, rad_L, rad_BL, true);
                    path_builder.arc(origin, radius.bottom_left, rad_BL, rad_L, false);
                }
            }
            Direction::Right => {
                let edge_TR = box_TR + dy(radius.top_right.max(border.top));
                let edge_BR = box_BR + dy(-radius.bottom_right.max(border.bottom));
                let edge_TL = edge_TR + dx(-border.right);
                let edge_BL = edge_BR + dx(-border.right);
                let corner_TR = edge_TR + dy_if(radius.top_right == 0., -border.top);
                let corner_BR = edge_BR + dy_if(radius.bottom_right == 0., border.bottom);
                path_builder.move_to(edge_BL);
                path_builder.line_to(edge_TL);
                if radius.top_right != 0. {
                    let origin = edge_TR + Point2D(-radius.top_right,
                                                   -(border.top - radius.top_right).max(0.));
                    let distance_to_elbow = (radius.top_right - border.right).max(0.);
                    path_builder.arc(origin, distance_to_elbow, rad_R, rad_TR, true);
                    path_builder.arc(origin, radius.top_right, rad_TR, rad_R, false);
                }
                path_builder.line_to(corner_TR);
                path_builder.line_to(corner_BR);
                if radius.bottom_right != 0. {
                    let origin = edge_BR +
                        Point2D(-radius.bottom_right,
                                (border.bottom - radius.bottom_right).max(0.));
                    let distance_to_elbow = (radius.bottom_right - border.right).max(0.);
                    path_builder.arc(origin, radius.bottom_right, rad_R, rad_BR, false);
                    path_builder.arc(origin, distance_to_elbow, rad_BR, rad_R, true);
                }
            }
            Direction::Bottom => {
                let edge_BL = box_BL + dx(radius.bottom_left.max(border.left));
                let edge_BR = box_BR + dx(-radius.bottom_right.max(border.right));
                let edge_TL = edge_BL + dy(-border.bottom);
                let edge_TR = edge_BR + dy(-border.bottom);
                let corner_BR = edge_BR + dx_if(radius.bottom_right == 0., border.right);
                let corner_BL = edge_BL + dx_if(radius.bottom_left == 0., -border.left);
                path_builder.move_to(edge_TL);
                path_builder.line_to(edge_TR);
                if radius.bottom_right != 0. {
                    let origin = edge_BR + Point2D((border.right - radius.bottom_right).max(0.),
                                                   -radius.bottom_right);
                    let distance_to_elbow = (radius.bottom_right - border.bottom).max(0.);
                    path_builder.arc(origin, distance_to_elbow, rad_B, rad_BR, true);
                    path_builder.arc(origin, radius.bottom_right, rad_BR, rad_B, false);
                }
                path_builder.line_to(corner_BR);
                path_builder.line_to(corner_BL);
                if radius.bottom_left != 0. {
                    let origin = edge_BL - Point2D((border.left - radius.bottom_left).max(0.),
                                                   radius.bottom_left);
                    let distance_to_elbow = (radius.bottom_left - border.bottom).max(0.);
                    path_builder.arc(origin, radius.bottom_left, rad_B, rad_BL, false);
                    path_builder.arc(origin, distance_to_elbow, rad_BL, rad_B, true);
                }
            }
        }
    }
    /// Creates a path representing the given rounded rectangle.
    ///
    /// TODO(pcwalton): Should we unify with the code above? It doesn't seem immediately obvious
    /// how to do that (especially without regressing performance) unless we have some way to
    /// efficiently intersect or union paths, since different border styles/colors can force us to
    /// slice through the rounded corners. My first attempt to unify with the above code resulted
    /// in making a mess of it, and the simplicity of this code path is appealing, so it may not
    /// be worth it… In any case, revisit this decision when we support elliptical radii.
    fn create_rounded_rect_path(&self,
                                path_builder: &mut PathBuilder,
                                bounds: &Rect<f32>,
                                radii: &BorderRadii<AzFloat>) {
        //    +----------+
        //   / 1        2 \
        //  + 8          3 +
        //  |              |
        //  + 7          4 +
        //   \ 6        5 /
        //    +----------+
        // Walk the outline clockwise: straight edges numbered segments,
        // corners as quarter arcs.
        path_builder.move_to(Point2D(bounds.origin.x + radii.top_left, bounds.origin.y));   // 1
        path_builder.line_to(Point2D(bounds.max_x() - radii.top_right, bounds.origin.y));   // 2
        path_builder.arc(Point2D(bounds.max_x() - radii.top_right,
                                 bounds.origin.y + radii.top_right),
                         radii.top_right,
                         1.5f32 * f32::consts::FRAC_PI_2,
                         f32::consts::PI_2,
                         false);                                                            // 3
        path_builder.line_to(Point2D(bounds.max_x(), bounds.max_y() - radii.bottom_right)); // 4
        path_builder.arc(Point2D(bounds.max_x() - radii.bottom_right,
                                 bounds.max_y() - radii.bottom_right),
                         radii.bottom_right,
                         0.0,
                         f32::consts::FRAC_PI_2,
                         false);                                                            // 5
        path_builder.line_to(Point2D(bounds.origin.x + radii.bottom_left, bounds.max_y())); // 6
        path_builder.arc(Point2D(bounds.origin.x + radii.bottom_left,
                                 bounds.max_y() - radii.bottom_left),
                         radii.bottom_left,
                         f32::consts::FRAC_PI_2,
                         f32::consts::PI,
                         false);                                                            // 7
        path_builder.line_to(Point2D(bounds.origin.x, bounds.origin.y + radii.top_left));   // 8
        path_builder.arc(Point2D(bounds.origin.x + radii.top_left,
                                 bounds.origin.y + radii.top_left),
                         radii.top_left,
                         f32::consts::PI,
                         1.5f32 * f32::consts::FRAC_PI_2,
                         false);                                                            // 1
    }
    /// Strokes one border side as a dashed/dotted line. The dash period is
    /// the border width times the `DashSize` multiplier, and the stroke runs
    /// along the center line of the side's band.
    fn draw_dashed_border_segment(&self,
                                  direction: Direction,
                                  bounds: &Rect<Au>,
                                  border: &SideOffsets2D<f32>,
                                  color: Color,
                                  dash_size: DashSize) {
        let rect = bounds.to_azure_rect();
        let draw_opts = DrawOptions::new(1u as AzFloat, 0 as uint16_t);
        let mut stroke_opts = StrokeOptions::new(0u as AzFloat, 10u as AzFloat);
        let mut dash: [AzFloat, ..2] = [0u as AzFloat, 0u as AzFloat];
        stroke_opts.set_cap_style(AZ_CAP_BUTT as u8);
        let border_width = match direction {
            Direction::Top => border.top,
            Direction::Left => border.left,
            Direction::Right => border.right,
            Direction::Bottom => border.bottom
        };
        stroke_opts.line_width = border_width;
        // On/off lengths are both `dash_size` border-widths long.
        dash[0] = border_width * (dash_size as int) as AzFloat;
        dash[1] = border_width * (dash_size as int) as AzFloat;
        stroke_opts.mDashPattern = dash.as_mut_ptr();
        stroke_opts.mDashLength = dash.len() as size_t;
        // Endpoints of the stroke, centered within the side's band.
        let (start, end) = match direction {
            Direction::Top => {
                let y = rect.origin.y + border.top * 0.5;
                let start = Point2D(rect.origin.x, y);
                let end = Point2D(rect.origin.x + rect.size.width, y);
                (start, end)
            }
            Direction::Left => {
                let x = rect.origin.x + border.left * 0.5;
                let start = Point2D(x, rect.origin.y + rect.size.height);
                let end = Point2D(x, rect.origin.y + border.top);
                (start, end)
            }
            Direction::Right => {
                let x = rect.origin.x + rect.size.width - border.right * 0.5;
                let start = Point2D(x, rect.origin.y);
                let end = Point2D(x, rect.origin.y + rect.size.height);
                (start, end)
            }
            Direction::Bottom => {
                let y = rect.origin.y + rect.size.height - border.bottom * 0.5;
                let start = Point2D(rect.origin.x + rect.size.width, y);
                let end = Point2D(rect.origin.x + border.left, y);
                (start, end)
            }
        };
        self.draw_target.stroke_line(start,
                                     end,
                                     &ColorPattern::new(color),
                                     &stroke_opts,
                                     &draw_opts);
    }
    /// Fills one border side with a solid color via `draw_border_path`.
    fn draw_solid_border_segment(&self,
                                 direction: Direction,
                                 bounds: &Rect<Au>,
                                 border: &SideOffsets2D<f32>,
                                 radius: &BorderRadii<AzFloat>,
                                 color: Color) {
        let rect = bounds.to_azure_rect();
        self.draw_border_path(&rect, direction, border, radius, color);
    }
    /// Returns `bounds` (in px) shrunk inward by `shrink_factor` times the
    /// border width on each side. A factor of 0.0 just converts the rect.
    ///
    /// NOTE(review): the size subtracts 2×right and 2×bottom rather than
    /// left+right and top+bottom; correct only for symmetric borders —
    /// confirm behavior for asymmetric border widths.
    fn get_scaled_bounds(&self,
                         bounds: &Rect<Au>,
                         border: &SideOffsets2D<f32>,
                         shrink_factor: f32) -> Rect<f32> {
        let rect = bounds.to_azure_rect();
        let scaled_border = SideOffsets2D::new(shrink_factor * border.top,
                                               shrink_factor * border.right,
                                               shrink_factor * border.bottom,
                                               shrink_factor * border.left);
        let left_top = Point2D(rect.origin.x, rect.origin.y);
        let scaled_left_top = left_top + Point2D(scaled_border.left,
                                                 scaled_border.top);
        return Rect(scaled_left_top,
                    Size2D(rect.size.width - 2.0 * scaled_border.right, rect.size.height - 2.0 * scaled_border.bottom));
    }
    /// Darkens a color by scaling its RGB channels; alpha is left untouched.
    fn scale_color(&self, color: Color, scale_factor: f32) -> Color {
        return Color::new(color.r * scale_factor, color.g * scale_factor, color.b * scale_factor, color.a);
    }
    /// Paints a CSS `double` border side: two strips each one third of the
    /// border width, with the middle third left empty.
    fn draw_double_border_segment(&self,
                                  direction: Direction,
                                  bounds: &Rect<Au>,
                                  border: &SideOffsets2D<f32>,
                                  radius: &BorderRadii<AzFloat>,
                                  color: Color) {
        let scaled_border = SideOffsets2D::new((1.0/3.0) * border.top,
                                               (1.0/3.0) * border.right,
                                               (1.0/3.0) * border.bottom,
                                               (1.0/3.0) * border.left);
        let inner_scaled_bounds = self.get_scaled_bounds(bounds, border, 2.0/3.0);
        // draw the outer portion of the double border.
        self.draw_solid_border_segment(direction, bounds, &scaled_border, radius, color);
        // draw the inner portion of the double border.
        self.draw_border_path(&inner_scaled_bounds, direction, &scaled_border, radius, color);
    }
    /// Paints a CSS `groove` or `ridge` border side: two half-width strips
    /// in a lighter and a darker shade of `color`, swapped per side so the
    /// border appears carved in (groove) or raised (ridge).
    fn draw_groove_ridge_border_segment(&self,
                                        direction: Direction,
                                        bounds: &Rect<Au>,
                                        border: &SideOffsets2D<f32>,
                                        radius: &BorderRadii<AzFloat>,
                                        color: Color,
                                        style: border_style::T) {
        // original bounds as a Rect<f32>, with no scaling.
        let original_bounds = self.get_scaled_bounds(bounds, border, 0.0);
        // shrink the bounds by 1/2 of the border, leaving the innermost 1/2 of the border
        let inner_scaled_bounds = self.get_scaled_bounds(bounds, border, 0.5);
        let scaled_border = SideOffsets2D::new(0.5 * border.top,
                                               0.5 * border.right,
                                               0.5 * border.bottom,
                                               0.5 * border.left);
        let is_groove = match style {
                border_style::T::groove => true,
                border_style::T::ridge => false,
                _ => panic!("invalid border style")
        };
        let mut lighter_color;
        let mut darker_color;
        if color.r != 0.0 || color.g != 0.0 || color.b != 0.0 {
            darker_color = self.scale_color(color, if is_groove { 1.0/3.0 } else { 2.0/3.0 });
            lighter_color = color;
        } else {
            // You can't scale black color (i.e. 'scaled = 0 * scale', equals black).
            darker_color = Color::new(0.3, 0.3, 0.3, color.a);
            lighter_color = Color::new(0.7, 0.7, 0.7, color.a);
        }
        // Which shade goes outside vs. inside depends on the side being
        // painted and on groove-vs-ridge.
        let (outer_color, inner_color) = match (direction, is_groove) {
            (Direction::Top, true) | (Direction::Left, true) |
            (Direction::Right, false) | (Direction::Bottom, false) => {
                (darker_color, lighter_color)
            }
            (Direction::Top, false) | (Direction::Left, false) |
            (Direction::Right, true) | (Direction::Bottom, true) => (lighter_color, darker_color),
        };
        // outer portion of the border
        self.draw_border_path(&original_bounds, direction, &scaled_border, radius, outer_color);
        // inner portion of the border
        self.draw_border_path(&inner_scaled_bounds,
                              direction,
                              &scaled_border,
                              radius,
                              inner_color);
    }
fn draw_inset_outset_border_segment(&self,
direction: Direction,
bounds: &Rect<Au>,
border: &SideOffsets2D<f32>,
radius: &BorderRadii<AzFloat>,
color: Color,
style: border_style::T) {
let is_inset = match style {
border_style::T::inset => true,
border_style::T::outset => false,
_ => panic!("invalid border style")
};
// original bounds as a Rect<f32>
let original_bounds = self.get_scaled_bounds(bounds, border, 0.0);
// You can't scale black color (i.e. 'scaled = 0 * scale', equals black).
let mut scaled_color;
if color.r != 0.0 || color.g != 0.0 || color.b != 0.0 {
scaled_color = match direction {
Direction::Top | Direction::Left => {
self.scale_color(color, if is_inset { 2.0/3.0 } else { 1.0 })
}
Direction::Right | Direction::Bottom => {
self.scale_color(color, if is_inset { 1.0 } else { 2.0/3.0 })
}
};
} else {
scaled_color = match direction {
Direction::Top | Direction::Left => {
if is_inset {
Color::new(0.3, 0.3, 0.3, color.a)
} else {
Color::new(0.7, 0.7, 0.7, color.a)
}
}
Direction::Right | Direction::Bottom => {
if is_inset {
Color::new(0.7, 0.7, 0.7, color.a)
} else {
Color::new(0.3, 0.3, 0.3, color.a)
}
}
};
}
self.draw_border_path(&original_bounds, direction, border, radius, scaled_color);
}
pub fn draw_text(&mut self, text: &TextDisplayItem) {
let current_transform = self.draw_target.get_transform();
// Optimization: Don’t set a transform matrix for upright text, and pass a start point to
// `draw_text_into_context`.
//
// For sideways text, it’s easier to do the rotation such that its center (the baseline’s
// start point) is at (0, 0) coordinates.
let baseline_origin = match text.orientation {
Upright => text.baseline_origin,
SidewaysLeft => {
let x = text.baseline_origin.x.to_subpx() as AzFloat;
let y = text.baseline_origin.y.to_subpx() as AzFloat;
self.draw_target.set_transform(¤t_transform.mul(&Matrix2D::new(0., -1.,
1., 0.,
x, y)));
Point2D::zero()
}
SidewaysRight => {
let x = text.baseline_origin.x.to_subpx() as AzFloat;
let y = text.baseline_origin.y.to_subpx() as AzFloat;
self.draw_target.set_transform(¤t_transform.mul(&Matrix2D::new(0., 1.,
-1., 0.,
x, y)));
Point2D::zero()
}
};
self.font_ctx
.get_paint_font_from_template(&text.text_run.font_template,
text.text_run.actual_pt_size)
.borrow()
.draw_text_into_context(self,
&*text.text_run,
&text.range,
baseline_origin,
text.text_color,
opts::get().enable_text_antialiasing);
// Undo the transform, only when we did one.
if text.orientation != Upright {
self.draw_target.set_transform(¤t_transform)
}
}
/// Draws a linear gradient in the given boundaries from the given start point to the given end
/// point with the given stops.
pub fn draw_linear_gradient(&self,
bounds: &Rect<Au>,
start_point: &Point2D<Au>,
end_point: &Point2D<Au>,
stops: &[GradientStop]) {
self.draw_target.make_current();
let stops = self.draw_target.create_gradient_stops(stops, ExtendMode::Clamp);
let pattern = LinearGradientPattern::new(&start_point.to_azure_point(),
&end_point.to_azure_point(),
stops,
&Matrix2D::identity());
self.draw_target.fill_rect(&bounds.to_azure_rect(),
PatternRef::LinearGradient(&pattern),
None);
}
pub fn get_or_create_temporary_draw_target(&mut self,
filters: &filter::T,
blend_mode: mix_blend_mode::T)
-> DrawTarget {
// Determine if we need a temporary draw target.
if !filters::temporary_draw_target_needed_for_style_filters(filters) &&
blend_mode == mix_blend_mode::T::normal {
// Reuse the draw target, but remove the transient clip. If we don't do the latter,
// we'll be in a state whereby the paint subcontext thinks it has no transient clip
// (see `StackingContext::optimize_and_draw_into_context`) but it actually does,
// resulting in a situation whereby display items are seemingly randomly clipped out.
self.remove_transient_clip_if_applicable();
return self.draw_target.clone()
}
// FIXME(pcwalton): This surface might be bigger than necessary and waste memory.
let size = self.draw_target.get_size();
let size = Size2D(size.width, size.height);
let temporary_draw_target =
self.draw_target.create_similar_draw_target(&size, self.draw_target.get_format());
temporary_draw_target.set_transform(&self.draw_target.get_transform());
temporary_draw_target
}
/// If we created a temporary draw target, then draw it to the main draw target. This is called
/// after doing all the painting, and the temporary draw target must not be used afterward.
pub fn draw_temporary_draw_target_if_necessary(&mut self,
temporary_draw_target: &DrawTarget,
filters: &filter::T,
blend_mode: mix_blend_mode::T) {
if (*temporary_draw_target) == self.draw_target {
// We're directly painting to the surface; nothing to do.
return
}
// Set up transforms.
let old_transform = self.draw_target.get_transform();
self.draw_target.set_transform(&Matrix2D::identity());
temporary_draw_target.set_transform(&Matrix2D::identity());
// Create the Azure filter pipeline.
let (filter_node, opacity) = filters::create_filters(&self.draw_target,
temporary_draw_target,
filters);
// Perform the blit operation.
let rect = Rect(Point2D(0.0, 0.0), self.draw_target.get_size().to_azure_size());
let mut draw_options = DrawOptions::new(opacity, 0);
draw_options.set_composition_op(blend_mode.to_azure_composition_op());
self.draw_target.draw_filter(&filter_node, &rect, &rect.origin, draw_options);
self.draw_target.set_transform(&old_transform);
}
/// Draws a box shadow with the given boundaries, color, offset, blur radius, and spread
/// radius. `box_bounds` represents the boundaries of the box.
pub fn draw_box_shadow(&mut self,
box_bounds: &Rect<Au>,
offset: &Point2D<Au>,
color: Color,
blur_radius: Au,
spread_radius: Au,
inset: bool) {
// Remove both the transient clip and the stacking context clip, because we may need to
// draw outside the stacking context's clip.
self.remove_transient_clip_if_applicable();
self.pop_clip_if_applicable();
// If we have blur, create a new draw target that's the same size as this tile, but with
// enough space around the edges to hold the entire blur. (If we don't do the latter, then
// there will be seams between tiles.)
//
// FIXME(pcwalton): This draw target might be larger than necessary and waste memory.
let side_inflation = (blur_radius * BOX_SHADOW_INFLATION_FACTOR).to_subpx().ceil() as i32;
let draw_target_transform = self.draw_target.get_transform();
let temporary_draw_target;
if blur_radius > Au(0) {
let draw_target_size = self.draw_target.get_size();
let draw_target_size = Size2D(draw_target_size.width, draw_target_size.height);
let inflated_draw_target_size = Size2D(draw_target_size.width + side_inflation * 2,
draw_target_size.height + side_inflation * 2);
temporary_draw_target =
self.draw_target.create_similar_draw_target(&inflated_draw_target_size,
self.draw_target.get_format());
temporary_draw_target.set_transform(
&Matrix2D::identity().translate(side_inflation as AzFloat,
side_inflation as AzFloat)
.mul(&draw_target_transform));
} else {
temporary_draw_target = self.draw_target.clone();
}
let shadow_bounds = box_bounds.translate(offset).inflate(spread_radius, spread_radius);
let path;
if inset {
path = temporary_draw_target.create_rectangular_border_path(&MAX_RECT, &shadow_bounds);
self.draw_target.push_clip(&self.draw_target.create_rectangular_path(box_bounds))
} else {
path = temporary_draw_target.create_rectangular_path(&shadow_bounds);
self.draw_target.push_clip(&self.draw_target
.create_rectangular_border_path(&MAX_RECT, box_bounds))
}
temporary_draw_target.fill(&path, &ColorPattern::new(color), &DrawOptions::new(1.0, 0));
// Blur, if we need to.
if blur_radius > Au(0) {
// Go ahead and create the blur now. Despite the name, Azure's notion of `StdDeviation`
// describes the blur radius, not the sigma for the Gaussian blur.
let blur_filter = self.draw_target.create_filter(FilterType::GaussianBlur);
blur_filter.set_attribute(GaussianBlurAttribute::StdDeviation(blur_radius.to_subpx() as
AzFloat));
blur_filter.set_input(GaussianBlurInput, &temporary_draw_target.snapshot());
// Blit the blur onto the tile. We undo the transforms here because we want to directly
// stack the temporary draw target onto the tile.
temporary_draw_target.set_transform(&Matrix2D::identity());
self.draw_target.set_transform(&Matrix2D::identity());
let temporary_draw_target_size = temporary_draw_target.get_size();
self.draw_target
.draw_filter(&blur_filter,
&Rect(Point2D(0.0, 0.0),
Size2D(temporary_draw_target_size.width as AzFloat,
temporary_draw_target_size.height as AzFloat)),
&Point2D(-side_inflation as AzFloat, -side_inflation as AzFloat),
DrawOptions::new(1.0, 0));
self.draw_target.set_transform(&draw_target_transform);
}
// Undo the draw target's clip.
self.draw_target.pop_clip();
// Push back the stacking context clip.
self.push_clip_if_applicable();
}
pub fn push_clip_if_applicable(&self) {
if let Some(ref clip_rect) = self.clip_rect {
self.draw_push_clip(clip_rect)
}
}
pub fn pop_clip_if_applicable(&self) {
if self.clip_rect.is_some() {
self.draw_pop_clip()
}
}
pub fn remove_transient_clip_if_applicable(&mut self) {
if let Some(old_transient_clip) = mem::replace(&mut self.transient_clip, None) {
for _ in old_transient_clip.complex.iter() {
self.draw_pop_clip()
}
self.draw_pop_clip()
}
}
/// Sets a new transient clipping region. Automatically calls
/// `remove_transient_clip_if_applicable()` first.
pub fn push_transient_clip(&mut self, clip_region: ClippingRegion) {
self.remove_transient_clip_if_applicable();
self.draw_push_clip(&clip_region.main);
for complex_region in clip_region.complex.iter() {
// FIXME(pcwalton): Actually draw a rounded rect.
self.push_rounded_rect_clip(&complex_region.rect.to_azure_rect(),
&complex_region.radii.to_radii_px())
}
self.transient_clip = Some(clip_region)
}
}
pub trait ToAzurePoint {
fn to_azure_point(&self) -> Point2D<AzFloat>;
}
impl ToAzurePoint for Point2D<Au> {
fn to_azure_point(&self) -> Point2D<AzFloat> {
Point2D(self.x.to_nearest_px() as AzFloat, self.y.to_nearest_px() as AzFloat)
}
}
pub trait ToAzureRect {
fn to_azure_rect(&self) -> Rect<AzFloat>;
}
impl ToAzureRect for Rect<Au> {
fn to_azure_rect(&self) -> Rect<AzFloat> {
Rect(self.origin.to_azure_point(),
Size2D(self.size.width.to_nearest_px() as AzFloat,
self.size.height.to_nearest_px() as AzFloat))
}
}
pub trait ToAzureSize {
fn to_azure_size(&self) -> Size2D<AzFloat>;
}
impl ToAzureSize for AzIntSize {
fn to_azure_size(&self) -> Size2D<AzFloat> {
Size2D(self.width as AzFloat, self.height as AzFloat)
}
}
trait ToAzureIntSize {
fn to_azure_int_size(&self) -> Size2D<i32>;
}
impl ToAzureIntSize for Size2D<Au> {
fn to_azure_int_size(&self) -> Size2D<i32> {
Size2D(self.width.to_nearest_px() as i32, self.height.to_nearest_px() as i32)
}
}
impl ToAzureIntSize for Size2D<AzFloat> {
fn to_azure_int_size(&self) -> Size2D<i32> {
Size2D(self.width as i32, self.height as i32)
}
}
impl ToAzureIntSize for Size2D<i32> {
fn to_azure_int_size(&self) -> Size2D<i32> {
Size2D(self.width, self.height)
}
}
trait ToSideOffsetsPx {
fn to_float_px(&self) -> SideOffsets2D<AzFloat>;
}
impl ToSideOffsetsPx for SideOffsets2D<Au> {
fn to_float_px(&self) -> SideOffsets2D<AzFloat> {
SideOffsets2D::new(self.top.to_nearest_px() as AzFloat,
self.right.to_nearest_px() as AzFloat,
self.bottom.to_nearest_px() as AzFloat,
self.left.to_nearest_px() as AzFloat)
}
}
trait ToRadiiPx {
fn to_radii_px(&self) -> BorderRadii<AzFloat>;
}
impl ToRadiiPx for BorderRadii<Au> {
fn to_radii_px(&self) -> BorderRadii<AzFloat> {
fn to_nearest_px(x: Au) -> AzFloat {
x.to_nearest_px() as AzFloat
}
BorderRadii {
top_left: to_nearest_px(self.top_left),
top_right: to_nearest_px(self.top_right),
bottom_left: to_nearest_px(self.bottom_left),
bottom_right: to_nearest_px(self.bottom_right),
}
}
}
trait ScaledFontExtensionMethods {
fn draw_text_into_context(&self,
rctx: &PaintContext,
run: &Box<TextRun>,
range: &Range<CharIndex>,
baseline_origin: Point2D<Au>,
color: Color,
antialias: bool);
}
impl ScaledFontExtensionMethods for ScaledFont {
fn draw_text_into_context(&self,
rctx: &PaintContext,
run: &Box<TextRun>,
range: &Range<CharIndex>,
baseline_origin: Point2D<Au>,
color: Color,
antialias: bool) {
let target = rctx.get_draw_target();
let pattern = ColorPattern::new(color);
let azure_pattern = pattern.azure_color_pattern;
assert!(azure_pattern.is_not_null());
let fields = if antialias {
0x0200
} else {
0
};
let mut options = struct__AzDrawOptions {
mAlpha: 1f64 as AzFloat,
fields: fields,
};
let mut origin = baseline_origin.clone();
let mut azglyphs = vec!();
azglyphs.reserve(range.length().to_uint());
for slice in run.natural_word_slices_in_range(range) {
for (_i, glyph) in slice.glyphs.iter_glyphs_for_char_range(&slice.range) {
let glyph_advance = glyph.advance();
let glyph_offset = glyph.offset().unwrap_or(Point2D::zero());
let azglyph = struct__AzGlyph {
mIndex: glyph.id() as uint32_t,
mPosition: struct__AzPoint {
x: (origin.x + glyph_offset.x).to_subpx() as AzFloat,
y: (origin.y + glyph_offset.y).to_subpx() as AzFloat
}
};
origin = Point2D(origin.x + glyph_advance, origin.y);
azglyphs.push(azglyph)
};
}
let azglyph_buf_len = azglyphs.len();
if azglyph_buf_len == 0 { return; } // Otherwise the Quartz backend will assert.
let mut glyphbuf = struct__AzGlyphBuffer {
mGlyphs: azglyphs.as_mut_ptr(),
mNumGlyphs: azglyph_buf_len as uint32_t
};
unsafe {
// TODO(Issue #64): this call needs to move into azure_hl.rs
AzDrawTargetFillGlyphs(target.azure_draw_target,
self.get_ref(),
&mut glyphbuf,
azure_pattern,
&mut options,
ptr::null_mut());
}
}
}
trait DrawTargetExtensions {
/// Creates and returns a path that represents a rectangular border. Like this:
///
/// ```text
/// +--------------------------------+
/// |################################|
/// |#######+---------------------+##|
/// |#######| |##|
/// |#######+---------------------+##|
/// |################################|
/// +--------------------------------+
/// ```
fn create_rectangular_border_path<T>(&self, outer_rect: &T, inner_rect: &T)
-> Path
where T: ToAzureRect;
/// Creates and returns a path that represents a rectangle.
fn create_rectangular_path(&self, rect: &Rect<Au>) -> Path;
}
impl DrawTargetExtensions for DrawTarget {
fn create_rectangular_border_path<T>(&self, outer_rect: &T, inner_rect: &T)
-> Path
where T: ToAzureRect {
// +-----------+
// |2 |1
// | |
// | +---+---+
// | |9 |6 |5, 10
// | | | |
// | +---+ |
// | 8 7 |
// | |
// +-----------+
// 3 4
let (outer_rect, inner_rect) = (outer_rect.to_azure_rect(), inner_rect.to_azure_rect());
let path_builder = self.create_path_builder();
path_builder.move_to(Point2D(outer_rect.max_x(), outer_rect.origin.y)); // 1
path_builder.line_to(Point2D(outer_rect.origin.x, outer_rect.origin.y)); // 2
path_builder.line_to(Point2D(outer_rect.origin.x, outer_rect.max_y())); // 3
path_builder.line_to(Point2D(outer_rect.max_x(), outer_rect.max_y())); // 4
path_builder.line_to(Point2D(outer_rect.max_x(), inner_rect.origin.y)); // 5
path_builder.line_to(Point2D(inner_rect.max_x(), inner_rect.origin.y)); // 6
path_builder.line_to(Point2D(inner_rect.max_x(), inner_rect.max_y())); // 7
path_builder.line_to(Point2D(inner_rect.origin.x, inner_rect.max_y())); // 8
path_builder.line_to(inner_rect.origin); // 9
path_builder.line_to(Point2D(outer_rect.max_x(), inner_rect.origin.y)); // 10
path_builder.finish()
}
fn create_rectangular_path(&self, rect: &Rect<Au>) -> Path {
let path_builder = self.create_path_builder();
path_builder.move_to(rect.origin.to_azure_point());
path_builder.line_to(Point2D(rect.max_x(), rect.origin.y).to_azure_point());
path_builder.line_to(Point2D(rect.max_x(), rect.max_y()).to_azure_point());
path_builder.line_to(Point2D(rect.origin.x, rect.max_y()).to_azure_point());
path_builder.finish()
}
}
/// Converts a CSS blend mode (per CSS-COMPOSITING) to an Azure `CompositionOp`.
trait ToAzureCompositionOp {
/// Converts a CSS blend mode (per CSS-COMPOSITING) to an Azure `CompositionOp`.
fn to_azure_composition_op(&self) -> CompositionOp;
}
impl ToAzureCompositionOp for mix_blend_mode::T {
fn to_azure_composition_op(&self) -> CompositionOp {
match *self {
mix_blend_mode::T::normal => CompositionOp::Over,
mix_blend_mode::T::multiply => CompositionOp::Multiply,
mix_blend_mode::T::screen => CompositionOp::Screen,
mix_blend_mode::T::overlay => CompositionOp::Overlay,
mix_blend_mode::T::darken => CompositionOp::Darken,
mix_blend_mode::T::lighten => CompositionOp::Lighten,
mix_blend_mode::T::color_dodge => CompositionOp::ColorDodge,
mix_blend_mode::T::color_burn => CompositionOp::ColorBurn,
mix_blend_mode::T::hard_light => CompositionOp::HardLight,
mix_blend_mode::T::soft_light => CompositionOp::SoftLight,
mix_blend_mode::T::difference => CompositionOp::Difference,
mix_blend_mode::T::exclusion => CompositionOp::Exclusion,
mix_blend_mode::T::hue => CompositionOp::Hue,
mix_blend_mode::T::saturation => CompositionOp::Saturation,
mix_blend_mode::T::color => CompositionOp::Color,
mix_blend_mode::T::luminosity => CompositionOp::Luminosity,
}
}
}<|fim▁end|> | |
<|file_name|>ScoreParameter.java<|end_file_name|><|fim▁begin|>package hugs.support;
import hugs.*;
public class ScoreParameter extends Parameter {
public Score value;
<|fim▁hole|> }
public Object getValue () {return value;}
public Parameter copy () {
return new ScoreParameter(name, (value == null ? null : value.copy()));
}
}<|fim▁end|> | public ScoreParameter (String name) { this(name,null);}
public ScoreParameter (String name, Score value) {
super(name);
this.value = value;
|
<|file_name|>Value.java<|end_file_name|><|fim▁begin|>package units.interfaces;
public abstract interface Value<T> extends MyComparable<T> {
// OPERATIONS
default boolean invariant() {
return true;
}
default void checkInvariant() {
if(!invariant()) {
System.exit(-1);
}
}
public abstract T newInstance(double value);
public abstract T abs();
public abstract T min(T value);
public abstract T max(T value);
public abstract T add(T value);
public abstract T sub(T value);
<|fim▁hole|> public abstract T div(double value);
public abstract double div(T value);
}<|fim▁end|> |
public abstract T mul(double value);
|
<|file_name|>makers_test.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> let name = get_name();
assert_eq!(name, "makers");
}<|fim▁end|> | use super::*;
#[test]
fn get_name_test() { |
<|file_name|>base.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Base classes for writing management commands (named commands which can
be executed through ``django-admin`` or ``manage.py``).
"""
from __future__ import unicode_literals
import os
import sys
import warnings
from argparse import ArgumentParser
from optparse import OptionParser
import django
from django.core import checks
from django.core.management.color import color_style, no_style
from django.db import connections
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_str
class CommandError(Exception):
"""
Exception class indicating a problem while executing a management
command.
If this exception is raised during the execution of a management
command, it will be caught and turned into a nicely-printed error
message to the appropriate output stream (i.e., stderr); as a
result, raising this exception (with a sensible description of the
error) is the preferred way to indicate that something has gone
wrong in the execution of a command.
"""
pass
class SystemCheckError(CommandError):
"""
The system check framework detected unrecoverable errors.
"""
pass
class CommandParser(ArgumentParser):
"""
Customized ArgumentParser class to improve some error messages and prevent
SystemExit in several occasions, as SystemExit is unacceptable when a
command is called programmatically.
"""
def __init__(self, cmd, **kwargs):
self.cmd = cmd
super(CommandParser, self).__init__(**kwargs)
def parse_args(self, args=None, namespace=None):
# Catch missing argument for a better error message
if (hasattr(self.cmd, 'missing_args_message') and
not (args or any(not arg.startswith('-') for arg in args))):
self.error(self.cmd.missing_args_message)
return super(CommandParser, self).parse_args(args, namespace)
def error(self, message):
if self.cmd._called_from_command_line:
super(CommandParser, self).error(message)
else:
raise CommandError("Error: %s" % message)
def handle_default_options(options):
"""
Include any default options that all commands should accept here
so that ManagementUtility can handle them before searching for
user commands.
"""
if options.settings:
os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
if options.pythonpath:
sys.path.insert(0, options.pythonpath)
class OutputWrapper(object):
"""
Wrapper around stdout/stderr
"""
@property
def style_func(self):
return self._style_func
@style_func.setter
def style_func(self, style_func):
if style_func and self.isatty():
self._style_func = style_func
else:
self._style_func = lambda x: x
def __init__(self, out, style_func=None, ending='\n'):
self._out = out
self.style_func = None
self.ending = ending
def __getattr__(self, name):
return getattr(self._out, name)
def isatty(self):
return hasattr(self._out, 'isatty') and self._out.isatty()
def write(self, msg, style_func=None, ending=None):
ending = self.ending if ending is None else ending
if ending and not msg.endswith(ending):
msg += ending
style_func = style_func or self.style_func
self._out.write(force_str(style_func(msg)))
class BaseCommand(object):
"""
The base class from which all management commands ultimately
derive.
Use this class if you want access to all of the mechanisms which
parse the command-line arguments and work out what code to call in
response; if you don't need to change any of that behavior,
consider using one of the subclasses defined in this file.
If you are interested in overriding/customizing various aspects of
the command-parsing and -execution behavior, the normal flow works
as follows:
1. ``django-admin`` or ``manage.py`` loads the command class
and calls its ``run_from_argv()`` method.
2. The ``run_from_argv()`` method calls ``create_parser()`` to get
an ``ArgumentParser`` for the arguments, parses them, performs
any environment changes requested by options like
``pythonpath``, and then calls the ``execute()`` method,
passing the parsed arguments.
3. The ``execute()`` method attempts to carry out the command by
calling the ``handle()`` method with the parsed arguments; any
output produced by ``handle()`` will be printed to standard
output and, if the command is intended to produce a block of
SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.
4. If ``handle()`` or ``execute()`` raised any exception (e.g.
``CommandError``), ``run_from_argv()`` will instead print an error
message to ``stderr``.
Thus, the ``handle()`` method is typically the starting point for
subclasses; many built-in commands and command types either place
all of their logic in ``handle()``, or perform some additional
parsing work in ``handle()`` and then delegate from it to more
specialized methods as needed.
Several attributes affect behavior at various steps along the way:
``args``
A string listing the arguments accepted by the command,
suitable for use in help messages; e.g., a command which takes
a list of application names might set this to '<app_label
app_label ...>'.
``can_import_settings``
A boolean indicating whether the command needs to be able to
import Django settings; if ``True``, ``execute()`` will verify
that this is possible before proceeding. Default value is
``True``.
``help``
A short description of the command, which will be printed in
help messages.
``option_list``
This is the list of ``optparse`` options which will be fed
into the command's ``OptionParser`` for parsing arguments.
Deprecated and will be removed in Django 1.10.
``output_transaction``
A boolean indicating whether the command outputs SQL
statements; if ``True``, the output will automatically be
wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
``False``.
``requires_system_checks``
A boolean; if ``True``, entire Django project will be checked for errors
prior to executing the command. Default value is ``True``.
To validate an individual application's models
rather than all applications' models, call
``self.check(app_configs)`` from ``handle()``, where ``app_configs``
is the list of application's configuration provided by the
app registry.
``leave_locale_alone``
A boolean indicating whether the locale set in settings should be
preserved during the execution of the command instead of translations
being deactivated.
Default value is ``False``.
Make sure you know what you are doing if you decide to change the value
of this option in your custom command if it creates database content
that is locale-sensitive and such content shouldn't contain any
translations (like it happens e.g. with django.contrib.auth
permissions) as activating any locale might cause unintended effects.
This option can't be False when the can_import_settings option is set
to False too because attempting to deactivate translations needs access
to settings. This condition will generate a CommandError.
"""
# Metadata about this command.
option_list = ()
help = ''
args = ''
# Configuration shortcuts that alter various logic.
_called_from_command_line = False
can_import_settings = True
output_transaction = False # Whether to wrap the output in a "BEGIN; COMMIT;"
leave_locale_alone = False
requires_system_checks = True
def __init__(self, stdout=None, stderr=None, no_color=False):
self.stdout = OutputWrapper(stdout or sys.stdout)
self.stderr = OutputWrapper(stderr or sys.stderr)
if no_color:
self.style = no_style()
else:
self.style = color_style()
self.stderr.style_func = self.style.ERROR
@property
def use_argparse(self):
return not bool(self.option_list)
def get_version(self):
"""
Return the Django version, which should be correct for all
built-in Django commands. User-supplied commands should
override this method.
"""
return django.get_version()
def usage(self, subcommand):
"""
Return a brief description of how to use this command, by
default from the attribute ``self.help``.
"""
usage = '%%prog %s [options] %s' % (subcommand, self.args)
if self.help:
return '%s\n\n%s' % (usage, self.help)
else:
return usage
def create_parser(self, prog_name, subcommand):
"""
Create and return the ``ArgumentParser`` which will be used to
parse the arguments to this command.
"""
if not self.use_argparse:
def store_as_int(option, opt_str, value, parser):
setattr(parser.values, option.dest, int(value))
# Backwards compatibility: use deprecated optparse module
warnings.warn("OptionParser usage for Django management commands "
"is deprecated, use ArgumentParser instead",
RemovedInDjango110Warning)
parser = OptionParser(prog=prog_name,
usage=self.usage(subcommand),
version=self.get_version())
parser.add_option('-v', '--verbosity', action='callback', dest='verbosity', default=1,
type='choice', choices=['0', '1', '2', '3'], callback=store_as_int,
help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output')
parser.add_option('--settings',
help=(
'The Python path to a settings module, e.g. '
'"myproject.settings.main". If this isn\'t provided, the '
'DJANGO_SETTINGS_MODULE environment variable will be used.'
),
)
parser.add_option('--pythonpath',
help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".'),
parser.add_option('--traceback', action='store_true',
help='Raise on CommandError exceptions')
parser.add_option('--no-color', action='store_true', dest='no_color', default=False,
help="Don't colorize the command output.")
for opt in self.option_list:
parser.add_option(opt)
else:
parser = CommandParser(self, prog="%s %s" % (os.path.basename(prog_name), subcommand),
description=self.help or None)
parser.add_argument('--version', action='version', version=self.get_version())
parser.add_argument('-v', '--verbosity', action='store', dest='verbosity', default='1',
type=int, choices=[0, 1, 2, 3],
help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output')
parser.add_argument('--settings',
help=(
'The Python path to a settings module, e.g. '
'"myproject.settings.main". If this isn\'t provided, the '
'DJANGO_SETTINGS_MODULE environment variable will be used.'
),
)
parser.add_argument('--pythonpath',
help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".')
parser.add_argument('--traceback', action='store_true',
help='Raise on CommandError exceptions')
parser.add_argument('--no-color', action='store_true', dest='no_color', default=False,
help="Don't colorize the command output.")
if self.args:
# Keep compatibility and always accept positional arguments, like optparse when args is set
parser.add_argument('args', nargs='*')
self.add_arguments(parser)
return parser
def add_arguments(self, parser):
"""
Entry point for subclassed commands to add custom arguments.
"""
pass
def print_help(self, prog_name, subcommand):
"""
Print the help message for this command, derived from
``self.usage()``.
"""
parser = self.create_parser(prog_name, subcommand)
parser.print_help()
def run_from_argv(self, argv):
"""
Set up any environment changes requested (e.g., Python path
and Django settings), then run this command. If the
command raises a ``CommandError``, intercept it and print it sensibly
to stderr. If the ``--traceback`` option is present or the raised
``Exception`` is not ``CommandError``, raise it.
"""
self._called_from_command_line = True
parser = self.create_parser(argv[0], argv[1])
if self.use_argparse:
options = parser.parse_args(argv[2:])
cmd_options = vars(options)
# Move positional args out of options to mimic legacy optparse
args = cmd_options.pop('args', ())
else:
options, args = parser.parse_args(argv[2:])
cmd_options = vars(options)
handle_default_options(options)
try:
self.execute(*args, **cmd_options)
except Exception as e:
if options.traceback or not isinstance(e, CommandError):
raise
# SystemCheckError takes care of its own formatting.
if isinstance(e, SystemCheckError):
self.stderr.write(str(e), lambda x: x)
else:
self.stderr.write('%s: %s' % (e.__class__.__name__, e))
sys.exit(1)
finally:
connections.close_all()
def execute(self, *args, **options):
"""
Try to execute this command, performing system checks if needed (as
controlled by the ``requires_system_checks`` attribute, except if
force-skipped).
"""
if options.get('no_color'):
self.style = no_style()
self.stderr.style_func = None
if options.get('stdout'):
self.stdout = OutputWrapper(options['stdout'])
if options.get('stderr'):
self.stderr = OutputWrapper(options.get('stderr'), self.stderr.style_func)
saved_locale = None
if not self.leave_locale_alone:
# Only mess with locales if we can assume we have a working
# settings file, because django.utils.translation requires settings
# (The final saying about whether the i18n machinery is active will be
# found in the value of the USE_I18N setting)
if not self.can_import_settings:
raise CommandError("Incompatible values of 'leave_locale_alone' "
"(%s) and 'can_import_settings' (%s) command "
"options." % (self.leave_locale_alone,
self.can_import_settings))
# Deactivate translations, because django-admin creates database
# content like permissions, and those shouldn't contain any
# translations.
from django.utils import translation
saved_locale = translation.get_language()
translation.deactivate_all()
try:
if (self.requires_system_checks and
not options.get('skip_validation') and # Remove at the end of deprecation for `skip_validation`.
not options.get('skip_checks')):
self.check()
output = self.handle(*args, **options)
if output:
if self.output_transaction:
# This needs to be imported here, because it relies on
# settings.
from django.db import connections, DEFAULT_DB_ALIAS
connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
if connection.ops.start_transaction_sql():
self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()))
self.stdout.write(output)
if self.output_transaction:
self.stdout.write('\n' + self.style.SQL_KEYWORD(connection.ops.end_transaction_sql()))
finally:
if saved_locale is not None:
translation.activate(saved_locale)
def check(self, app_configs=None, tags=None, display_num_errors=False,
include_deployment_checks=False):
"""
Uses the system check framework to validate entire Django project.
Raises CommandError for any serious message (error or critical errors).
If there are only light messages (like warnings), they are printed to
stderr and no exception is raised.
"""
all_issues = checks.run_checks(
app_configs=app_configs,
tags=tags,
include_deployment_checks=include_deployment_checks,
)
header, body, footer = "", "", ""
visible_issue_count = 0 # excludes silenced warnings
if all_issues:<|fim▁hole|> warnings = [e for e in all_issues if checks.WARNING <= e.level < checks.ERROR and not e.is_silenced()]
errors = [e for e in all_issues if checks.ERROR <= e.level < checks.CRITICAL]
criticals = [e for e in all_issues if checks.CRITICAL <= e.level]
sorted_issues = [
(criticals, 'CRITICALS'),
(errors, 'ERRORS'),
(warnings, 'WARNINGS'),
(infos, 'INFOS'),
(debugs, 'DEBUGS'),
]
for issues, group_name in sorted_issues:
if issues:
visible_issue_count += len(issues)
formatted = (
self.style.ERROR(force_str(e))
if e.is_serious()
else self.style.WARNING(force_str(e))
for e in issues)
formatted = "\n".join(sorted(formatted))
body += '\n%s:\n%s\n' % (group_name, formatted)
if visible_issue_count:
header = "System check identified some issues:\n"
if display_num_errors:
if visible_issue_count:
footer += '\n'
footer += "System check identified %s (%s silenced)." % (
"no issues" if visible_issue_count == 0 else
"1 issue" if visible_issue_count == 1 else
"%s issues" % visible_issue_count,
len(all_issues) - visible_issue_count,
)
if any(e.is_serious() and not e.is_silenced() for e in all_issues):
msg = self.style.ERROR("SystemCheckError: %s" % header) + body + footer
raise SystemCheckError(msg)
else:
msg = header + body + footer
if msg:
if visible_issue_count:
self.stderr.write(msg, lambda x: x)
else:
self.stdout.write(msg)
def handle(self, *args, **options):
"""
The actual logic of the command. Subclasses must implement
this method.
"""
raise NotImplementedError('subclasses of BaseCommand must provide a handle() method')
class AppCommand(BaseCommand):
"""
A management command which takes one or more installed application labels
as arguments, and does something with each of them.
Rather than implementing ``handle()``, subclasses must implement
``handle_app_config()``, which will be called once for each application.
"""
missing_args_message = "Enter at least one application label."
def add_arguments(self, parser):
parser.add_argument('args', metavar='app_label', nargs='+',
help='One or more application label.')
def handle(self, *app_labels, **options):
from django.apps import apps
try:
app_configs = [apps.get_app_config(app_label) for app_label in app_labels]
except (LookupError, ImportError) as e:
raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
output = []
for app_config in app_configs:
app_output = self.handle_app_config(app_config, **options)
if app_output:
output.append(app_output)
return '\n'.join(output)
def handle_app_config(self, app_config, **options):
"""
Perform the command's actions for app_config, an AppConfig instance
corresponding to an application label given on the command line.
"""
raise NotImplementedError(
"Subclasses of AppCommand must provide"
"a handle_app_config() method.")
class LabelCommand(BaseCommand):
"""
A management command which takes one or more arbitrary arguments
(labels) on the command line, and does something with each of
them.
Rather than implementing ``handle()``, subclasses must implement
``handle_label()``, which will be called once for each label.
If the arguments should be names of installed applications, use
``AppCommand`` instead.
"""
label = 'label'
missing_args_message = "Enter at least one %s." % label
def add_arguments(self, parser):
parser.add_argument('args', metavar=self.label, nargs='+')
def handle(self, *labels, **options):
output = []
for label in labels:
label_output = self.handle_label(label, **options)
if label_output:
output.append(label_output)
return '\n'.join(output)
def handle_label(self, label, **options):
"""
Perform the command's actions for ``label``, which will be the
string as given on the command line.
"""
raise NotImplementedError('subclasses of LabelCommand must provide a handle_label() method')
class NoArgsCommand(BaseCommand):
"""
A command which takes no arguments on the command line.
Rather than implementing ``handle()``, subclasses must implement
``handle_noargs()``; ``handle()`` itself is overridden to ensure
no arguments are passed to the command.
Attempting to pass arguments will raise ``CommandError``.
"""
args = ''
def __init__(self):
warnings.warn(
"NoArgsCommand class is deprecated and will be removed in Django 1.10. "
"Use BaseCommand instead, which takes no arguments by default.",
RemovedInDjango110Warning
)
super(NoArgsCommand, self).__init__()
def handle(self, *args, **options):
if args:
raise CommandError("Command doesn't accept any arguments")
return self.handle_noargs(**options)
def handle_noargs(self, **options):
"""
Perform this command's actions.
"""
raise NotImplementedError('subclasses of NoArgsCommand must provide a handle_noargs() method')<|fim▁end|> | debugs = [e for e in all_issues if e.level < checks.INFO and not e.is_silenced()]
infos = [e for e in all_issues if checks.INFO <= e.level < checks.WARNING and not e.is_silenced()] |
<|file_name|>project-meta.ts<|end_file_name|><|fim▁begin|>import type { BlinkMRC, ProjectConfig } from '../types'
import configLoader from '@blinkmobile/blinkmrc'
import pkg from '../../package'
function projectConfig(cwd: string): ProjectConfig {
return configLoader.projectConfig({
name: pkg.name,
cwd: cwd,
})
}
function read(cwd: string): Promise<BlinkMRC> {
return projectConfig(cwd)
.load()
.catch(() => ({}))
}
async function write(
cwd: string,<|fim▁hole|> updater: (config: BlinkMRC) => BlinkMRC,
): Promise<BlinkMRC> {
return await projectConfig(cwd).update(updater)
}
export default {
projectConfig,
read,
write,
}<|fim▁end|> | |
<|file_name|>event_frequent.py<|end_file_name|><|fim▁begin|>import os, sys
origin_dir = 'del_201304now/'
new_dir = 'freq_event_state/'
files = os.listdir(origin_dir)
state_dir = {}
country_dir = {}
for file in files:
with open(origin_dir + file) as f:
event_dir = {}
for line in f:
tmp_content = line.split('\t')
code = tmp_content[4]
location = tmp_content[14]
tmp_loc = location.split(',')
length = len(tmp_loc)
state = ''
if length == 3:
state = tmp_loc[1]
elif length == 2:
state = tmp_loc[0]
else:
continue
country = tmp_loc[length-1]<|fim▁hole|> tmp_dir = country_dir[country][state]
if code in tmp_dir:
tmp_dir[code] += 1
else:
tmp_dir[code] = 1
else:
country_dir[country][state] = {}
country_dir[country][state][code] = 1
for country_name,countries in country_dir.items():
for state_name, states in countries.items():
dir_path = '%s%s/%s/'%(new_dir, country_name, state_name)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
with open(dir_path+file, 'a') as writer:
for event, freq in states.items():
writer.write(event+': '+str(freq)+'\n')<|fim▁end|> | if country not in country_dir:
country_dir[country] = {}
if state in country_dir[country]: |
<|file_name|>RisingPlanHandler.java<|end_file_name|><|fim▁begin|>package com.xcode.mobile.smilealarm.alarmpointmanager;
import android.content.Context;
import java.io.IOException;
import java.sql.Date;
import java.sql.Time;
import java.util.Calendar;
import java.util.HashMap;
import com.xcode.mobile.smilealarm.DataHelper;
import com.xcode.mobile.smilealarm.DateTimeHelper;
import com.xcode.mobile.smilealarm.R;
public class RisingPlanHandler {
private static final int VALID_CODE = 1;
private static final int INVALID_CODE_EXP = 2;
private static final int INVALID_CODE_EXP_SOONER_AVG = 3;
private static final int INVALID_CODE_EXP_AVG_TOO_FAR = 4;
private static final int INVALID_CODE_EXP_AVG_TOO_NEAR = 5;
private static final int INVALID_CODE_STARTDATE_BEFORE_THE_DAY_AFTER_TOMORROW = 6;
private static final int[] ListOfDescreasingMinutes = new int[]{-1, 2, 5, 10, 15, 18, 20};
private HashMap<Date, AlarmPoint> _risingPlan;
private int _currentStage;
private Date _theDateBefore;
private Time _theWakingTimeBefore;
private Time _expWakingTime;
private int _checkCode;
public static int Connect(Context ctx) {
AlarmPointListHandler aplh_RisingPlan = new AlarmPointListHandler(true);
AlarmPointListHandler aplh_AlarmPoint = new AlarmPointListHandler(false);
return aplh_AlarmPoint.overwriteAlarmPointList(aplh_RisingPlan.getCurrentList(), ctx);
}
public int createRisingPlan(Time avgWakingTime, Time expWakingTime, Date startDate, Context ctx)
throws IOException {
_checkCode = checkParameters(avgWakingTime, expWakingTime, startDate);
if (_checkCode != VALID_CODE)
return _checkCode;
_risingPlan = new HashMap<Date, AlarmPoint>();
_currentStage = 1;
_theDateBefore = new Date(DateTimeHelper.GetTheDateBefore(startDate));
_theWakingTimeBefore = avgWakingTime;
_expWakingTime = expWakingTime;
AlarmPoint ap0 = new AlarmPoint(_theDateBefore);
ap0.setColor();
_risingPlan.put(ap0.getSQLDate(), ap0);
while (DateTimeHelper.DurationTwoSQLTime(_theWakingTimeBefore,
_expWakingTime) >= ListOfDescreasingMinutes[_currentStage]) {
generateRisingPlanInCurrentStage();
_currentStage++;
}
generateTheLastAlarmPoint();
DataHelper.getInstance().saveAlarmPointListToData(true, _risingPlan);
return ReturnCode.OK;
}
public String getNotificationFromErrorCode(Context ctx) {
switch (_checkCode) {
case INVALID_CODE_EXP:
return ctx.getString(R.string.not_ExpTime);
case INVALID_CODE_EXP_SOONER_AVG:
return ctx.getString(R.string.not_AvgTime);
case INVALID_CODE_EXP_AVG_TOO_FAR:
return ctx.getString(R.string.not_AvgExpTooLong);
case INVALID_CODE_EXP_AVG_TOO_NEAR:
return ctx.getString(R.string.not_AvgExpTooShort);
case INVALID_CODE_STARTDATE_BEFORE_THE_DAY_AFTER_TOMORROW:
return ctx.getString(R.string.not_StrDate);
default:
return "invalid Code";
}
}
private int checkParameters(Time avgTime, Time expTime, Date startDate) {
if (!isAfterTomorrow(startDate))
return INVALID_CODE_STARTDATE_BEFORE_THE_DAY_AFTER_TOMORROW;
if (DateTimeHelper.CompareTo(avgTime, expTime) < 0)
return INVALID_CODE_EXP_SOONER_AVG;
Time BeginExpTime = new Time(DateTimeHelper.GetSQLTime(5, 0));
Time EndExpTime = new Time(DateTimeHelper.GetSQLTime(8, 0));
if (DateTimeHelper.CompareTo(expTime, BeginExpTime) < 0 || DateTimeHelper.CompareTo(expTime, EndExpTime) > 0) {
return INVALID_CODE_EXP;
}
int maxMinutes = 875;
int minMinutes = 20;
if (DateTimeHelper.DurationTwoSQLTime(avgTime, expTime) > maxMinutes)
return INVALID_CODE_EXP_AVG_TOO_FAR;
if (DateTimeHelper.DurationTwoSQLTime(avgTime, expTime) < minMinutes)
return INVALID_CODE_EXP_AVG_TOO_NEAR;
return VALID_CODE;
}
private void generateRisingPlanInCurrentStage() {
int daysInStage = 10;
if (ListOfDescreasingMinutes[_currentStage] == 15)
daysInStage = 15;
for (int i = 0; i < daysInStage && DateTimeHelper.DurationTwoSQLTime(_theWakingTimeBefore,
_expWakingTime) >= ListOfDescreasingMinutes[_currentStage]; i++) {
// WakingTime
Date currentDate = new Date(DateTimeHelper.GetTheNextDate(_theDateBefore));
AlarmPoint ap = new AlarmPoint(currentDate);
ap.setColor();
Time currentWakingTime = DateTimeHelper.GetSQLTime(_theWakingTimeBefore,
-(ListOfDescreasingMinutes[_currentStage]));
ap.setTimePoint(currentWakingTime, 1);
// SleepingTime
Time sleepingTimeBefore = getSleepingTime(currentWakingTime);
AlarmPoint apBefore = _risingPlan.get(_theDateBefore);
apBefore.setTimePoint(sleepingTimeBefore, 2);
// put
_risingPlan.put(apBefore.getSQLDate(), apBefore);
_risingPlan.put(ap.getSQLDate(), ap);
// reset before
_theDateBefore = currentDate;
_theWakingTimeBefore = currentWakingTime;
}
}
private void generateTheLastAlarmPoint() {
// WakingTime
Date currentDate = new Date(DateTimeHelper.GetTheNextDate(_theDateBefore));
AlarmPoint ap = new AlarmPoint(currentDate);
ap.setColor();
Time currentWakingTime = _expWakingTime;
ap.setTimePoint(currentWakingTime, 1);
<|fim▁hole|> Time sleepingTimeBefore = getSleepingTime(currentWakingTime);
AlarmPoint apBefore = _risingPlan.get(_theDateBefore);
apBefore.setTimePoint(sleepingTimeBefore, 2);
// put
_risingPlan.put(apBefore.getSQLDate(), apBefore);
_risingPlan.put(ap.getSQLDate(), ap);
// reset before
_theDateBefore = currentDate;
_theWakingTimeBefore = currentWakingTime;
}
private Time getSleepingTime(Time wakingTime) {
// wakingTime - 11 PM of thedaybefore > 8 hours => 11 PM
// <=> if wakingTime >= 7 AM
// or wakingTime >= 7AM => 11PM
// 6AM <= wakingTime < 7AM => 10:30 PM
// 5AM <= wakingTime < 6AM => 10 PM
Time SevenAM = new Time(DateTimeHelper.GetSQLTime(7, 0));
Time SixAM = new Time(DateTimeHelper.GetSQLTime(6, 0));
Time FiveAM = new Time(DateTimeHelper.GetSQLTime(5, 0));
Time EleventPM = new Time(DateTimeHelper.GetSQLTime(23, 0));
Time Ten30PM = new Time(DateTimeHelper.GetSQLTime(22, 30));
Time TenPM = new Time(DateTimeHelper.GetSQLTime(22, 0));
if (DateTimeHelper.CompareTo(wakingTime, SevenAM) >= 0) {
return EleventPM;
}
if (DateTimeHelper.CompareTo(wakingTime, SevenAM) < 0 && DateTimeHelper.CompareTo(wakingTime, SixAM) >= 0) {
return Ten30PM;
}
if (DateTimeHelper.CompareTo(wakingTime, SixAM) < 0 && DateTimeHelper.CompareTo(wakingTime, FiveAM) >= 0) {
return TenPM;
}
return null;
}
private Boolean isAfterTomorrow(Date startDate) {
Date today = new Date(Calendar.getInstance().getTimeInMillis());
Date tomorrow = new Date(DateTimeHelper.GetTheNextDate(today));
return (DateTimeHelper.CompareTo(startDate, tomorrow) > 0);
}
}<|fim▁end|> |
// SleepingTime
|
<|file_name|>exceptionbase.cc<|end_file_name|><|fim▁begin|>/****************************************************************************
Copyright (c) 2011-2013,WebJet Business Division,CYOU
http://www.genesis-3d.com.cn
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#include "stdneb.h"
#include "exceptionbase.h"
namespace Exceptions
{
Exception::~Exception() throw()
{
}
Exception::Exception(const String& description_, const String& source_)
:line(0)
,type(EXT_UNDEF_TYPE)
,title("Exception")
,description(description_)
,source(source_)
{
// Log this error - not any more, allow catchers to do it
//LogManager::getSingleton().logMessage(this->getFullDescription());
}
Exception::Exception(const String& description_, const String& source_, const char* file_, long line_)
:type(EXT_UNDEF_TYPE)
,title("Exception")
,description(description_)
,source(source_)
,file(file_)
,line(line_)
<|fim▁hole|> // Log this error - not any more, allow catchers to do it
//LogManager::getSingleton().logMessage(this->getFullDescription());
}
Exception::Exception(int type_, const String& description_, const String& source_, const char* tile_, const char* file_, long line_)
:line(line_)
,type(type_)
,title(tile_)
,description(description_)
,source(source_)
,file(file_)
{
}
Exception::Exception(const Exception& rhs)
: line(rhs.line),
type(rhs.type),
title(rhs.title),
description(rhs.description),
source(rhs.source),
file(rhs.file)
{
}
void Exception::operator = (const Exception& rhs)
{
description = rhs.description;
type = rhs.type;
source = rhs.source;
file = rhs.file;
line = rhs.line;
title = rhs.title;
}
const String& Exception::GetFullDescription() const
{
if (0 == fullDesc.Length())
{
if( line > 0 )
{
fullDesc.Format("GENESIS EXCEPTION(%d:%s): \"%s\" in %s at %s(line, %d)",
type, title.AsCharPtr(), description.AsCharPtr(), source.AsCharPtr(), file.AsCharPtr(), line);
}
else
{
fullDesc.Format("GENESIS EXCEPTION(%d:%s): \"%s\" in %s", type, title.AsCharPtr(), description.AsCharPtr(), source.AsCharPtr());
}
}
return fullDesc;
}
int Exception::GetType(void) const throw()
{
return type;
}
const String &Exception::GetSource() const
{
return source;
}
const String &Exception::GetFile() const
{
return file;
}
long Exception::GetLine() const
{
return line;
}
const String &Exception::GetDescription(void) const
{
return description;
}
const char* Exception::what() const throw()
{
return GetFullDescription().AsCharPtr();
}
}<|fim▁end|> | { |
<|file_name|>05_table_maker.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
"""
Creates a new sound table from random chunks of a soundfile.
"""
from pyo import *
import random, os
s = Server(sr=44100, nchnls=2, buffersize=512, duplex=0).boot()
path = "../snds/baseballmajeur_m.aif"<|fim▁hole|>a = Looper(t, pitch=[1.,1.], dur=t.getDur(), xfade=5, mul=amp).out()
def addsnd():
start = random.uniform(0, dur*0.7)
duration = random.uniform(.1, .3)
pos = random.uniform(0.05, t.getDur()-0.5)
cross = random.uniform(0.04, duration/2)
t.insert(path, pos=pos, crossfade=cross, start=start, stop=start+duration)
def delayed_generation():
start = random.uniform(0, dur*0.7)
duration = random.uniform(.1, .3)
t.setSound(path, start=start, stop=start+duration)
for i in range(10):
addsnd()
a.dur = t.getDur()
a.reset()
amp.play()
caller = CallAfter(function=delayed_generation, time=0.005).stop()
def gen():
amp.stop()
caller.play()
gen()
s.gui(locals())<|fim▁end|> | dur = sndinfo(path)[1]
t = SndTable(path, start=0, stop=1)
amp = Fader(fadein=0.005, fadeout=0.005, dur=0, mul=0.4).play() |
<|file_name|>EntryWidgetPieAlertsXDR.py<|end_file_name|><|fim▁begin|><|fim▁hole|>data = {
"Type": 17,
"ContentsFormat": "pie",
"Contents": {
"stats": [
{
"data": [
int(incident[0].get('CustomFields', {}).get('xdrhighseverityalertcount', 0))
],
"groups": None,
"name": "high",
"label": "incident.severity.high",
"color": "rgb(255, 23, 68)"
},
{
"data": [
int(incident[0].get('CustomFields', {}).get('xdrmediumseverityalertcount', 0))
],
"groups": None,
"name": "medium",
"label": "incident.severity.medium",
"color": "rgb(255, 144, 0)"
},
{
"data": [
int(incident[0].get('CustomFields', {}).get('xdrlowseverityalertcount', 0))
],
"groups": None,
"name": "low",
"label": "incident.severity.low",
"color": "rgb(0, 205, 51)"
},
],
"params": {
"layout": "horizontal"
}
}
}
demisto.results(data)<|fim▁end|> | import demistomock as demisto
incident = demisto.incidents() |
<|file_name|>LaizyImageFlipAnimationLayout.java<|end_file_name|><|fim▁begin|>/*
Copyright 2014 Dániel Sólyom
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package ds.framework.v4.widget;
import android.content.Context;
import android.content.res.TypedArray;
import android.util.AttributeSet;
import ds.framework.v4.R;
import ds.framework.v4.widget.LaizyImageView.LaizyImageViewInfo;
import ds.framework.v4.widget.LaizyImageView.OnImageSetListener;
public class LaizyImageFlipAnimationLayout extends FlipAnimationLayout {
private LaizyImageViewInfo mImageInfo;
private int mDirection = TOP_TO_BOTTOM;
private int mNextImagePosition = 0;
private int mNextLoadingPosition = 0;
private LaizyImageView mNextImageView;
private boolean mFirstImage = true;
private boolean mShowingLoading = false;
private boolean mNeedToShowLoading = false;
private boolean mFlipFirst;
private OnImageSetListener mOnImageSetListener = new OnImageSetListener() {
@Override
public void onDefaultSet(LaizyImageView view) {
if (!view.getInfo().info.equals(mImageInfo.info)) {
return;
}
onFinishedLoading();
}
@Override
public void onLoadingSet(LaizyImageView view) {
if (!view.getInfo().info.equals(mImageInfo.info)) {
return;
}
mShowingLoading = true;
if (mNextImagePosition == mNextLoadingPosition) {
// only happens when we are loading the first image and no need to flip
((LaizyImageView) getChildAt(0)).showLoading(mImageInfo);
return;
}
// just animate in the loading image
mNeedToShowLoading = true;
showLoading();
}
@Override
public void onErrorSet(LaizyImageView view) {
if (!view.getInfo().info.equals(mImageInfo.info)) {
return;
}
onFinishedLoading();
}
@Override
public void onImageSet(LaizyImageView view) {
if (!view.getInfo().info.equals(mImageInfo.info)) {
return;
}
onFinishedLoading();
}
};
public LaizyImageFlipAnimationLayout(Context context) {
this(context, null);
}
public LaizyImageFlipAnimationLayout(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public LaizyImageFlipAnimationLayout(Context context, AttributeSet attrs,
int defStyle) {
super(context, attrs, defStyle);
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.DsView, defStyle, 0);
mFlipFirst = a.getBoolean(R.styleable.DsView_flip_first, true);
a.recycle();
}
/**
* create third image view which will be used to lazy-load the images
*
* @param context
* @return
*/
protected LaizyImageView createThirdImageView(Context context) {
return new LaizyImageView(context);
}
public void reset() {
stop();
setCurrentChild(0);
mFirstImage = true;
}
/**
*
* @param always
*/
public void flipAlways(boolean always) {
mFlipFirst = always;
}
/**
*
* @param info
*/
public void loadImage(LaizyImageViewInfo info) {
if (mImageInfo != null && (info == null || info.info.equals(mImageInfo.info))) {
// already loading / loaded this image
return;
}
stop();
if (getChildCount() < 3) {
for(int i = getChildCount(); i < 3; ++i) {
final LaizyImageView thirdView = createThirdImageView(getContext());
addView(thirdView);
}
}
mNeedToShowLoading = false;
mImageInfo = info;
info.needFading = false;
// load the image
if (mFirstImage) {
// first image to load
mNextLoadingPosition = mNextImagePosition = 1;
// act like we are showing loading so we could do the flip even for the first image
mShowingLoading = mFlipFirst;
mFirstImage = false;
} else {
// load to the next empty position
mNextImagePosition = advancePosition(getCurrentChildPosition());
// the current third position which is not showing and not used to
// load into it will be the only empty position we can have
mNextLoadingPosition = advancePosition(mNextImagePosition);
}
mNextImageView = (LaizyImageView) getChildAt(mNextImagePosition);
mNextImageView.setOnImageSetListener(mOnImageSetListener);
mNextImageView.reset();
mNextImageView.load(mImageInfo);
}
/**
*
*/
public void stop() {
mNeedToShowLoading = false;
mShowingLoading = false;
if (mNextImageView != null) {
mNextImageView.stopLoading();
mNextImageView.setOnImageSetListener(null);
}
mNextImageView = null;
mImageInfo = null;
super.cancel();
}
/**
*
* @param direction
*/
public void setDirection(int direction) {
assert(direction >= 0 && direction < DIRECTIONS.length);
mDirection = direction;
}
@Override
void setState(int state) {
super.setState(state);
if (mNeedToShowLoading && state == STATE_CALM) {
// still loading the image and the previous flip is finished
// show loading
showLoading();
}
}
/**
*
*/
private void showLoading() {
if (!mNeedToShowLoading) {
return;
}
if (getState() == STATE_ANIMATING) {
// wait for the previous animation to finish when loading
return;
}
mNeedToShowLoading = false;
((LaizyImageView) getChildAt(mNextLoadingPosition)).showLoading(mImageInfo);
start(mDirection, mNextLoadingPosition);
}
/**
*
*/
private void onFinishedLoading() {
if (getCurrentChildPosition() == mNextImagePosition) {
// we are showing the image that just finished loading so nothing to do
// except we cancel the animation if there was any
// this would look messy if animating but mostly it is not the case
cancel();
return;
}
if (!mShowingLoading) {
// there was no need to show loading - image was right there
// just switch without animation
setCurrentChild(mNextImagePosition);
} else {
// flip to the loaded image
start(mDirection, mNextImagePosition);
}
mNextImageView.setOnImageSetListener(null);
}
/**
*
* @param resource
*/
public void setCurrentTo(int resId) {
stop();
((LaizyImageView) getCurrentChild()).setImageResource(resId);
}
/**
* <|fim▁hole|> private int advancePosition(int position) {
position++;
if (position > 2) {
position = 0;
}
return position;
}
}<|fim▁end|> | */ |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>mod day_1;
mod day_2;
mod day_3;
<|fim▁hole|> day_1::solutions();
day_2::solutions();
day_3::solutions();
}<|fim▁end|> |
fn main() { |
<|file_name|>clouddns.go<|end_file_name|><|fim▁begin|>// +skip_license_check
/*
This file contains portions of code directly taken from the 'xenolf/lego' project.
A copy of the license for this code can be found in the file named LICENSE in
this directory.
*/
// Package clouddns implements a DNS provider for solving the DNS-01
// challenge using Google Cloud DNS.
package clouddns
import (
"fmt"
"io/ioutil"
"os"
"time"
logf "github.com/jetstack/cert-manager/pkg/logs"
"github.com/go-logr/logr"
"golang.org/x/net/context"
"golang.org/x/oauth2/google"
"google.golang.org/api/dns/v1"
"google.golang.org/api/option"
"github.com/jetstack/cert-manager/pkg/issuer/acme/dns/util"
)
// DNSProvider is an implementation of the DNSProvider interface.
type DNSProvider struct {
hostedZoneName string
dns01Nameservers []string
project string
client *dns.Service
log logr.Logger
}
func NewDNSProvider(project string, saBytes []byte, dns01Nameservers []string, ambient bool, hostedZoneName string) (*DNSProvider, error) {
// project is a required field
if project == "" {
return nil, fmt.Errorf("Google Cloud project name missing")
}
// if the service account bytes are not provided, we will attempt to instantiate
// with 'ambient credentials' (if they are allowed/enabled)
if len(saBytes) == 0 {
if !ambient {
return nil, fmt.Errorf("unable to construct clouddns provider: empty credentials; perhaps you meant to enable ambient credentials?")
}
return NewDNSProviderCredentials(project, dns01Nameservers, hostedZoneName)
}
// if service account data is provided, we instantiate using that
if len(saBytes) != 0 {
return NewDNSProviderServiceAccountBytes(project, saBytes, dns01Nameservers, hostedZoneName)
}
return nil, fmt.Errorf("missing Google Cloud DNS provider credentials")
}
// NewDNSProviderEnvironment returns a DNSProvider instance configured for Google Cloud
// DNS. Project name must be passed in the environment variable: GCE_PROJECT.
// A Service Account file can be passed in the environment variable:
// GCE_SERVICE_ACCOUNT_FILE
func NewDNSProviderEnvironment(dns01Nameservers []string, hostedZoneName string) (*DNSProvider, error) {
project := os.Getenv("GCE_PROJECT")
if saFile, ok := os.LookupEnv("GCE_SERVICE_ACCOUNT_FILE"); ok {
return NewDNSProviderServiceAccount(project, saFile, dns01Nameservers, hostedZoneName)
}
return NewDNSProviderCredentials(project, dns01Nameservers, hostedZoneName)
}
// NewDNSProviderCredentials uses the supplied credentials to return a
// DNSProvider instance configured for Google Cloud DNS.
func NewDNSProviderCredentials(project string, dns01Nameservers []string, hostedZoneName string) (*DNSProvider, error) {
if project == "" {
return nil, fmt.Errorf("Google Cloud project name missing")
}
ctx := context.Background()
client, err := google.DefaultClient(ctx, dns.NdevClouddnsReadwriteScope)
if err != nil {
return nil, fmt.Errorf("Unable to get Google Cloud client: %v", err)
}
svc, err := dns.NewService(ctx, option.WithHTTPClient(client))
if err != nil {
return nil, fmt.Errorf("Unable to create Google Cloud DNS service: %v", err)
}
return &DNSProvider{
project: project,
client: svc,
dns01Nameservers: dns01Nameservers,
hostedZoneName: hostedZoneName,
log: logf.Log.WithName("clouddns"),
}, nil
}
// NewDNSProviderServiceAccount uses the supplied service account JSON file to
// return a DNSProvider instance configured for Google Cloud DNS.
func NewDNSProviderServiceAccount(project string, saFile string, dns01Nameservers []string, hostedZoneName string) (*DNSProvider, error) {
if project == "" {
return nil, fmt.Errorf("Google Cloud project name missing")
}
if saFile == "" {
return nil, fmt.Errorf("Google Cloud Service Account file missing")
}
dat, err := ioutil.ReadFile(saFile)
if err != nil {
return nil, fmt.Errorf("Unable to read Service Account file: %v", err)
}
return NewDNSProviderServiceAccountBytes(project, dat, dns01Nameservers, hostedZoneName)
}
// NewDNSProviderServiceAccountBytes uses the supplied service account JSON
// file data to return a DNSProvider instance configured for Google Cloud DNS.
func NewDNSProviderServiceAccountBytes(project string, saBytes []byte, dns01Nameservers []string, hostedZoneName string) (*DNSProvider, error) {
if project == "" {
return nil, fmt.Errorf("Google Cloud project name missing")
}
if len(saBytes) == 0 {
return nil, fmt.Errorf("Google Cloud Service Account data missing")
}
conf, err := google.JWTConfigFromJSON(saBytes, dns.NdevClouddnsReadwriteScope)
if err != nil {
return nil, fmt.Errorf("Unable to acquire config: %v", err)
}
ctx := context.Background()
client := conf.Client(ctx)
svc, err := dns.NewService(ctx, option.WithHTTPClient(client))
if err != nil {
return nil, fmt.Errorf("Unable to create Google Cloud DNS service: %v", err)
}<|fim▁hole|> dns01Nameservers: dns01Nameservers,
hostedZoneName: hostedZoneName,
log: logf.Log.WithName("clouddns"),
}, nil
}
// Present creates a TXT record to fulfil the dns-01 challenge.
func (c *DNSProvider) Present(domain, fqdn, value string) error {
zone, err := c.getHostedZone(fqdn)
if err != nil {
return err
}
rec := &dns.ResourceRecordSet{
Name: fqdn,
Rrdatas: []string{value},
Ttl: int64(60),
Type: "TXT",
}
change := &dns.Change{
Additions: []*dns.ResourceRecordSet{rec},
}
// Look for existing records.
list, err := c.client.ResourceRecordSets.List(c.project, zone).Name(fqdn).Type("TXT").Do()
if err != nil {
return err
}
if len(list.Rrsets) > 0 {
// Attempt to delete the existing records when adding our new one.
change.Deletions = list.Rrsets
}
chg, err := c.client.Changes.Create(c.project, zone, change).Do()
if err != nil {
return err
}
// wait for change to be acknowledged
for chg.Status == "pending" {
time.Sleep(time.Second)
chg, err = c.client.Changes.Get(c.project, zone, chg.Id).Do()
if err != nil {
return err
}
}
return nil
}
// CleanUp removes the TXT record matching the specified parameters.
func (c *DNSProvider) CleanUp(domain, fqdn, value string) error {
zone, err := c.getHostedZone(fqdn)
if err != nil {
return err
}
records, err := c.findTxtRecords(zone, fqdn)
if err != nil {
return err
}
for _, rec := range records {
change := &dns.Change{
Deletions: []*dns.ResourceRecordSet{rec},
}
_, err = c.client.Changes.Create(c.project, zone, change).Do()
if err != nil {
return err
}
}
return nil
}
// getHostedZone returns the managed-zone
func (c *DNSProvider) getHostedZone(domain string) (string, error) {
if c.hostedZoneName != "" {
return c.hostedZoneName, nil
}
authZone, err := util.FindZoneByFqdn(util.ToFqdn(domain), c.dns01Nameservers)
if err != nil {
return "", err
}
zones, err := c.client.ManagedZones.
List(c.project).
DnsName(authZone).
Do()
if err != nil {
return "", fmt.Errorf("GoogleCloud API call failed: %v", err)
}
if len(zones.ManagedZones) == 0 {
return "", fmt.Errorf("No matching GoogleCloud domain found for domain %s", authZone)
}
// attempt to get the first public zone
for _, zone := range zones.ManagedZones {
if zone.Visibility == "public" {
return zone.Name, nil
}
}
c.log.V(logf.DebugLevel).Info("No matching public GoogleCloud managed-zone for domain, falling back to a private managed-zone", authZone)
// fall back to first available zone, if none public
return zones.ManagedZones[0].Name, nil
}
func (c *DNSProvider) findTxtRecords(zone, fqdn string) ([]*dns.ResourceRecordSet, error) {
recs, err := c.client.ResourceRecordSets.List(c.project, zone).Do()
if err != nil {
return nil, err
}
found := []*dns.ResourceRecordSet{}
for _, r := range recs.Rrsets {
if r.Type == "TXT" && r.Name == fqdn {
found = append(found, r)
}
}
return found, nil
}<|fim▁end|> | return &DNSProvider{
project: project,
client: svc, |
<|file_name|>repoItemClassifier.filter.js<|end_file_name|><|fim▁begin|>'use strict';
/*@ngInject*/
function repoItemClassifierFilter() {
return function(node) {
if (node.type) {
return 'uib-repository__item--movable';
} else {
return 'uib-repository__group';
}<|fim▁hole|> };
}
module.exports = repoItemClassifierFilter;<|fim▁end|> | |
<|file_name|>arksearch.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2016 Major Hayden
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Searches Intel's ARK site and returns data about various processors.
TOTALLY UNOFFICIAL. ;)
"""
from bs4 import BeautifulSoup
import click
import requests
from terminaltables import AsciiTable
USER_AGENT = ("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like"
"Gecko) Chrome/47.0.2526.111 Safari/537.36")
def get_full_ark_url(quickurl):
full_url = "http://ark.intel.com{0}".format(quickurl)
return full_url
def get_cpu_html(quickurl):
"""Connect to Intel's ark website and retrieve HTML."""
full_url = get_full_ark_url(quickurl)
headers = {
'User-Agent': USER_AGENT,
}
r = requests.get(full_url, headers=headers)
return r.text
def generate_table_data(html_output):
"""Generate an ASCII table based on the HTML provided."""
soup = BeautifulSoup(html_output, 'html.parser')
table_data = [
['Parameter', 'Value']
]
for table in soup.select('table.specs'):
rows = table.find_all("tr")
for row in rows[1:]:
cells = [cell.get_text("\n", strip=True)
for cell in row.find_all('td')]
if cells[0] == 'T\nCASE':
cells[0] = 'T(CASE)'
if "\n" in cells[0]:
cells[0] = cells[0][:cells[0].index("\n")]
table_data.append(cells)
<|fim▁hole|> return table_data
def quick_search(search_term):
url = "http://ark.intel.com/search/AutoComplete?term={0}"
headers = {
'User-Agent': USER_AGENT,
}
r = requests.get(url.format(search_term, headers=headers))
return r.json()
@click.command()
@click.argument('search_term')
@click.pass_context
def search(ctx, search_term):
"""Main function of the script."""
ark_json = quick_search(search_term)
if len(ark_json) < 1:
click.echo("Couldn't find any processors matching "
"{0}".format(search_term))
ctx.exit(0)
click.echo(u"Processors found: {0}".format(len(ark_json)))
choice_dict = {}
counter = 0
for cpu in ark_json:
choice_dict[counter] = cpu['quickUrl']
click.echo(u"[{0}] {1}".format(counter, cpu['value']))
counter += 1
if len(ark_json) > 1:
choice = click.prompt(u"Which processor", prompt_suffix='? ', type=int)
else:
choice = 0
cpu_data = get_cpu_html(choice_dict[int(choice)])
table_data = generate_table_data(cpu_data)
table = AsciiTable(table_data)
click.echo(table.table)
ctx.exit(0)
if __name__ == '__main__':
search()<|fim▁end|> | |
<|file_name|>trigger.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) Apache License, Version 2.0 http://www.apache.org/licenses/LICENSE-2.0
*/
define(['require', 'elementUtils'],
function (require, ElementUtils) {
/**
* @class Trigger
* @constructor
* @class Trigger Creates a Trigger
* @param {Object} options Rendering options for the view
*/
var Trigger = function (options) {
/*
Data storing structure as follows
id*: '',
previousCommentSegment:'',
name*: '',
at*: ‘’,
atEvery*: '',
annotationList: [annotation1, annotation2, ...]
*/
if (options !== undefined) {
this.id = options.id;
this.previousCommentSegment = options.previousCommentSegment;
this.name = options.name;
this.criteria = options.criteria;
this.criteriaType = options.criteriaType;
}
this.annotationList = [];
};
Trigger.prototype.addAnnotation = function (annotation) {
this.annotationList.push(annotation);
};
Trigger.prototype.clearAnnotationList = function () {
ElementUtils.prototype.removeAllElements(this.annotationList);
};<|fim▁hole|>
Trigger.prototype.getName = function () {
return this.name;
};
Trigger.prototype.getCriteria = function () {
return this.criteria;
};
Trigger.prototype.getCriteriaType = function () {
return this.criteriaType;
};
Trigger.prototype.getAnnotationList = function () {
return this.annotationList;
};
Trigger.prototype.setId = function (id) {
this.id = id;
};
Trigger.prototype.setName = function (name) {
this.name = name;
};
Trigger.prototype.setCriteria = function (criteria) {
this.criteria = criteria;
};
Trigger.prototype.setCriteriaType = function (criteriaType) {
this.criteriaType = criteriaType;
};
Trigger.prototype.setAnnotationList = function (annotationList) {
this.annotationList = annotationList;
};
return Trigger;
});<|fim▁end|> |
Trigger.prototype.getId = function () {
return this.id;
}; |
<|file_name|>parse_un.py<|end_file_name|><|fim▁begin|>from xml.etree import ElementTree
from os.path import dirname, realpath
directory_of_sources = dirname(realpath(__file__)) + "/sources/"
d = {}
d['AR'] = "Arabic"<|fim▁hole|>d['FR'] = "French"
d['RU'] = "Russian"
d['ZH'] = "Mandarin"
filepath = '/tmp/uncorpora_plain_20090831.tmx'
count = 0
for event, elem in ElementTree.iterparse(filepath, events=('start', 'end', 'start-ns', 'end-ns')):
if event == "start":
print event, elem
if elem.tag == "tu":
uid = elem.attrib['tuid']
if elem.tag == "tuv":
language = elem.attrib['{http://www.w3.org/XML/1998/namespace}lang']
if elem.tag == "seg":
text = elem.text
print language, "text is", text
if text and len(text) > 200:
with open(directory_of_sources + d[language] + "/" + uid, "wb") as f:
f.write(text.encode("utf-8"))
count += 1
if count == 50000:
break<|fim▁end|> | d['EN'] = "English"
d['ES'] = "Spanish" |
<|file_name|>TitleRepository.java<|end_file_name|><|fim▁begin|>package com.app.server.repository;
import com.athena.server.repository.SearchInterface;
import com.athena.annotation.Complexity;
import com.athena.annotation.SourceCodeAuthorClass;
import com.athena.framework.server.exception.repository.SpartanPersistenceException;<|fim▁hole|>import com.athena.framework.server.exception.biz.SpartanConstraintViolationException;
@SourceCodeAuthorClass(createdBy = "john.doe", updatedBy = "", versionNumber = "1", comments = "Repository for Title Master table Entity", complexity = Complexity.LOW)
public interface TitleRepository<T> extends SearchInterface {
public List<T> findAll() throws SpartanPersistenceException;
public T save(T entity) throws SpartanPersistenceException;
public List<T> save(List<T> entity) throws SpartanPersistenceException;
public void delete(String id) throws SpartanPersistenceException;
public void update(T entity) throws SpartanConstraintViolationException, SpartanPersistenceException;
public void update(List<T> entity) throws SpartanPersistenceException;
public T findById(String titleId) throws Exception, SpartanPersistenceException;
}<|fim▁end|> | import java.util.List; |
<|file_name|>webhook.rs<|end_file_name|><|fim▁begin|>//! Webhook model and implementations.
use super::{
id::{
ChannelId,
GuildId,
WebhookId
},
user::User
};
#[cfg(feature = "model")]
use builder::ExecuteWebhook;
#[cfg(feature = "model")]
use internal::prelude::*;
#[cfg(feature = "model")]
use std::mem;
#[cfg(feature = "model")]
use super::channel::Message;
#[cfg(feature = "model")]
use {http, utils};
/// A representation of a webhook, which is a low-effort way to post messages to
/// channels. They do not necessarily require a bot user or authentication to
/// use.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Webhook {
/// The unique Id.
///
/// Can be used to calculate the creation date of the webhook.
pub id: WebhookId,
/// The default avatar.
///
/// This can be modified via [`ExecuteWebhook::avatar`].
///
/// [`ExecuteWebhook::avatar`]: ../../builder/struct.ExecuteWebhook.html#method.avatar
pub avatar: Option<String>,
/// The Id of the channel that owns the webhook.
pub channel_id: ChannelId,
/// The Id of the guild that owns the webhook.
pub guild_id: Option<GuildId>,
/// The default name of the webhook.
///
/// This can be modified via [`ExecuteWebhook::username`].
///
/// [`ExecuteWebhook::username`]: ../../builder/struct.ExecuteWebhook.html#method.username
pub name: Option<String>,
/// The webhook's secure token.
pub token: String,
/// The user that created the webhook.
///
/// **Note**: This is not received when getting a webhook by its token.
pub user: Option<User>,
}
#[cfg(feature = "model")]
impl Webhook {
/// Deletes the webhook.
///
/// As this calls the [`http::delete_webhook_with_token`] function,
/// authentication is not required.
///
/// [`http::delete_webhook_with_token`]: ../../http/fn.delete_webhook_with_token.html
#[inline]
pub fn delete(&self) -> Result<()> { http::delete_webhook_with_token(self.id.0, &self.token) }
///
/// Edits the webhook in-place. All fields are optional.
///
/// To nullify the avatar, pass `Some("")`. Otherwise, passing `None` will
/// not modify the avatar.
///
/// Refer to [`http::edit_webhook`] for httprictions on editing webhooks.
///
/// As this calls the [`http::edit_webhook_with_token`] function,
/// authentication is not required.
///
/// # Examples
///
/// Editing a webhook's name:
///
/// ```rust,no_run
/// use serenity::http;
///
/// let id = 245037420704169985;
/// let token = "ig5AO-wdVWpCBtUUMxmgsWryqgsW3DChbKYOINftJ4DCrUbnkedoYZD0VOH1QLr-S3sV";
///
/// let mut webhook = http::get_webhook_with_token(id, token)
/// .expect("valid webhook");
///
/// let _ = webhook.edit(Some("new name"), None).expect("Error editing");
/// ```
///
/// Setting a webhook's avatar:
///
/// ```rust,no_run
/// use serenity::http;
///
/// let id = 245037420704169985;
/// let token = "ig5AO-wdVWpCBtUUMxmgsWryqgsW3DChbKYOINftJ4DCrUbnkedoYZD0VOH1QLr-S3sV";
///
/// let mut webhook = http::get_webhook_with_token(id, token)
/// .expect("valid webhook");
///
/// let image = serenity::utils::read_image("./webhook_img.png")
/// .expect("Error reading image");
///
/// let _ = webhook.edit(None, Some(&image)).expect("Error editing");
/// ```
///
/// [`http::edit_webhook`]: ../../http/fn.edit_webhook.html
/// [`http::edit_webhook_with_token`]: ../../http/fn.edit_webhook_with_token.html
pub fn edit(&mut self, name: Option<&str>, avatar: Option<&str>) -> Result<()> {
if name.is_none() && avatar.is_none() {
return Ok(());
}
let mut map = Map::new();
if let Some(avatar) = avatar {
map.insert(
"avatar".to_string(),
if avatar.is_empty() {
Value::Null
} else {
Value::String(avatar.to_string())
},
);
}
if let Some(name) = name {
map.insert("name".to_string(), Value::String(name.to_string()));
}
match http::edit_webhook_with_token(self.id.0, &self.token, &map) {
Ok(replacement) => {
mem::replace(self, replacement);
Ok(())
},
Err(why) => Err(why),
}
}
/// Executes a webhook with the fields set via the given builder.
///
/// The builder provides a method of setting only the fields you need,
/// without needing to pass a long set of arguments.
///
/// # Examples
///
/// Execute a webhook with message content of `test`:
///
/// ```rust,no_run
/// use serenity::http;
///
/// let id = 245037420704169985;
/// let token = "ig5AO-wdVWpCBtUUMxmgsWryqgsW3DChbKYOINftJ4DCrUbnkedoYZD0VOH1QLr-S3sV";
///
/// let mut webhook = http::get_webhook_with_token(id, token)
/// .expect("valid webhook");
///
/// let _ = webhook.execute(false, |w| w.content("test")).expect("Error executing");
/// ```
///
/// Execute a webhook with message content of `test`, overriding the
/// username to `serenity`, and sending an embed:
///
/// ```rust,no_run
/// use serenity::http;
/// use serenity::model::channel::Embed;
///
/// let id = 245037420704169985;
/// let token = "ig5AO-wdVWpCBtUUMxmgsWryqgsW3DChbKYOINftJ4DCrUbnkedoYZD0VOH1QLr-S3sV";
///
/// let mut webhook = http::get_webhook_with_token(id, token)
/// .expect("valid webhook");
///
/// let embed = Embed::fake(|e| e
/// .title("Rust's website")
/// .description("Rust is a systems programming language that runs
/// blazingly fast, prevents segfaults, and guarantees
/// thread safety.")
/// .url("https://rust-lang.org"));
///
/// let _ = webhook.execute(false, |w| w
/// .content("test")
/// .username("serenity")
/// .embeds(vec![embed]))
/// .expect("Error executing");
/// ```
#[inline]
pub fn execute<F: FnOnce(ExecuteWebhook) -> ExecuteWebhook>(&self,
wait: bool,
f: F)
-> Result<Option<Message>> {
let map = utils::vecmap_to_json_map(f(ExecuteWebhook::default()).0);
http::execute_webhook(self.id.0, &self.token, wait, &map)
}
/// Retrieves the latest information about the webhook, editing the
/// webhook in-place.
///
/// As this calls the [`http::get_webhook_with_token`] function,
/// authentication is not required.
///
/// [`http::get_webhook_with_token`]: ../../http/fn.get_webhook_with_token.html
pub fn refresh(&mut self) -> Result<()> {
match http::get_webhook_with_token(self.id.0, &self.token) {
Ok(replacement) => {
let _ = mem::replace(self, replacement);
Ok(())
},
Err(why) => Err(why),
}
}
}
#[cfg(feature = "model")]
impl WebhookId {
/// Retrieves the webhook by the Id.
///
/// **Note**: Requires the [Manage Webhooks] permission.
///
/// [Manage Webhooks]: ../../model/permissions/struct.Permissions.html#associatedconstant.MANAGE_WEBHOOKS
#[inline]
#[deprecated(since = "0.5.8", note = "Use the `to_webhook`-method instead.")]
pub fn get(self) -> Result<Webhook> { self.to_webhook() }
/// Requests [`Webhook`] over REST API.
///
/// **Note**: Requires the [Manage Webhooks] permission.<|fim▁hole|> /// [Manage Webhooks]: ../../model/permissions/struct.Permissions.html#associatedconstant.MANAGE_WEBHOOKS
#[inline]
pub fn to_webhook(self) -> Result<Webhook> { http::get_webhook(self.0) }
}<|fim▁end|> | ///
/// [`Webhook`]: struct.Webhook.html |
<|file_name|>jquery.inputmask.regex.extensions.min.js<|end_file_name|><|fim▁begin|>/*!
* jquery.inputmask.regex.extensions.min.js
* http://github.com/RobinHerbots/jquery.inputmask
* Copyright (c) 2010 - 2015 Robin Herbots
* Licensed under the MIT license (http://www.opensource.org/licenses/mit-license.php)
* Version: 3.1.62
*/
!function (a) {
"function" == typeof define && define.amd ? define(["jquery", "./jquery.inputmask"], a) : a(jQuery)
}(function (a) {
return a.extend(a.inputmask.defaults.aliases, {
Regex: {
mask: "r",
greedy: !1,
repeat: "*",
regex: null,
regexTokens: null,
tokenizer: /\[\^?]?(?:[^\\\]]+|\\[\S\s]?)*]?|\\(?:0(?:[0-3][0-7]{0,2}|[4-7][0-7]?)?|[1-9][0-9]*|x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|c[A-Za-z]|[\S\s]?)|\((?:\?[:=!]?)?|(?:[?*+]|\{[0-9]+(?:,[0-9]*)?\})\??|[^.?*+^${[()|\\]+|./g,
quantifierFilter: /[0-9]+[^,]/,
isComplete: function (a, b) {
return new RegExp(b.regex).test(a.join(""))
},
definitions: {
r: {
validator: function (b, c, d, e, f) {
function g(a, b) {
this.matches = [], this.isGroup = a || !1, this.isQuantifier = b || !1, this.quantifier = {
min: 1,
max: 1
}, this.repeaterPart = void 0
}
function h() {
var a, b, c = new g, d = [];
for (f.regexTokens = []; a = f.tokenizer.exec(f.regex);)switch (b = a[0], b.charAt(0)) {
case"(":
d.push(new g(!0));
break;
case")":
var e = d.pop();
d.length > 0 ? d[d.length - 1].matches.push(e) : c.matches.push(e);
break;
case"{":
case"+":
case"*":
var h = new g(!1, !0);
b = b.replace(/[{}]/g, "");
var i = b.split(","), j = isNaN(i[0]) ? i[0] : parseInt(i[0]), k = 1 == i.length ? j : isNaN(i[1]) ? i[1] : parseInt(i[1]);
if (h.quantifier = {min: j, max: k}, d.length > 0) {
var l = d[d.length - 1].matches;
if (a = l.pop(), !a.isGroup) {
var e = new g(!0);
e.matches.push(a), a = e
}
l.push(a), l.push(h)
} else {
if (a = c.matches.pop(), !a.isGroup) {
var e = new g(!0);
e.matches.push(a), a = e
}
c.matches.push(a), c.matches.push(h)
}
break;
default:
d.length > 0 ? d[d.length - 1].matches.push(b) : c.matches.push(b)
}
c.matches.length > 0 && f.regexTokens.push(c)
}
function i(b, c) {
var d = !1;
c && (k += "(", m++);
for (var e = 0; e < b.matches.length; e++) {
var f = b.matches[e];
if (1 == f.isGroup) {
d = i(f, !0);
} else if (1 == f.isQuantifier) {
var g = a.inArray(f, b.matches), h = b.matches[g - 1], j = k;
if (isNaN(f.quantifier.max)) {
for (; f.repeaterPart && f.repeaterPart != k && f.repeaterPart.length > k.length && !(d = i(h, !0)););
d = d || i(h, !0), d && (f.repeaterPart = k), k = j + f.quantifier.max
} else {
for (var l = 0, o = f.quantifier.max - 1; o > l && !(d = i(h, !0)); l++);
k = j + "{" + f.quantifier.min + "," + f.quantifier.max + "}"
}
} else if (void 0 != f.matches) {
for (var p = 0; p < f.length && !(d = i(f[p], c)); p++);
} else {
var q;
if ("[" == f.charAt(0)) {
q = k, q += f;
for (var r = 0; m > r; r++)q += ")";
var s = new RegExp("^(" + q + ")$");
d = s.test(n)
} else {
for (var t = 0, u = f.length; u > t; t++)if ("\\" != f.charAt(t)) {
q = k, q += f.substr(0, t + 1), q = q.replace(/\|$/, "");
for (var r = 0; m > r; r++)q += ")";
var s = new RegExp("^(" + q + ")$");
if (d = s.test(n)) {
break
}
}
}
k += f
}
if (d) {
break
}
}
return c && (k += ")", m--), d
}
null == f.regexTokens && h();
var j = c.buffer.slice(), k = "", l = !1, m = 0;
j.splice(d, 0, b);
for (var n = j.join(""), o = 0; o < f.regexTokens.length; o++) {
var g = f.regexTokens[o];
if (l = i(g, g.isGroup)) {
break<|fim▁hole|> }
}
}
}), a.fn.inputmask
});<|fim▁end|> | }
}
return l
}, cardinality: 1 |
<|file_name|>course_search.py<|end_file_name|><|fim▁begin|>from bs4 import BeautifulSoup
from models.course import Course
import requests
default_postdata = {
'CAMPUS': '0',
'TERMYEAR': '201709',
'CORE_CODE': 'AR%',
'subj_code': '',
'CRSE_NUMBER': '',
'crn': '',
'open_only': 'on',
'BTN_PRESSED': 'FIND class sections',
}
url = 'https://banweb.banner.vt.edu/ssb/prod/HZSKVTSC.P_ProcRequest'
def _get_open_courses(data):
req = requests.post(url, data=data)
soup = BeautifulSoup(req.content, 'html5lib')
rows = soup.select('table.dataentrytable tbody tr')
open_courses = list()
# The first row is the header row with the column labels
# If there's only one row, the rest of the table is empty, so there are no results
if len(rows) > 1:
rows = rows[1:]
for row in rows:
cells = row.select('td')
cells_text = list(map(lambda x: x.get_text(), cells))
crn = cells_text[0].strip()
label = cells_text[1].strip()
title = cells_text[2].strip()
professor = cells_text[6].strip()
open_courses.append(Course(crn, label, title, professor))
return open_courses
def get_open_courses_by_course(subj, num, semester):
""" Get the open courses that match the course subject and number passed in
:param subj: The subject abbreviation
:param num: The course number<|fim▁hole|> """
postdata = default_postdata.copy()
postdata['subj_code'] = subj.strip().upper()
postdata['CRSE_NUMBER'] = num.strip()
postdata['TERMYEAR'] = semester
return _get_open_courses(postdata)
def get_open_courses_by_crn(crn, semester):
""" Get the open course that matches the crn passed in
:param crn: The course request number of the course section
:return: Returns a list of the open courses that are matched
"""
postdata = default_postdata.copy()
postdata['crn'] = crn.strip()
postdata['TERMYEAR'] = semester
return _get_open_courses(postdata)<|fim▁end|> | :return: Returns a list of the open courses that are matched |
<|file_name|>order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
from openerp.osv import fields, osv
from openerp.tools.translate import _
class sale_order_line(osv.Model):
"""
OpenERP Model : sale_order_line
"""
_inherit = 'sale.order.line'
_columns = {
'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this
option, the first attachment related to the product_id marked as brochure will be printed<|fim▁hole|> """
OpenERP Model : sale_order_line
"""
_inherit = 'sale.order'
def print_with_attachment(self, cr, user, ids, context={}):
for o in self.browse(cr, user, ids, context):
for ol in o.order_line:
if ol.att_bro:
print "Im Here i will go to print %s " % ol.name
return True
def __get_company_object(self, cr, uid):
user = self.pool.get('res.users').browse(cr, uid, uid)
print user
if not user.company_id:
raise except_osv(_('ERROR !'), _(
'There is no company configured for this user'))
return user.company_id
def _get_report_name(self, cr, uid, context):
report = self.__get_company_object(cr, uid).sale_report_id
if not report:
rep_id = self.pool.get("ir.actions.report.xml").search(
cr, uid, [('model', '=', 'sale.order'), ], order="id")[0]
report = self.pool.get(
"ir.actions.report.xml").browse(cr, uid, rep_id)
return report.report_name
def print_quotation(self, cr, uid, ids, context=None):
pq = super(sale_order, self).print_quotation(cr,uid,ids, context)
return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid,
context), 'datas': pq['datas'], 'nodestroy': True}<|fim▁end|> | as extra info with sale order"""),
}
class sale_order(osv.Model): |
<|file_name|>Actions.java<|end_file_name|><|fim▁begin|>/**
*A PUBLIC CLASS FOR ACTIONS.JAVA
*/
class Actions{
public Fonts font = new Fonts();
/**
<|fim▁hole|> */
public void fonT(){
font.setVisible(true); //setting the visible is true
font.pack(); //pack the panel
//making an action for ok button, so we can change the font
font.getOkjb().addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent ae){
n.getTextArea().setFont(font.font());
//after we chose the font, then the JDialog will be closed
font.setVisible(false);
}
});
//making an action for cancel button, so we can return to the old font.
font.getCajb().addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent ae){
//after we cancel the, then the JDialog will be closed
font.setVisible(false);
}
});
}
//for wraping the line & wraping the style word
public void lineWraP(){
if(n.getLineWrap().isSelected()){
/**
*make the line wrap & wrap style word is true
*when the line wrap is selected
*/
n.getTextArea().setLineWrap(true);
n.getTextArea().setWrapStyleWord(true);
}
else{
/**
*make the line wrap & wrap style word is false
*when the line wrap isn't selected
*/
n.getTextArea().setLineWrap(false);
n.getTextArea().setWrapStyleWord(false);
}
}
}<|fim▁end|> | *@see FONTS.JAVA
*this is a font class which is for changing the font, style & size
|
<|file_name|>restfulApi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
from flask import Flask, jsonify, request
app = Flask(__name__)
from charge.chargeManager import ChargeManager
from data.dataProvider import DataProvider
@app.route('/')
def hello_world():
return jsonify(testPreMa(['棉花'],20))
@app.route('/result')
def get_result():
name = request.args.get('name').encode('utf-8')
print name<|fim▁hole|>
def testPreMa(nameArray,period):
for name in nameArray:
print 'preMa----------------%s--%d周期-------------------' % (name, period)
dp = DataProvider(name=name)
p_list = dp.getData(['date', 'close'])
cm = ChargeManager(p_list, period, nodeStat=False)
cm.startCharge('preMa')
return cm.resultJson()
if __name__ == '__main__':
app.run(host='localhost')<|fim▁end|> | return jsonify(testPreMa([name], 20)) |
<|file_name|>oobe_browsertest.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/command_line.h"
#include "base/path_service.h"
#include "chrome/browser/chrome_browser_main.h"
#include "chrome/browser/chrome_browser_main_extra_parts.h"
#include "chrome/browser/chrome_content_browser_client.h"
#include "chrome/browser/chromeos/cros/cros_in_process_browser_test.h"
#include "chrome/browser/chromeos/login/existing_user_controller.h"
#include "chrome/browser/chromeos/login/webui_login_display.h"
#include "chrome/browser/chromeos/login/wizard_controller.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/common/chrome_notification_types.h"
#include "chrome/common/chrome_paths.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/test/base/in_process_browser_test.h"
#include "chrome/test/base/interactive_test_utils.h"
#include "chrome/test/base/ui_test_utils.h"
#include "chromeos/chromeos_switches.h"
#include "content/public/browser/notification_observer.h"
#include "content/public/browser/notification_registrar.h"
#include "content/public/browser/notification_service.h"
#include "content/public/test/test_utils.h"
#include "google_apis/gaia/gaia_switches.h"
#include "net/test/embedded_test_server/embedded_test_server.h"
#include "net/test/embedded_test_server/http_request.h"
#include "net/test/embedded_test_server/http_response.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
using namespace net::test_server;
namespace {
// Used to add an observer to NotificationService after it's created.
class TestBrowserMainExtraParts
: public ChromeBrowserMainExtraParts,
public content::NotificationObserver {
public:
TestBrowserMainExtraParts()
: webui_visible_(false),
browsing_data_removed_(false),
signin_screen_shown_(false) {}
virtual ~TestBrowserMainExtraParts() {}
// ChromeBrowserMainExtraParts implementation.
virtual void PreEarlyInitialization() OVERRIDE {
registrar_.Add(this, chrome::NOTIFICATION_LOGIN_WEBUI_VISIBLE,
content::NotificationService::AllSources());
registrar_.Add(this, chrome::NOTIFICATION_SESSION_STARTED,
content::NotificationService::AllSources());
registrar_.Add(this, chrome::NOTIFICATION_BROWSING_DATA_REMOVED,
content::NotificationService::AllSources());
}
void set_quit_task(const base::Closure& quit_task) { quit_task_ = quit_task; }
void set_gaia_url(const GURL& url) { gaia_url_ = url; }
private:
// Overridden from content::NotificationObserver:
virtual void Observe(int type,
const content::NotificationSource& source,
const content::NotificationDetails& details) OVERRIDE {
if (type == chrome::NOTIFICATION_LOGIN_WEBUI_VISIBLE) {
LOG(INFO) << "NOTIFICATION_LOGIN_WEBUI_VISIBLE";
webui_visible_ = true;
if (browsing_data_removed_ && !signin_screen_shown_) {
signin_screen_shown_ = true;
ShowSigninScreen();
}
} else if (type == chrome::NOTIFICATION_BROWSING_DATA_REMOVED) {
LOG(INFO) << "chrome::NOTIFICATION_BROWSING_DATA_REMOVED";
browsing_data_removed_ = true;
if (webui_visible_ && !signin_screen_shown_) {
signin_screen_shown_ = true;
ShowSigninScreen();
}
} else if (type == chrome::NOTIFICATION_SESSION_STARTED) {
LOG(INFO) << "chrome::NOTIFICATION_SESSION_STARTED";
quit_task_.Run();
} else {
NOTREACHED();
}
}
void ShowSigninScreen() {
chromeos::ExistingUserController* controller =
chromeos::ExistingUserController::current_controller();
CHECK(controller);
chromeos::WebUILoginDisplay* webui_login_display =
static_cast<chromeos::WebUILoginDisplay*>(
controller->login_display());
CHECK(webui_login_display);
webui_login_display->SetGaiaUrlForTesting(gaia_url_);
webui_login_display->ShowSigninScreenForCreds("username", "password");
// TODO(glotov): mock GAIA server (test_server_) should support
// username/password configuration.
}
bool webui_visible_, browsing_data_removed_, signin_screen_shown_;
content::NotificationRegistrar registrar_;
base::Closure quit_task_;
GURL gaia_url_;
DISALLOW_COPY_AND_ASSIGN(TestBrowserMainExtraParts);
};
class TestContentBrowserClient : public chrome::ChromeContentBrowserClient {
public:
TestContentBrowserClient() {}
virtual ~TestContentBrowserClient() {}
virtual content::BrowserMainParts* CreateBrowserMainParts(
const content::MainFunctionParams& parameters) OVERRIDE {
ChromeBrowserMainParts* main_parts = static_cast<ChromeBrowserMainParts*>(
ChromeContentBrowserClient::CreateBrowserMainParts(parameters));
browser_main_extra_parts_ = new TestBrowserMainExtraParts();
main_parts->AddParts(browser_main_extra_parts_);
return main_parts;
}
TestBrowserMainExtraParts* browser_main_extra_parts_;
private:
DISALLOW_COPY_AND_ASSIGN(TestContentBrowserClient);
};
const base::FilePath kServiceLogin("chromeos/service_login.html");
class OobeTest : public chromeos::CrosInProcessBrowserTest {
protected:
virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE {
command_line->AppendSwitch(chromeos::switches::kLoginManager);
command_line->AppendSwitch(chromeos::switches::kForceLoginManagerInTests);
command_line->AppendSwitch(
chromeos::switches::kDisableChromeCaptivePortalDetector);
command_line->AppendSwitchASCII(chromeos::switches::kLoginProfile, "user");
command_line->AppendSwitchASCII(
chromeos::switches::kAuthExtensionPath, "gaia_auth");
}
virtual void SetUpInProcessBrowserTestFixture() OVERRIDE {
content_browser_client_.reset(new TestContentBrowserClient());
original_content_browser_client_ = content::SetBrowserClientForTesting(
content_browser_client_.get());
base::FilePath test_data_dir;
PathService::Get(chrome::DIR_TEST_DATA, &test_data_dir);
CHECK(file_util::ReadFileToString(test_data_dir.Append(kServiceLogin),
&service_login_response_));
}
virtual void SetUpOnMainThread() OVERRIDE {
test_server_ = new EmbeddedTestServer(
content::BrowserThread::GetMessageLoopProxyForThread(
content::BrowserThread::IO));
CHECK(test_server_->InitializeAndWaitUntilReady());
test_server_->RegisterRequestHandler(
base::Bind(&OobeTest::HandleRequest, base::Unretained(this)));
LOG(INFO) << "Set up http server at " << test_server_->base_url();
CHECK(test_server_->port() >= 8040 && test_server_->port() < 8045)
<< "Current manifest_test.json for gaia_login restrictions "
<< "does not allow this port";
const GURL gaia_url("http://localhost:" + test_server_->base_url().port());
content_browser_client_->browser_main_extra_parts_->set_gaia_url(gaia_url);
}
virtual void CleanUpOnMainThread() OVERRIDE {
LOG(INFO) << "Stopping the http server.";
EXPECT_TRUE(test_server_->ShutdownAndWaitUntilComplete());
delete test_server_; // Destructor wants UI thread.
}<|fim▁hole|> scoped_ptr<HttpResponse> HandleRequest(const HttpRequest& request) {
GURL url = test_server_->GetURL(request.relative_url);
LOG(INFO) << "Http request: " << url.spec();
scoped_ptr<HttpResponse> http_response(new HttpResponse());
if (url.path() == "/ServiceLogin") {
http_response->set_code(net::test_server::SUCCESS);
http_response->set_content(service_login_response_);
http_response->set_content_type("text/html");
} else if (url.path() == "/ServiceLoginAuth") {
LOG(INFO) << "Params: " << request.content;
static const char kContinueParam[] = "continue=";
int continue_arg_begin = request.content.find(kContinueParam) +
arraysize(kContinueParam) - 1;
int continue_arg_end = request.content.find("&", continue_arg_begin);
const std::string continue_url = request.content.substr(
continue_arg_begin, continue_arg_end - continue_arg_begin);
http_response->set_code(net::test_server::SUCCESS);
const std::string redirect_js =
"document.location.href = unescape('" + continue_url + "');";
http_response->set_content(
"<HTML><HEAD><SCRIPT>\n" + redirect_js + "\n</SCRIPT></HEAD></HTML>");
http_response->set_content_type("text/html");
} else {
NOTREACHED() << url.path();
}
return http_response.Pass();
}
scoped_ptr<TestContentBrowserClient> content_browser_client_;
content::ContentBrowserClient* original_content_browser_client_;
std::string service_login_response_;
EmbeddedTestServer* test_server_;  // can't use scoped_ptr because destructor
// needs UI thread.
};
// Skips directly to the login screen and spins the message loop until the
// quit task installed on browser_main_extra_parts_ runs — presumably fired
// on successful sign-in (confirm against TestBrowserMainExtraParts).
IN_PROC_BROWSER_TEST_F(OobeTest, NewUser) {
  chromeos::WizardController::SkipPostLoginScreensForTesting();
  chromeos::WizardController* wizard_controller =
      chromeos::WizardController::default_controller();
  CHECK(wizard_controller);
  wizard_controller->SkipToLoginForTesting();
  // Block here; runner->Run() returns once the quit closure is invoked.
  scoped_refptr<content::MessageLoopRunner> runner =
      new content::MessageLoopRunner;
  content_browser_client_->browser_main_extra_parts_->set_quit_task(
      runner->QuitClosure());
  runner->Run();
}
}
from django.contrib import admin

# Expose the application's models in the Django admin site.
from Aplicacio.models import Movie, Character, Location, Team, Power

admin.site.register(Movie)
admin.site.register(Character)
admin.site.register(Location)
admin.site.register(Team)
admin.site.register(Power)
/*!
* OOUI v0.40.3
* https://www.mediawiki.org/wiki/OOUI
*
* Copyright 2011–2020 OOUI Team and other contributors.
* Released under the MIT license
* http://oojs.mit-license.org
*
* Date: 2020-09-02T15:42:49Z
*/
( function ( OO ) {
'use strict';
/**
* An ActionWidget is a {@link OO.ui.ButtonWidget button widget} that executes an action.
* Action widgets are used with OO.ui.ActionSet, which manages the behavior and availability
* of the actions.
*
* Both actions and action sets are primarily used with {@link OO.ui.Dialog Dialogs}.
* Please see the [OOUI documentation on MediaWiki] [1] for more information
* and examples.
*
* [1]: https://www.mediawiki.org/wiki/OOUI/Windows/Process_Dialogs#Action_sets
*
* @class
* @extends OO.ui.ButtonWidget
* @mixins OO.ui.mixin.PendingElement
*
* @constructor
* @param {Object} [config] Configuration options
* @cfg {string} [action] Symbolic name of the action (e.g., ‘continue’ or ‘cancel’).
* @cfg {string[]} [modes] Symbolic names of the modes (e.g., ‘edit’ or ‘read’) in which the action
* should be made available. See the action set's {@link OO.ui.ActionSet#setMode setMode} method
* for more information about setting modes.
* @cfg {boolean} [framed=false] Render the action button with a frame
*/
OO.ui.ActionWidget = function OoUiActionWidget( config ) {
	// Configuration initialization
	// Action buttons are rendered unframed unless configured otherwise.
	config = $.extend( { framed: false }, config );

	// Parent constructor
	OO.ui.ActionWidget.super.call( this, config );

	// Mixin constructors
	OO.ui.mixin.PendingElement.call( this, config );

	// Properties
	// Symbolic name of the action (e.g. 'continue' or 'cancel'); may be ''.
	this.action = config.action || '';
	// Modes in which this action is available (see OO.ui.ActionSet#setMode).
	this.modes = config.modes || [];
	this.width = 0;
	this.height = 0;

	// Initialization
	this.$element.addClass( 'oo-ui-actionWidget' );
};
/* Setup */
OO.inheritClass( OO.ui.ActionWidget, OO.ui.ButtonWidget );
OO.mixinClass( OO.ui.ActionWidget, OO.ui.mixin.PendingElement );
/* Methods */
/**
* Check if the action is configured to be available in the specified `mode`.
*
* @param {string} mode Name of mode
* @return {boolean} The action is configured with the mode
*/
OO.ui.ActionWidget.prototype.hasMode = function ( mode ) {
	// The action supports a mode when it appears in the configured list.
	var i, found = false;
	for ( i = 0; i < this.modes.length; i++ ) {
		if ( this.modes[ i ] === mode ) {
			found = true;
			break;
		}
	}
	return found;
};
/**
* Get the symbolic name of the action (e.g., ‘continue’ or ‘cancel’).
*
* @return {string}
*/
OO.ui.ActionWidget.prototype.getAction = function () {
	// Symbolic action name assigned at construction time (may be '').
	var action = this.action;
	return action;
};
/**
* Get the symbolic name of the mode or modes for which the action is configured to be available.
*
* The current mode is set with the action set's {@link OO.ui.ActionSet#setMode setMode} method.
* Only actions that are configured to be available in the current mode will be visible.
* All other actions are hidden.
*
* @return {string[]}
*/
OO.ui.ActionWidget.prototype.getModes = function () {
	// Hand out a shallow copy so callers cannot mutate internal state.
	return this.modes.concat();
};
/* eslint-disable no-unused-vars */
/**
* ActionSets manage the behavior of the {@link OO.ui.ActionWidget action widgets} that
* comprise them.
* Actions can be made available for specific contexts (modes) and circumstances
* (abilities). Action sets are primarily used with {@link OO.ui.Dialog Dialogs}.
*
* ActionSets contain two types of actions:
*
* - Special: Special actions are the first visible actions with special flags, such as 'safe' and
* 'primary', the default special flags. Additional special flags can be configured in subclasses
* with the static #specialFlags property.
* - Other: Other actions include all non-special visible actions.
*
* See the [OOUI documentation on MediaWiki][1] for more information.
*
* @example
* // Example: An action set used in a process dialog
* function MyProcessDialog( config ) {
* MyProcessDialog.super.call( this, config );
* }
* OO.inheritClass( MyProcessDialog, OO.ui.ProcessDialog );
* MyProcessDialog.static.title = 'An action set in a process dialog';
* MyProcessDialog.static.name = 'myProcessDialog';
* // An action set that uses modes ('edit' and 'help' mode, in this example).
* MyProcessDialog.static.actions = [
* {
* action: 'continue',
* modes: 'edit',
* label: 'Continue',
* flags: [ 'primary', 'progressive' ]
* },
* { action: 'help', modes: 'edit', label: 'Help' },
* { modes: 'edit', label: 'Cancel', flags: 'safe' },
* { action: 'back', modes: 'help', label: 'Back', flags: 'safe' }
* ];
*
* MyProcessDialog.prototype.initialize = function () {
* MyProcessDialog.super.prototype.initialize.apply( this, arguments );
* this.panel1 = new OO.ui.PanelLayout( { padded: true, expanded: false } );
* this.panel1.$element.append( '<p>This dialog uses an action set (continue, help, ' +
* 'cancel, back) configured with modes. This is edit mode. Click \'help\' to see ' +
* 'help mode.</p>' );
* this.panel2 = new OO.ui.PanelLayout( { padded: true, expanded: false } );
* this.panel2.$element.append( '<p>This is help mode. Only the \'back\' action widget ' +
* 'is configured to be visible here. Click \'back\' to return to \'edit\' mode.' +
* '</p>' );
* this.stackLayout = new OO.ui.StackLayout( {
* items: [ this.panel1, this.panel2 ]
* } );
* this.$body.append( this.stackLayout.$element );
* };
* MyProcessDialog.prototype.getSetupProcess = function ( data ) {
* return MyProcessDialog.super.prototype.getSetupProcess.call( this, data )
* .next( function () {
* this.actions.setMode( 'edit' );
* }, this );
* };
* MyProcessDialog.prototype.getActionProcess = function ( action ) {
* if ( action === 'help' ) {
* this.actions.setMode( 'help' );
* this.stackLayout.setItem( this.panel2 );
* } else if ( action === 'back' ) {
* this.actions.setMode( 'edit' );
* this.stackLayout.setItem( this.panel1 );
* } else if ( action === 'continue' ) {
* var dialog = this;
* return new OO.ui.Process( function () {
* dialog.close();
* } );
* }
* return MyProcessDialog.super.prototype.getActionProcess.call( this, action );
* };
* MyProcessDialog.prototype.getBodyHeight = function () {
* return this.panel1.$element.outerHeight( true );
* };
* var windowManager = new OO.ui.WindowManager();
* $( document.body ).append( windowManager.$element );
* var dialog = new MyProcessDialog( {
* size: 'medium'
* } );
* windowManager.addWindows( [ dialog ] );
* windowManager.openWindow( dialog );
*
* [1]: https://www.mediawiki.org/wiki/OOUI/Windows/Process_Dialogs#Action_sets
*
* @abstract
* @class
* @mixins OO.EventEmitter
*
* @constructor
* @param {Object} [config] Configuration options
*/
OO.ui.ActionSet = function OoUiActionSet( config ) {
	// Configuration initialization
	config = config || {};

	// Mixin constructors
	OO.EventEmitter.call( this );

	// Properties
	// All managed action widgets, in insertion order.
	this.list = [];
	// Category name -> name of the ActionWidget getter used by #organize to
	// index actions for that category (e.g. 'modes' groups by getModes()).
	this.categories = {
		actions: 'getAction',
		flags: 'getFlags',
		modes: 'getModes'
	};
	// Lazily built index: category -> value -> ActionWidget[] (see #organize).
	this.categorized = {};
	// First visible action per special flag (see static specialFlags).
	this.special = {};
	// Visible actions that carry no special flag.
	this.others = [];
	// Whether the caches above are current; cleared on every mutation.
	this.organized = false;
	// Batching flags used to coalesce 'change' events during bulk updates.
	this.changing = false;
	this.changed = false;
};
/* eslint-enable no-unused-vars */
/* Setup */
OO.mixinClass( OO.ui.ActionSet, OO.EventEmitter );
/* Static Properties */
/**
* Symbolic name of the flags used to identify special actions. Special actions are displayed in the
* header of a {@link OO.ui.ProcessDialog process dialog}.
* See the [OOUI documentation on MediaWiki][2] for more information and examples.
*
* [2]:https://www.mediawiki.org/wiki/OOUI/Windows/Process_Dialogs
*
* @abstract
* @static
* @inheritable
* @property {string[]}
*/
OO.ui.ActionSet.static.specialFlags = [ 'safe', 'primary' ];
/* Events */
/**
* @event click
*
* A 'click' event is emitted when an action is clicked.
*
* @param {OO.ui.ActionWidget} action Action that was clicked
*/
/**
* @event add
*
* An 'add' event is emitted when actions are {@link #method-add added} to the action set.
*
* @param {OO.ui.ActionWidget[]} added Actions added
*/
/**
* @event remove
*
* A 'remove' event is emitted when actions are {@link #method-remove removed}
* or {@link #clear cleared}.
*
* @param {OO.ui.ActionWidget[]} removed Actions removed
*/
/**
* @event change
*
* A 'change' event is emitted when actions are {@link #method-add added}, {@link #clear cleared},
* or {@link #method-remove removed} from the action set or when the {@link #setMode mode}
* is changed.
*
*/
/* Methods */
/**
* Handle action change events.
*
* @private
* @fires change
*/
OO.ui.ActionSet.prototype.onActionChange = function () {
	// Any action change invalidates the cached organization.
	this.organized = false;
	if ( !this.changing ) {
		// Not inside a batched update: notify listeners immediately.
		this.emit( 'change' );
	} else {
		// Inside a batch (add/remove/forEach): defer to a single event.
		this.changed = true;
	}
};
/**
* Check if an action is one of the special actions.
*
* @param {OO.ui.ActionWidget} action Action to check
* @return {boolean} Action is special
*/
OO.ui.ActionSet.prototype.isSpecial = function ( action ) {
	// An action is special when it is stored under any special flag.
	var i, flags = Object.keys( this.special );
	for ( i = 0; i < flags.length; i++ ) {
		if ( this.special[ flags[ i ] ] === action ) {
			return true;
		}
	}
	return false;
};
/**
* Get action widgets based on the specified filter: ‘actions’, ‘flags’, ‘modes’, ‘visible’,
* or ‘disabled’.
*
* @param {Object} [filters] Filters to use, omit to get all actions
* @param {string|string[]} [filters.actions] Actions that action widgets must have
* @param {string|string[]} [filters.flags] Flags that action widgets must have (e.g., 'safe')
* @param {string|string[]} [filters.modes] Modes that action widgets must have
* @param {boolean} [filters.visible] Action widgets must be visible
* @param {boolean} [filters.disabled] Action widgets must be disabled
* @return {OO.ui.ActionWidget[]} Action widgets matching all criteria
*/
OO.ui.ActionSet.prototype.get = function ( filters ) {
	var i, len, list, category, actions, index, match, matches;

	if ( filters ) {
		this.organize();

		// Collect category candidates
		// Candidates are a union over every listed value of every listed
		// category; duplicates introduced here are pruned further down.
		matches = [];
		for ( category in this.categorized ) {
			list = filters[ category ];
			if ( list ) {
				if ( !Array.isArray( list ) ) {
					list = [ list ];
				}
				for ( i = 0, len = list.length; i < len; i++ ) {
					actions = this.categorized[ category ][ list[ i ] ];
					if ( Array.isArray( actions ) ) {
						matches.push.apply( matches, actions );
					}
				}
			}
		}

		// Remove by boolean filters
		// Splice in place, compensating the index and length after removal.
		for ( i = 0, len = matches.length; i < len; i++ ) {
			match = matches[ i ];
			if (
				( filters.visible !== undefined && match.isVisible() !== filters.visible ) ||
				( filters.disabled !== undefined && match.isDisabled() !== filters.disabled )
			) {
				matches.splice( i, 1 );
				len--;
				i--;
			}
		}

		// Remove duplicates
		// Keep the first occurrence; drop later ones (found via lastIndexOf).
		for ( i = 0, len = matches.length; i < len; i++ ) {
			match = matches[ i ];
			index = matches.lastIndexOf( match );
			while ( index !== i ) {
				matches.splice( index, 1 );
				len--;
				index = matches.lastIndexOf( match );
			}
		}

		return matches;
	}
	// No filters: return a shallow copy of the full action list.
	return this.list.slice();
};
/**
* Get 'special' actions.
*
* Special actions are the first visible action widgets with special flags, such as 'safe' and
* 'primary'.
* Special flags can be configured in subclasses by changing the static #specialFlags property.
*
* @return {OO.ui.ActionWidget[]|null} 'Special' action widgets.
*/
OO.ui.ActionSet.prototype.getSpecial = function () {
	var specials;
	// Make sure the special/others caches are up to date first.
	this.organize();
	// Shallow-copy so callers cannot corrupt the cached map.
	specials = $.extend( {}, this.special );
	return specials;
};
/**
* Get 'other' actions.
*
* Other actions include all non-special visible action widgets.
*
* @return {OO.ui.ActionWidget[]} 'Other' action widgets
*/
OO.ui.ActionSet.prototype.getOthers = function () {
	// Refresh the caches, then hand out a defensive copy of the list.
	this.organize();
	return this.others.concat();
};
/**
* Set the mode (e.g., ‘edit’ or ‘view’). Only {@link OO.ui.ActionWidget#modes actions} configured
* to be available in the specified mode will be made visible. All other actions will be hidden.
*
* @param {string} mode The mode. Only actions configured to be available in the specified
* mode will be made visible.
* @chainable
* @return {OO.ui.ActionSet} The widget, for chaining
* @fires toggle
* @fires change
*/
OO.ui.ActionSet.prototype.setMode = function ( mode ) {
	// Suppress per-action change handling while toggling visibility.
	this.changing = true;
	this.list.forEach( function ( action ) {
		// Show only the actions configured for the requested mode.
		action.toggle( action.hasMode( mode ) );
	} );

	this.organized = false;
	this.changing = false;
	// Emit one aggregated notification for the whole mode switch.
	this.emit( 'change' );

	return this;
};
/**
* Set the abilities of the specified actions.
*
* Action widgets that are configured with the specified actions will be enabled
* or disabled based on the boolean values specified in the `actions`
* parameter.
*
* @param {Object.<string,boolean>} actions A list keyed by action name with boolean
* values that indicate whether or not the action should be enabled.
* @chainable
* @return {OO.ui.ActionSet} The widget, for chaining
*/
OO.ui.ActionSet.prototype.setAbilities = function ( actions ) {
	// Enable or disable each widget whose action name appears in the map.
	this.list.forEach( function ( item ) {
		var enabled = actions[ item.getAction() ];
		if ( enabled !== undefined ) {
			// The map holds "enabled" booleans; widgets take "disabled".
			item.setDisabled( !enabled );
		}
	} );

	return this;
};
/**
* Executes a function once per action.
*
* When making changes to multiple actions, use this method instead of iterating over the actions
* manually to defer emitting a #change event until after all actions have been changed.
*
* @param {Object|null} filter Filters to use to determine which actions to iterate over; see #get
* @param {Function} callback Callback to run for each action; callback is invoked with three
* arguments: the action, the action's index, the list of actions being iterated over
* @chainable
* @return {OO.ui.ActionSet} The widget, for chaining
*/
OO.ui.ActionSet.prototype.forEach = function ( filter, callback ) {
	// Resolve the actions first (organize() emits nothing), then batch the
	// callback run so at most one 'change' event fires afterwards.
	var selected = this.get( filter );
	this.changed = false;
	this.changing = true;
	selected.forEach( callback );
	this.changing = false;
	// Emit the deferred notification only if some action actually changed.
	if ( this.changed ) {
		this.emit( 'change' );
	}

	return this;
};
/**
* Add action widgets to the action set.
*
* @param {OO.ui.ActionWidget[]} actions Action widgets to add
* @chainable
* @return {OO.ui.ActionSet} The widget, for chaining
* @fires add
* @fires change
*/
OO.ui.ActionSet.prototype.add = function ( actions ) {
	var set = this;

	this.changing = true;
	actions.forEach( function ( action ) {
		// Relay clicks with the action as payload; watch visibility toggles
		// so the cached organization is invalidated.
		action.connect( set, {
			click: [ 'emit', 'click', action ],
			toggle: [ 'onActionChange' ]
		} );
		set.list.push( action );
	} );
	this.organized = false;
	this.emit( 'add', actions );
	this.changing = false;
	this.emit( 'change' );

	return this;
};
/**
* Remove action widgets from the set.
*
* To remove all actions, you may wish to use the #clear method instead.
*
* @param {OO.ui.ActionWidget[]} actions Action widgets to remove
* @chainable
* @return {OO.ui.ActionSet} The widget, for chaining
* @fires remove
* @fires change
*/
OO.ui.ActionSet.prototype.remove = function ( actions ) {
	var set = this;

	this.changing = true;
	actions.forEach( function ( action ) {
		var position = set.list.indexOf( action );
		// Silently skip actions that are not part of this set.
		if ( position !== -1 ) {
			action.disconnect( set );
			set.list.splice( position, 1 );
		}
	} );
	this.organized = false;
	this.emit( 'remove', actions );
	this.changing = false;
	this.emit( 'change' );

	return this;
};
/**
* Remove all action widgets from the set.
*
* To remove only specified actions, use the {@link #method-remove remove} method instead.
*
* @chainable
* @return {OO.ui.ActionSet} The widget, for chaining
* @fires remove
* @fires change
*/
OO.ui.ActionSet.prototype.clear = function () {
	// Snapshot the current contents for the 'remove' notification.
	var removed = this.list.slice();

	this.changing = true;
	removed.forEach( function ( action ) {
		action.disconnect( this );
	}, this );

	this.list = [];

	this.organized = false;
	this.emit( 'remove', removed );
	this.changing = false;
	this.emit( 'change' );

	return this;
};
/**
* Organize actions.
*
* This is called whenever organized information is requested. It will only reorganize the actions
* if something has changed since the last time it ran.
*
* @private
* @chainable
* @return {OO.ui.ActionSet} The widget, for chaining
*/
OO.ui.ActionSet.prototype.organize = function () {
	var i, iLen, j, jLen, flag, action, category, list, item, special,
		specialFlags = this.constructor.static.specialFlags;

	if ( !this.organized ) {
		// Rebuild all caches from scratch; only visible actions count.
		this.categorized = {};
		this.special = {};
		this.others = [];
		for ( i = 0, iLen = this.list.length; i < iLen; i++ ) {
			action = this.list[ i ];
			if ( action.isVisible() ) {
				// Populate categories
				for ( category in this.categories ) {
					if ( !this.categorized[ category ] ) {
						this.categorized[ category ] = {};
					}
					// Invoke the getter named in this.categories
					// (e.g. 'getModes'); normalize scalars to arrays.
					list = action[ this.categories[ category ] ]();
					if ( !Array.isArray( list ) ) {
						list = [ list ];
					}
					for ( j = 0, jLen = list.length; j < jLen; j++ ) {
						item = list[ j ];
						if ( !this.categorized[ category ][ item ] ) {
							this.categorized[ category ][ item ] = [];
						}
						this.categorized[ category ][ item ].push( action );
					}
				}
				// Populate special/others
				// Only the first visible action with a given special flag
				// becomes the 'special' action for that flag.
				special = false;
				for ( j = 0, jLen = specialFlags.length; j < jLen; j++ ) {
					flag = specialFlags[ j ];
					if ( !this.special[ flag ] && action.hasFlag( flag ) ) {
						this.special[ flag ] = action;
						special = true;
						break;
					}
				}
				if ( !special ) {
					this.others.push( action );
				}
			}
		}
		this.organized = true;
	}

	return this;
};
/**
* Errors contain a required message (either a string or jQuery selection) that is used to describe
* what went wrong in a {@link OO.ui.Process process}. The error's #recoverable and #warning
* configurations are used to customize the appearance and functionality of the error interface.
*
* The basic error interface contains a formatted error message as well as two buttons: 'Dismiss'
* and 'Try again' (i.e., the error is 'recoverable' by default). If the error is not recoverable,
* the 'Try again' button will not be rendered and the widget that initiated the failed process will
* be disabled.
*
* If the error is a warning, the error interface will include a 'Dismiss' and a 'Continue' button,
* which will try the process again.
*
* For an example of error interfaces, please see the [OOUI documentation on MediaWiki][1].
*
* [1]: https://www.mediawiki.org/wiki/OOUI/Windows/Process_Dialogs#Processes_and_errors
*
* @class
*
* @constructor
* @param {string|jQuery} message Description of error
* @param {Object} [config] Configuration options
* @cfg {boolean} [recoverable=true] Error is recoverable.
* By default, errors are recoverable, and users can try the process again.
* @cfg {boolean} [warning=false] Error is a warning.
* If the error is a warning, the error interface will include a
* 'Dismiss' and a 'Continue' button. It is the responsibility of the developer to ensure that the
* warning is not triggered a second time if the user chooses to continue.
*/
OO.ui.Error = function OoUiError( message, config ) {
	// Allow passing positional parameters inside the config object
	if ( OO.isPlainObject( message ) && config === undefined ) {
		config = message;
		message = config.message;
	}

	// Configuration initialization
	config = config || {};

	// Properties
	// Keep jQuery messages as-is; coerce everything else to a string.
	this.message = message instanceof $ ? message : String( message );
	// Recoverable unless explicitly configured with `recoverable: false`.
	this.recoverable = config.recoverable === undefined || !!config.recoverable;
	this.warning = !!config.warning;
};
/* Setup */
OO.initClass( OO.ui.Error );
/* Methods */
/**
* Check if the error is recoverable.
*
* If the error is recoverable, users are able to try the process again.
*
* @return {boolean} Error is recoverable
*/
OO.ui.Error.prototype.isRecoverable = function () {
	// Set once in the constructor; already a strict boolean.
	return this.recoverable === true;
};
/**
* Check if the error is a warning.
*
* If the error is a warning, the error interface will include a 'Dismiss' and a 'Continue' button.
*
* @return {boolean} Error is warning
*/
OO.ui.Error.prototype.isWarning = function () {
	// Set once in the constructor; already a strict boolean.
	return this.warning === true;
};
/**
* Get error message as DOM nodes.
*
* @return {jQuery} Error message in DOM nodes
*/
OO.ui.Error.prototype.getMessage = function () {
	if ( this.message instanceof $ ) {
		// jQuery message: clone so callers can safely reuse or modify it.
		return this.message.clone();
	}
	// Plain string: render via a detached node so it is treated as text.
	return $( '<div>' ).text( this.message ).contents();
};
/**
* Get the error message text.
*
* @return {string} Error message
*/
OO.ui.Error.prototype.getMessageText = function () {
	if ( this.message instanceof $ ) {
		// Flatten a jQuery message to its visible text.
		return this.message.text();
	}
	return this.message;
};
/**
* A Process is a list of steps that are called in sequence. The step can be a number, a
* promise (jQuery, native, or any other “thenable”), or a function:
*
* - **number**: the process will wait for the specified number of milliseconds before proceeding.
* - **promise**: the process will continue to the next step when the promise is successfully
* resolved or stop if the promise is rejected.
* - **function**: the process will execute the function. The process will stop if the function
* returns either a boolean `false` or a promise that is rejected; if the function returns a
* number, the process will wait for that number of milliseconds before proceeding.
*
* If the process fails, an {@link OO.ui.Error error} is generated. Depending on how the error is
* configured, users can dismiss the error and try the process again, or not. If a process is
* stopped, its remaining steps will not be performed.
*
* @class
*
* @constructor
* @param {number|jQuery.Promise|Function} step Number of milliseconds to wait before proceeding,
* promise that must be resolved before proceeding, or a function to execute. See #createStep for
 * more information.
* @param {Object} [context=null] Execution context of the function. The context is ignored if the
* step is a number or promise.
*/
OO.ui.Process = function ( step, context ) {
	// Properties
	// Ordered list of step objects ({ callback, context }), run by #execute.
	this.steps = [];

	// Initialization
	if ( step !== undefined ) {
		// Seed the process with the initial step, when one is given.
		this.next( step, context );
	}
};
/* Setup */
OO.initClass( OO.ui.Process );
/* Methods */
/**
* Start the process.
*
* @return {jQuery.Promise} Promise that is resolved when all steps have successfully completed.
* If any of the steps return a promise that is rejected or a boolean false, this promise is
* rejected and any remaining steps are not performed.
*/
OO.ui.Process.prototype.execute = function () {
	var i, len, promise;

	/**
	 * Continue execution.
	 *
	 * @ignore
	 * @param {Array} step A function and the context it should be called in
	 * @return {Function} Function that continues the process
	 */
	function proceed( step ) {
		return function () {
			// Execute step in the correct context
			var deferred,
				result = step.callback.call( step.context );

			if ( result === false ) {
				// Use rejected promise for boolean false results
				return $.Deferred().reject( [] ).promise();
			}
			if ( typeof result === 'number' ) {
				if ( result < 0 ) {
					throw new Error( 'Cannot go back in time: flux capacitor is out of service' );
				}
				// Use a delayed promise for numbers, expecting them to be in milliseconds
				deferred = $.Deferred();
				setTimeout( deferred.resolve, result );
				return deferred.promise();
			}
			if ( result instanceof OO.ui.Error ) {
				// Use rejected promise for error
				return $.Deferred().reject( [ result ] ).promise();
			}
			if ( Array.isArray( result ) && result.length && result[ 0 ] instanceof OO.ui.Error ) {
				// Use rejected promise for list of errors
				return $.Deferred().reject( result ).promise();
			}
			// Duck-type the object to see if it can produce a promise
			if ( result && typeof result.then === 'function' ) {
				// Use a promise generated from the result
				return $.when( result ).promise();
			}
			// Use resolved promise for other results
			return $.Deferred().resolve().promise();
		};
	}

	if ( this.steps.length ) {
		// Generate a chain reaction of promises
		// Each subsequent step starts only when the previous one resolves;
		// a rejection short-circuits the rest of the chain.
		promise = proceed( this.steps[ 0 ] )();
		for ( i = 1, len = this.steps.length; i < len; i++ ) {
			promise = promise.then( proceed( this.steps[ i ] ) );
		}
	} else {
		// An empty process succeeds immediately.
		promise = $.Deferred().resolve().promise();
	}

	return promise;
};
/**
* Create a process step.
*
* @private
* @param {number|jQuery.Promise|Function} step
*
* - Number of milliseconds to wait before proceeding
* - Promise that must be resolved before proceeding
* - Function to execute
* - If the function returns a boolean false the process will stop
* - If the function returns a promise, the process will continue to the next
* step when the promise is resolved or stop if the promise is rejected
* - If the function returns a number, the process will wait for that number of
* milliseconds before proceeding
* @param {Object} [context=null] Execution context of the function. The context is
* ignored if the step is a number or promise.
* @return {Object} Step object, with `callback` and `context` properties
*/
OO.ui.Process.prototype.createStep = function ( step, context ) {
	// Numbers and thenables are wrapped in a supplier; the context is moot.
	// NOTE: this check must run before the function check, so a callable
	// with a .then property is still treated as a thenable.
	if ( typeof step === 'number' || typeof step.then === 'function' ) {
		return {
			callback: function () {
				return step;
			},
			context: null
		};
	}
	if ( typeof step !== 'function' ) {
		throw new Error( 'Cannot create process step: number, promise or function expected' );
	}
	// Plain functions run as-is in the caller-supplied context.
	return {
		callback: step,
		context: context
	};
};
/**
* Add step to the beginning of the process.
*
* @inheritdoc #createStep
* @return {OO.ui.Process} this
* @chainable
*/
OO.ui.Process.prototype.first = function ( step, context ) {
	// Prepend so this step runs before all previously queued ones.
	var created = this.createStep( step, context );
	this.steps.unshift( created );
	return this;
};
/**
* Add step to the end of the process.
*
* @inheritdoc #createStep
* @return {OO.ui.Process} this
* @chainable
*/
OO.ui.Process.prototype.next = function ( step, context ) {
	// Append so this step runs after all previously queued ones.
	var created = this.createStep( step, context );
	this.steps.push( created );
	return this;
};
/**
* A window instance represents the life cycle for one single opening of a window
* until its closing.
*
* While OO.ui.WindowManager will reuse OO.ui.Window objects, each time a window is
* opened, a new lifecycle starts.
*
* For more information, please see the [OOUI documentation on MediaWiki] [1].
*
* [1]: https://www.mediawiki.org/wiki/OOUI/Windows
*
* @class
*
* @constructor
*/
OO.ui.WindowInstance = function OoUiWindowInstance() {
	// One deferred per lifecycle stage; the is*() methods below inspect
	// their states directly.
	var deferreds = {
		opening: $.Deferred(),
		opened: $.Deferred(),
		closing: $.Deferred(),
		closed: $.Deferred()
	};

	/**
	 * @private
	 * @property {Object}
	 */
	this.deferreds = deferreds;

	// Set these up as chained promises so that rejecting of
	// an earlier stage automatically rejects the subsequent
	// would-be stages as well.

	/**
	 * @property {jQuery.Promise}
	 */
	this.opening = deferreds.opening.promise();
	/**
	 * @property {jQuery.Promise}
	 */
	this.opened = this.opening.then( function () {
		return deferreds.opened;
	} );
	/**
	 * @property {jQuery.Promise}
	 */
	this.closing = this.opened.then( function () {
		return deferreds.closing;
	} );
	/**
	 * @property {jQuery.Promise}
	 */
	this.closed = this.closing.then( function () {
		return deferreds.closed;
	} );
};
/* Setup */
OO.initClass( OO.ui.WindowInstance );
/**
* Check if window is opening.
*
* @return {boolean} Window is opening
*/
OO.ui.WindowInstance.prototype.isOpening = function () {
	// Opening lasts until the 'opened' stage has settled.
	var state = this.deferreds.opened.state();
	return state === 'pending';
};
/**
* Check if window is opened.
*
* @return {boolean} Window is opened
*/
OO.ui.WindowInstance.prototype.isOpened = function () {
	// Opened means setup finished and closing has not started yet.
	var d = this.deferreds;
	return d.opened.state() === 'resolved' && d.closing.state() === 'pending';
};
/**
* Check if window is closing.
*
* @return {boolean} Window is closing
*/
OO.ui.WindowInstance.prototype.isClosing = function () {
	// Closing has begun but teardown has not completed yet.
	var d = this.deferreds;
	return d.closing.state() === 'resolved' && d.closed.state() === 'pending';
};
/**
* Check if window is closed.
*
* @return {boolean} Window is closed
*/
OO.ui.WindowInstance.prototype.isClosed = function () {
	// The final stage of the lifecycle has settled successfully.
	var state = this.deferreds.closed.state();
	return state === 'resolved';
};
/**
* Window managers are used to open and close {@link OO.ui.Window windows} and control their
* presentation. Managed windows are mutually exclusive. If a new window is opened while a current
* window is opening or is opened, the current window will be closed and any on-going
* {@link OO.ui.Process process} will be cancelled. Windows
* themselves are persistent and—rather than being torn down when closed—can be repopulated with the
* pertinent data and reused.
*
* Over the lifecycle of a window, the window manager makes available three promises: `opening`,
* `opened`, and `closing`, which represent the primary stages of the cycle:
*
* **Opening**: the opening stage begins when the window manager’s #openWindow or a window’s
* {@link OO.ui.Window#open open} method is used, and the window manager begins to open the window.
*
* - an `opening` event is emitted with an `opening` promise
* - the #getSetupDelay method is called and the returned value is used to time a pause in execution
* before the window’s {@link OO.ui.Window#method-setup setup} method is called which executes
* OO.ui.Window#getSetupProcess.
* - a `setup` progress notification is emitted from the `opening` promise
* - the #getReadyDelay method is called the returned value is used to time a pause in execution
* before the window’s {@link OO.ui.Window#method-ready ready} method is called which executes
* OO.ui.Window#getReadyProcess.
* - a `ready` progress notification is emitted from the `opening` promise
* - the `opening` promise is resolved with an `opened` promise
*
* **Opened**: the window is now open.
*
* **Closing**: the closing stage begins when the window manager's #closeWindow or the
* window's {@link OO.ui.Window#close close} methods is used, and the window manager begins
* to close the window.
*
* - the `opened` promise is resolved with `closing` promise and a `closing` event is emitted
* - the #getHoldDelay method is called and the returned value is used to time a pause in execution
* before the window's {@link OO.ui.Window#getHoldProcess getHoldProcess} method is called on the
* window and its result executed
* - a `hold` progress notification is emitted from the `closing` promise
* - the #getTeardownDelay() method is called and the returned value is used to time a pause in
* execution before the window's {@link OO.ui.Window#getTeardownProcess getTeardownProcess} method
* is called on the window and its result executed
* - a `teardown` progress notification is emitted from the `closing` promise
* - the `closing` promise is resolved. The window is now closed
*
* See the [OOUI documentation on MediaWiki][1] for more information.
*
* [1]: https://www.mediawiki.org/wiki/OOUI/Windows/Window_managers
*
* @class
* @extends OO.ui.Element
* @mixins OO.EventEmitter
*
* @constructor
* @param {Object} [config] Configuration options
* @cfg {OO.Factory} [factory] Window factory to use for automatic instantiation
* Note that window classes that are instantiated with a factory must have
* a {@link OO.ui.Dialog#static-name static name} property that specifies a symbolic name.
* @cfg {boolean} [modal=true] Prevent interaction outside the dialog
*/
OO.ui.WindowManager = function OoUiWindowManager( config ) {
	// Configuration initialization
	config = config || {};

	// Parent constructor
	OO.ui.WindowManager.super.call( this, config );

	// Mixin constructors
	OO.EventEmitter.call( this );

	// Properties
	// Optional factory for instantiating windows by symbolic name.
	this.factory = config.factory;
	// Modal unless explicitly configured with `modal: false`.
	this.modal = config.modal === undefined || !!config.modal;
	// Managed windows, keyed by symbolic name.
	this.windows = {};
	// Deprecated placeholder promise given to compatOpening in openWindow()
	// that is resolved in closeWindow().
	this.compatOpened = null;
	this.preparingToOpen = null;
	this.preparingToClose = null;
	this.currentWindow = null;
	this.globalEvents = false;
	this.$returnFocusTo = null;
	this.$ariaHidden = null;
	this.onWindowResizeTimeout = null;
	// Pre-bound handlers so they can be removed again later.
	this.onWindowResizeHandler = this.onWindowResize.bind( this );
	this.afterWindowResizeHandler = this.afterWindowResize.bind( this );

	// Initialization
	this.$element
		.addClass( 'oo-ui-windowManager' )
		.toggleClass( 'oo-ui-windowManager-modal', this.modal );
	if ( this.modal ) {
		// Hidden from assistive technology until a window is shown.
		this.$element.attr( 'aria-hidden', true );
	}
};
/* Setup */

// Inherit from OO.ui.Element and mix in event-emitter behaviour.
OO.inheritClass( OO.ui.WindowManager, OO.ui.Element );
OO.mixinClass( OO.ui.WindowManager, OO.EventEmitter );
/* Events */
/**
* An 'opening' event is emitted when the window begins to be opened.
*
* @event opening
* @param {OO.ui.Window} win Window that's being opened
* @param {jQuery.Promise} opened A promise resolved with a value when the window is opened
* successfully. This promise also emits `setup` and `ready` notifications. When this promise is
 * resolved, the first argument of the value is a 'closed' promise, the second argument is the
* opening data.
* @param {Object} data Window opening data
*/
/**
* A 'closing' event is emitted when the window begins to be closed.
*
* @event closing
* @param {OO.ui.Window} win Window that's being closed
* @param {jQuery.Promise} closed A promise resolved with a value when the window is closed
* successfully. This promise also emits `hold` and `teardown` notifications. When this promise is
* resolved, the first argument of its value is the closing data.
* @param {Object} data Window closing data
*/
/**
* A 'resize' event is emitted when a window is resized.
*
* @event resize
* @param {OO.ui.Window} win Window that was resized
*/
/* Static Properties */

/**
 * Map of the symbolic name of each window size and its CSS properties.
 *
 * Looked up by OO.ui.Window#getSizeProperties via the window's manager.
 *
 * @static
 * @inheritable
 * @property {Object}
 */
OO.ui.WindowManager.static.sizes = {
	small: {
		width: 300
	},
	medium: {
		width: 500
	},
	large: {
		width: 700
	},
	larger: {
		width: 900
	},
	full: {
		// These can be non-numeric because they are never used in calculations
		width: '100%',
		height: '100%'
	}
};

/**
 * Symbolic name of the default window size.
 *
 * The default size is used if the window's requested size is not recognized.
 *
 * @static
 * @inheritable
 * @property {string}
 */
OO.ui.WindowManager.static.defaultSize = 'medium';
/* Methods */

/**
 * Handle window resize events.
 *
 * Debounced: repeated resize events within 200 ms collapse into a single
 * call to #afterWindowResize. The event argument is not used.
 *
 * @private
 */
OO.ui.WindowManager.prototype.onWindowResize = function () {
	// Restart the debounce timer on every resize event
	clearTimeout( this.onWindowResizeTimeout );
	this.onWindowResizeTimeout = setTimeout( this.afterWindowResizeHandler, 200 );
};
/**
 * Perform the debounced work for a window resize: update the current
 * window's size and restore focus if the layout change moved it.
 *
 * @private
 */
OO.ui.WindowManager.prototype.afterWindowResize = function () {
	var focusedBefore = document.activeElement;
	if ( !this.currentWindow ) {
		return;
	}
	this.updateWindowSize( this.currentWindow );
	// On Android (Chrome and Firefox) inputs can lose focus when the layout
	// changes on resize, see T162127 — put focus back where it was.
	if ( focusedBefore !== document.activeElement ) {
		focusedBefore.focus();
	}
};
/**
 * Check whether the given window is the current window and is still opening.
 *
 * @param {OO.ui.Window} win Window to check
 * @return {boolean} Window is opening
 */
OO.ui.WindowManager.prototype.isOpening = function ( win ) {
	return !!this.lifecycle &&
		win === this.currentWindow &&
		this.lifecycle.isOpening();
};
/**
 * Check whether the given window is the current window and is closing.
 *
 * @param {OO.ui.Window} win Window to check
 * @return {boolean} Window is closing
 */
OO.ui.WindowManager.prototype.isClosing = function ( win ) {
	return !!this.lifecycle &&
		win === this.currentWindow &&
		this.lifecycle.isClosing();
};
/**
 * Check whether the given window is the current window and is fully opened.
 *
 * @param {OO.ui.Window} win Window to check
 * @return {boolean} Window is opened
 */
OO.ui.WindowManager.prototype.isOpened = function ( win ) {
	return !!this.lifecycle &&
		win === this.currentWindow &&
		this.lifecycle.isOpened();
};
/**
 * Check if a window is being managed by this manager.
 *
 * @param {OO.ui.Window} win Window to check
 * @return {boolean} Window is being managed
 */
OO.ui.WindowManager.prototype.hasWindow = function ( win ) {
	var manager = this;
	return Object.keys( this.windows ).some( function ( name ) {
		return manager.windows[ name ] === win;
	} );
};
/**
 * Get the number of milliseconds to wait after opening begins before executing the ‘setup’ process.
 *
 * @param {OO.ui.Window} win Window being opened
 * @param {Object} [data] Window opening data
 * @return {number} Milliseconds to wait
 */
OO.ui.WindowManager.prototype.getSetupDelay = function () {
	// Setup runs immediately; subclasses may override to introduce a delay
	return 0;
};
/**
 * Get the number of milliseconds to wait after setup has finished before executing the ‘ready’
 * process.
 *
 * @param {OO.ui.Window} win Window being opened
 * @param {Object} [data] Window opening data
 * @return {number} Milliseconds to wait
 */
OO.ui.WindowManager.prototype.getReadyDelay = function () {
	// Modal managers wait for the theme's dialog opening transition to finish
	return this.modal ? OO.ui.theme.getDialogTransitionDuration() : 0;
};
/**
 * Get the number of milliseconds to wait after closing has begun before executing the 'hold'
 * process.
 *
 * @param {OO.ui.Window} win Window being closed
 * @param {Object} [data] Window closing data
 * @return {number} Milliseconds to wait
 */
OO.ui.WindowManager.prototype.getHoldDelay = function () {
	// Hold runs immediately; subclasses may override to introduce a delay
	return 0;
};
/**
 * Get the number of milliseconds to wait after the ‘hold’ process has finished before
 * executing the ‘teardown’ process.
 *
 * @param {OO.ui.Window} win Window being closed
 * @param {Object} [data] Window closing data
 * @return {number} Milliseconds to wait
 */
OO.ui.WindowManager.prototype.getTeardownDelay = function () {
	// Modal managers wait for the theme's dialog closing transition to finish
	return this.modal ? OO.ui.theme.getDialogTransitionDuration() : 0;
};
/**
 * Get a window by its symbolic name.
 *
 * If the window is not yet instantiated and its symbolic name is recognized by a factory, it
 * will be instantiated and added to the window manager automatically. Please see the [OOUI
 * documentation on MediaWiki][3] for more information about using factories.
 * [3]: https://www.mediawiki.org/wiki/OOUI/Windows/Window_managers
 *
 * @param {string} name Symbolic name of the window
 * @return {jQuery.Promise} Promise resolved with the matching window, or rejected with an
 *   OO.ui.Error when the name is unknown to both the manager and the factory
 */
OO.ui.WindowManager.prototype.getWindow = function ( name ) {
	var created,
		deferred = $.Deferred(),
		existing = this.windows[ name ];
	if ( existing instanceof OO.ui.Window ) {
		// Already managed: resolve straight away
		deferred.resolve( existing );
	} else if ( !this.factory ) {
		deferred.reject( new OO.ui.Error(
			'Cannot get unmanaged window: symbolic name unrecognized as a managed window'
		) );
	} else if ( !this.factory.lookup( name ) ) {
		deferred.reject( new OO.ui.Error(
			'Cannot auto-instantiate window: symbolic name is unrecognized by the factory'
		) );
	} else {
		// Auto-instantiate from the factory and register the new window
		created = this.factory.create( name );
		this.addWindows( [ created ] );
		deferred.resolve( created );
	}
	return deferred.promise();
};
/**
 * Get current window.
 *
 * Returns `null` while no window is opening, opened or closing.
 *
 * @return {OO.ui.Window|null} Currently opening/opened/closing window
 */
OO.ui.WindowManager.prototype.getCurrentWindow = function () {
	return this.currentWindow;
};
/**
 * Open a window.
 *
 * @param {OO.ui.Window|string} win Window object or symbolic name of window to open
 * @param {Object} [data] Window opening data
 * @param {jQuery|null} [data.$returnFocusTo] Element to which the window will return focus when
 *  closed. Defaults the current activeElement. If set to null, focus isn't changed on close.
 * @param {OO.ui.WindowInstance} [lifecycle] Used internally
 * @param {jQuery.Deferred} [compatOpening] Used internally
 * @return {OO.ui.WindowInstance} A lifecycle object representing this particular
 *  opening of the window. For backwards-compatibility, the object is also a Thenable that is
 *  resolved when the window is done opening, with nested promise for when closing starts. This
 *  behaviour is deprecated and is not compatible with jQuery 3, see T163510.
 * @fires opening
 */
OO.ui.WindowManager.prototype.openWindow = function ( win, data, lifecycle, compatOpening ) {
	var error,
		manager = this;
	data = data || {};
	// Internal parameter 'lifecycle' allows this method to always return
	// a lifecycle even if the window still needs to be created
	// asynchronously when 'win' is a string.
	lifecycle = lifecycle || new OO.ui.WindowInstance();
	compatOpening = compatOpening || $.Deferred();
	// Turn lifecycle into a Thenable for backwards-compatibility with
	// the deprecated nested-promise behaviour, see T163510.
	[ 'state', 'always', 'catch', 'pipe', 'then', 'promise', 'progress', 'done', 'fail' ]
		.forEach( function ( method ) {
			lifecycle[ method ] = function () {
				OO.ui.warnDeprecation(
					'Using the return value of openWindow as a promise is deprecated. ' +
					'Use .openWindow( ... ).opening.' + method + '( ... ) instead.'
				);
				return compatOpening[ method ].apply( this, arguments );
			};
		} );
	// Argument handling: look the window up by name (possibly instantiating it
	// via the factory) and retry with the real window object.
	if ( typeof win === 'string' ) {
		this.getWindow( win ).then(
			function ( w ) {
				manager.openWindow( w, data, lifecycle, compatOpening );
			},
			function ( err ) {
				lifecycle.deferreds.opening.reject( err );
			}
		);
		return lifecycle;
	}
	// Error handling: only one window may be open/opening at a time
	if ( !this.hasWindow( win ) ) {
		error = 'Cannot open window: window is not attached to manager';
	} else if ( this.lifecycle && this.lifecycle.isOpened() ) {
		error = 'Cannot open window: another window is open';
	} else if ( this.preparingToOpen || ( this.lifecycle && this.lifecycle.isOpening() ) ) {
		error = 'Cannot open window: another window is opening';
	}
	if ( error ) {
		compatOpening.reject( new OO.ui.Error( error ) );
		lifecycle.deferreds.opening.reject( new OO.ui.Error( error ) );
		return lifecycle;
	}
	// If a window is currently closing, wait for it to complete
	this.preparingToOpen = $.when( this.lifecycle && this.lifecycle.closed );
	// Ensure handlers get called after preparingToOpen is set
	this.preparingToOpen.done( function () {
		if ( manager.modal ) {
			manager.toggleGlobalEvents( true );
			manager.toggleAriaIsolation( true );
		}
		// Remember where to send focus on close; null disables this behaviour
		manager.$returnFocusTo = data.$returnFocusTo !== undefined ?
			data.$returnFocusTo :
			$( document.activeElement );
		manager.currentWindow = win;
		manager.lifecycle = lifecycle;
		manager.preparingToOpen = null;
		manager.emit( 'opening', win, compatOpening, data );
		lifecycle.deferreds.opening.resolve( data );
		// 'setup' after getSetupDelay(), then 'ready' after getReadyDelay()
		setTimeout( function () {
			manager.compatOpened = $.Deferred();
			win.setup( data ).then( function () {
				compatOpening.notify( { state: 'setup' } );
				setTimeout( function () {
					win.ready( data ).then( function () {
						compatOpening.notify( { state: 'ready' } );
						lifecycle.deferreds.opened.resolve( data );
						compatOpening.resolve( manager.compatOpened.promise(), data );
						manager.togglePreventIosScrolling( true );
					}, function ( dataOrErr ) {
						// 'ready' failed: abort the open and close the window again
						lifecycle.deferreds.opened.reject();
						compatOpening.reject();
						manager.closeWindow( win );
						// Re-throw real errors asynchronously so they reach the console
						if ( dataOrErr instanceof Error ) {
							setTimeout( function () {
								throw dataOrErr;
							} );
						}
					} );
				}, manager.getReadyDelay() );
			}, function ( dataOrErr ) {
				// 'setup' failed: abort the open and close the window again
				lifecycle.deferreds.opened.reject();
				compatOpening.reject();
				manager.closeWindow( win );
				// Re-throw real errors asynchronously so they reach the console
				if ( dataOrErr instanceof Error ) {
					setTimeout( function () {
						throw dataOrErr;
					} );
				}
			} );
		}, manager.getSetupDelay() );
	} );
	return lifecycle;
};
/**
 * Close a window.
 *
 * @param {OO.ui.Window|string} win Window object or symbolic name of window to close
 * @param {Object} [data] Window closing data
 * @return {OO.ui.WindowInstance} A lifecycle object representing this particular
 *  opening of the window. For backwards-compatibility, the object is also a Thenable that is
 *  resolved when the window is done closing, see T163510.
 * @fires closing
 */
OO.ui.WindowManager.prototype.closeWindow = function ( win, data ) {
	var error,
		manager = this,
		compatClosing = $.Deferred(),
		lifecycle = this.lifecycle,
		compatOpened;
	// Argument handling: resolve a symbolic name, or null out unmanaged windows
	if ( typeof win === 'string' ) {
		win = this.windows[ win ];
	} else if ( !this.hasWindow( win ) ) {
		win = null;
	}
	// Error handling
	if ( !lifecycle ) {
		error = 'Cannot close window: no window is currently open';
	} else if ( !win ) {
		error = 'Cannot close window: window is not attached to manager';
	} else if ( win !== this.currentWindow || this.lifecycle.isClosed() ) {
		error = 'Cannot close window: window already closed with different data';
	} else if ( this.preparingToClose || this.lifecycle.isClosing() ) {
		error = 'Cannot close window: window already closing with different data';
	}
	if ( error ) {
		// This function was called for the wrong window and we don't want to mess with the current
		// window's state.
		lifecycle = new OO.ui.WindowInstance();
		// Pretend the window has been opened, so that we can pretend to fail to close it.
		lifecycle.deferreds.opening.resolve( {} );
		lifecycle.deferreds.opened.resolve( {} );
	}
	// Turn lifecycle into a Thenable for backwards-compatibility with
	// the deprecated nested-promise behaviour, see T163510.
	[ 'state', 'always', 'catch', 'pipe', 'then', 'promise', 'progress', 'done', 'fail' ]
		.forEach( function ( method ) {
			lifecycle[ method ] = function () {
				OO.ui.warnDeprecation(
					'Using the return value of closeWindow as a promise is deprecated. ' +
					'Use .closeWindow( ... ).closed.' + method + '( ... ) instead.'
				);
				return compatClosing[ method ].apply( this, arguments );
			};
		} );
	if ( error ) {
		compatClosing.reject( new OO.ui.Error( error ) );
		lifecycle.deferreds.closing.reject( new OO.ui.Error( error ) );
		return lifecycle;
	}
	// If the window is currently opening, close it when it's done
	this.preparingToClose = $.when( this.lifecycle.opened );
	// Ensure handlers get called after preparingToClose is set
	this.preparingToClose.always( function () {
		manager.preparingToClose = null;
		manager.emit( 'closing', win, compatClosing, data );
		lifecycle.deferreds.closing.resolve( data );
		// Resolve the deprecated placeholder promise created in openWindow()
		compatOpened = manager.compatOpened;
		manager.compatOpened = null;
		compatOpened.resolve( compatClosing.promise(), data );
		manager.togglePreventIosScrolling( false );
		// 'hold' after getHoldDelay(), then 'teardown' after getTeardownDelay()
		setTimeout( function () {
			win.hold( data ).then( function () {
				compatClosing.notify( { state: 'hold' } );
				setTimeout( function () {
					win.teardown( data ).then( function () {
						compatClosing.notify( { state: 'teardown' } );
						if ( manager.modal ) {
							manager.toggleGlobalEvents( false );
							manager.toggleAriaIsolation( false );
						}
						// Return focus to where it was before the window opened
						if ( manager.$returnFocusTo && manager.$returnFocusTo.length ) {
							manager.$returnFocusTo[ 0 ].focus();
						}
						manager.currentWindow = null;
						manager.lifecycle = null;
						lifecycle.deferreds.closed.resolve( data );
						compatClosing.resolve( data );
					} );
				}, manager.getTeardownDelay() );
			} );
		}, manager.getHoldDelay() );
	} );
	return lifecycle;
};
/**
 * Add windows to the window manager.
 *
 * Windows can be added by reference, symbolic name, or explicitly defined symbolic names.
 * See the [OOUI documentation on MediaWiki][2] for examples.
 * [2]: https://www.mediawiki.org/wiki/OOUI/Windows/Window_managers
 *
 * This function can be called in two manners:
 *
 * 1. `.addWindows( [ winA, winB, ... ] )` (where `winA`, `winB` are OO.ui.Window objects)
 *
 *    This syntax registers windows under the symbolic names defined in their `.static.name`
 *    properties. For example, if `windowA.constructor.static.name` is `'nameA'`, calling
 *    `.openWindow( 'nameA' )` afterwards will open the window `windowA`. This syntax requires the
 *    static name to be set, otherwise an exception will be thrown.
 *
 *    This is the recommended way, as it allows for an easier switch to using a window factory.
 *
 * 2. `.addWindows( { nameA: winA, nameB: winB, ... } )`
 *
 *    This syntax registers windows under the explicitly given symbolic names. In this example,
 *    calling `.openWindow( 'nameA' )` afterwards will open the window `windowA`, regardless of
 *    what its `.static.name` is set to. The static name is not required to be set.
 *
 *    This should only be used if you need to override the default symbolic names.
 *
 * Example:
 *
 *     var windowManager = new OO.ui.WindowManager();
 *     $( document.body ).append( windowManager.$element );
 *
 *     // Add a window under the default name: see OO.ui.MessageDialog.static.name
 *     windowManager.addWindows( [ new OO.ui.MessageDialog() ] );
 *     // Add a window under an explicit name
 *     windowManager.addWindows( { myMessageDialog: new OO.ui.MessageDialog() } );
 *
 *     // Open window by default name
 *     windowManager.openWindow( 'message' );
 *     // Open window by explicitly given name
 *     windowManager.openWindow( 'myMessageDialog' );
 *
 * @param {Object.<string,OO.ui.Window>|OO.ui.Window[]} windows An array of window objects specified
 *  by reference, symbolic name, or explicitly defined symbolic names.
 * @throws {Error} An error is thrown if a window is added by symbolic name, but has neither an
 *  explicit nor a statically configured symbolic name.
 */
OO.ui.WindowManager.prototype.addWindows = function ( windows ) {
	var i, len, win, name, list;
	if ( Array.isArray( windows ) ) {
		// Convert to map of windows by looking up symbolic names from static configuration
		list = {};
		for ( i = 0, len = windows.length; i < len; i++ ) {
			name = windows[ i ].constructor.static.name;
			if ( !name ) {
				throw new Error( 'Windows must have a `name` static property defined.' );
			}
			list[ name ] = windows[ i ];
		}
	} else if ( OO.isPlainObject( windows ) ) {
		list = windows;
	}
	// Add windows: hide each window, attach it to the manager's element,
	// and tell the window who manages it.
	for ( name in list ) {
		win = list[ name ];
		this.windows[ name ] = win.toggle( false );
		this.$element.append( win.$element );
		win.setManager( this );
	}
};
/**
 * Remove the specified windows from the windows manager.
 *
 * Windows will be closed before they are removed. If you wish to remove all windows, you may wish
 * to use the #clearWindows method instead. If you no longer need the window manager and want to
 * ensure that it no longer listens to events, use the #destroy method.
 *
 * @param {string[]} names Symbolic names of windows to remove
 * @return {jQuery.Promise} Promise resolved when window is closed and removed
 * @throws {Error} An error is thrown if the named windows are not managed by the window manager.
 */
OO.ui.WindowManager.prototype.removeWindows = function ( names ) {
	var manager = this;
	var promises = names.map( function ( name ) {
		var win = manager.windows[ name ];
		if ( !win ) {
			throw new Error( 'Cannot remove window' );
		}
		// Detach after closing, whether the close succeeded or failed
		function detachWindow() {
			delete manager.windows[ name ];
			win.$element.detach();
		}
		return manager.closeWindow( name ).closed.then( detachWindow, detachWindow );
	} );
	return $.when.apply( $, promises );
};
/**
 * Remove all windows from the window manager.
 *
 * Windows will be closed before they are removed. Note that the window manager, though not in use,
 * will still listen to events. If the window manager will not be used again, you may wish to use
 * the #destroy method instead. To remove just a subset of windows, use the #removeWindows method.
 *
 * @return {jQuery.Promise} Promise resolved when all windows are closed and removed
 */
OO.ui.WindowManager.prototype.clearWindows = function () {
	// Delegate to #removeWindows with every managed symbolic name
	return this.removeWindows( Object.keys( this.windows ) );
};
/**
 * Set dialog size. In general, this method should not be called directly.
 *
 * Fullscreen mode will be used if the dialog is too wide to fit in the screen.
 *
 * @param {OO.ui.Window} win Window to update, should be the current window
 * @chainable
 * @return {OO.ui.WindowManager} The manager, for chaining
 * @fires resize
 */
OO.ui.WindowManager.prototype.updateWindowSize = function ( win ) {
	var isFullscreen;
	// Bypass for non-current, and thus invisible, windows.
	// Still return the manager so the documented chainable contract holds.
	if ( win !== this.currentWindow ) {
		return this;
	}
	isFullscreen = win.getSize() === 'full';
	this.$element.toggleClass( 'oo-ui-windowManager-fullscreen', isFullscreen );
	this.$element.toggleClass( 'oo-ui-windowManager-floating', !isFullscreen );
	win.setDimensions( win.getSizeProperties() );
	this.emit( 'resize', win );
	return this;
};
/**
 * Prevent scrolling of the document on iOS devices that don't respect `body { overflow: hidden; }`.
 *
 * This function is called when the window is opened (ready), and so the background is covered up,
 * and the user won't see that we're doing weird things to the scroll position.
 *
 * @private
 * @param {boolean} on
 * @chainable
 * @return {OO.ui.WindowManager} The manager, for chaining
 */
OO.ui.WindowManager.prototype.togglePreventIosScrolling = function ( on ) {
	var
		isIos = /ipad|iphone|ipod/i.test( navigator.userAgent ),
		$body = $( this.getElementDocument().body ),
		scrollableRoot = OO.ui.Element.static.getRootScrollableElement( $body[ 0 ] ),
		stackDepth = $body.data( 'windowManagerGlobalEvents' ) || 0;
	// Only if this is the first/last WindowManager (see #toggleGlobalEvents)
	if ( !isIos || stackDepth !== 1 ) {
		return this;
	}
	if ( on ) {
		// We can't apply this workaround for non-fullscreen dialogs, because the user would see the
		// scroll position change. If they have content that needs scrolling, you're out of luck…
		// Always remember the scroll position in case dialog is closed with different size.
		this.iosOrigScrollPosition = scrollableRoot.scrollTop;
		if ( this.getCurrentWindow().getSize() === 'full' ) {
			$body.add( $body.parent() ).addClass( 'oo-ui-windowManager-ios-modal-ready' );
		}
	} else {
		// Always restore ability to scroll in case dialog was opened with different size.
		$body.add( $body.parent() ).removeClass( 'oo-ui-windowManager-ios-modal-ready' );
		if ( this.getCurrentWindow().getSize() === 'full' ) {
			scrollableRoot.scrollTop = this.iosOrigScrollPosition;
		}
	}
	return this;
};
/**
 * Bind or unbind global events for scrolling.
 *
 * Keeps a per-document stack counter ('windowManagerGlobalEvents' data on body) so
 * that body CSS is only changed by the first manager to open and the last to close.
 *
 * @private
 * @param {boolean} [on] Bind global events
 * @chainable
 * @return {OO.ui.WindowManager} The manager, for chaining
 */
OO.ui.WindowManager.prototype.toggleGlobalEvents = function ( on ) {
	var scrollWidth, bodyMargin,
		$body = $( this.getElementDocument().body ),
		// We could have multiple window managers open so only modify
		// the body css at the bottom of the stack
		stackDepth = $body.data( 'windowManagerGlobalEvents' ) || 0;
	on = on === undefined ? !!this.globalEvents : !!on;
	if ( on ) {
		if ( !this.globalEvents ) {
			$( this.getElementWindow() ).on( {
				// Start listening for top-level window dimension changes
				'orientationchange resize': this.onWindowResizeHandler
			} );
			if ( stackDepth === 0 ) {
				// Widen body margin to compensate for the hidden scrollbar
				scrollWidth = window.innerWidth - document.documentElement.clientWidth;
				bodyMargin = parseFloat( $body.css( 'margin-right' ) ) || 0;
				$body.addClass( 'oo-ui-windowManager-modal-active' );
				$body.css( 'margin-right', bodyMargin + scrollWidth );
			}
			stackDepth++;
			this.globalEvents = true;
		}
	} else if ( this.globalEvents ) {
		$( this.getElementWindow() ).off( {
			// Stop listening for top-level window dimension changes
			'orientationchange resize': this.onWindowResizeHandler
		} );
		stackDepth--;
		if ( stackDepth === 0 ) {
			$body.removeClass( 'oo-ui-windowManager-modal-active' );
			$body.css( 'margin-right', '' );
		}
		this.globalEvents = false;
	}
	$body.data( 'windowManagerGlobalEvents', stackDepth );
	return this;
};
/**
 * Toggle screen reader visibility of content other than the window manager.
 *
 * @private
 * @param {boolean} [isolate] Make only the window manager visible to screen readers
 * @chainable
 * @return {OO.ui.WindowManager} The manager, for chaining
 */
OO.ui.WindowManager.prototype.toggleAriaIsolation = function ( isolate ) {
	var $topLevelElement;
	isolate = isolate === undefined ? !this.$ariaHidden : !!isolate;
	if ( isolate ) {
		if ( !this.$ariaHidden ) {
			// Find the top level element containing the window manager or the
			// window manager's element itself in case it's a direct child of body
			$topLevelElement = this.$element.parentsUntil( 'body' ).last();
			$topLevelElement = $topLevelElement.length === 0 ? this.$element : $topLevelElement;
			// In case previously set by another window manager
			this.$element.removeAttr( 'aria-hidden' );
			// Hide everything other than the window manager from screen readers
			this.$ariaHidden = $( document.body )
				.children()
				.not( 'script' )
				.not( $topLevelElement )
				.attr( 'aria-hidden', true );
		}
	} else if ( this.$ariaHidden ) {
		// Restore screen reader visibility
		this.$ariaHidden.removeAttr( 'aria-hidden' );
		this.$ariaHidden = null;
		// and hide the window manager
		this.$element.attr( 'aria-hidden', true );
	}
	return this;
};
/**
 * Destroy the window manager.
 *
 * Destroying the window manager ensures that it will no longer listen to events. If you would like
 * to continue using the window manager, but wish to remove all windows from it, use the
 * #clearWindows method instead.
 */
OO.ui.WindowManager.prototype.destroy = function () {
	// Unbind global listeners and restore screen-reader visibility first,
	// then close and remove all windows and detach the manager's element.
	this.toggleGlobalEvents( false );
	this.toggleAriaIsolation( false );
	this.clearWindows();
	this.$element.remove();
};
/**
* A window is a container for elements that are in a child frame. They are used with
* a window manager (OO.ui.WindowManager), which is used to open and close the window and control
* its presentation. The size of a window is specified using a symbolic name (e.g., ‘small’,
* ‘medium’, ‘large’), which is interpreted by the window manager. If the requested size is not
* recognized, the window manager will choose a sensible fallback.
*
* The lifecycle of a window has three primary stages (opening, opened, and closing) in which
* different processes are executed:
*
* **opening**: The opening stage begins when the window manager's
* {@link OO.ui.WindowManager#openWindow openWindow} or the window's {@link #open open} methods are
* used, and the window manager begins to open the window.
*
* - {@link #getSetupProcess} method is called and its result executed
* - {@link #getReadyProcess} method is called and its result executed
*
* **opened**: The window is now open
*
* **closing**: The closing stage begins when the window manager's
* {@link OO.ui.WindowManager#closeWindow closeWindow}
* or the window's {@link #close} methods are used, and the window manager begins to close the
* window.
*
* - {@link #getHoldProcess} method is called and its result executed
* - {@link #getTeardownProcess} method is called and its result executed. The window is now closed
*
* Each of the window's processes (setup, ready, hold, and teardown) can be extended in subclasses
* by overriding the window's #getSetupProcess, #getReadyProcess, #getHoldProcess and
* #getTeardownProcess methods. Note that each {@link OO.ui.Process process} is executed in series,
* so asynchronous processing can complete. Always assume window processes are executed
* asynchronously.
*
 * For more information, please see the [OOUI documentation on MediaWiki][1].
*
* [1]: https://www.mediawiki.org/wiki/OOUI/Windows
*
* @abstract
* @class
* @extends OO.ui.Element
* @mixins OO.EventEmitter
*
* @constructor
* @param {Object} [config] Configuration options
* @cfg {string} [size] Symbolic name of the dialog size: `small`, `medium`, `large`, `larger` or
* `full`. If omitted, the value of the {@link #static-size static size} property will be used.
*/
OO.ui.Window = function OoUiWindow( config ) {
	config = config || {};

	// Parent constructor
	OO.ui.Window.super.call( this, config );

	// Mixin constructors
	OO.EventEmitter.call( this );

	// Properties
	this.manager = null;
	this.size = config.size || this.constructor.static.size;
	this.$frame = $( '<div>' ).addClass( 'oo-ui-window-frame' );
	/**
	 * Overlay element to use for the `$overlay` configuration option of widgets that support it.
	 * Things put inside it are overlaid on top of the window and are not bound to its dimensions.
	 * See <https://www.mediawiki.org/wiki/OOUI/Concepts#Overlays>.
	 *
	 *     MyDialog.prototype.initialize = function () {
	 *       ...
	 *       var popupButton = new OO.ui.PopupButtonWidget( {
	 *         $overlay: this.$overlay,
	 *         label: 'Popup button',
	 *         popup: {
	 *           $content: $( '<p>Popup content.</p><p>More content.</p><p>Yet more content.</p>' ),
	 *           padded: true
	 *         }
	 *       } );
	 *       ...
	 *     };
	 *
	 * @property {jQuery}
	 */
	this.$overlay = $( '<div>' ).addClass( 'oo-ui-window-overlay' );
	this.$content = $( '<div>' )
		.addClass( 'oo-ui-window-content' )
		.attr( 'tabindex', -1 );
	// Focusable sentinel elements placed before and after the content to trap
	// keyboard focus inside the window
	this.$focusTrapBefore = $( '<div>' ).prop( 'tabIndex', 0 );
	this.$focusTrapAfter = $( '<div>' ).prop( 'tabIndex', 0 );
	this.$focusTraps = this.$focusTrapBefore.add( this.$focusTrapAfter );

	// Assemble the DOM
	this.$frame.append( this.$focusTrapBefore, this.$content, this.$focusTrapAfter );
	this.$element
		.addClass( 'oo-ui-window' )
		.append( this.$frame, this.$overlay );

	// Initially hidden - using #toggle may cause errors if subclasses override toggle with methods
	// that reference properties not initialized at that time of parent class construction
	// TODO: Find a better way to handle post-constructor setup
	this.visible = false;
	this.$element.addClass( 'oo-ui-element-hidden' );
};
/* Setup */

// Inherit from OO.ui.Element and mix in event-emitter behaviour.
OO.inheritClass( OO.ui.Window, OO.ui.Element );
OO.mixinClass( OO.ui.Window, OO.EventEmitter );

/* Static Properties */

/**
 * Symbolic name of the window size: `small`, `medium`, `large`, `larger` or `full`.
 *
 * The static size is used if no #size is configured during construction.
 *
 * @static
 * @inheritable
 * @property {string}
 */
OO.ui.Window.static.size = 'medium';
/* Methods */

/**
 * Handle mouse down events.
 *
 * @private
 * @param {jQuery.Event} e Mouse down event
 * @return {boolean|undefined} False (stopping the event) when the click-block
 *  itself was the target; undefined otherwise
 */
OO.ui.Window.prototype.onMouseDown = function ( e ) {
	// Prevent clicking on the click-block from stealing focus
	if ( e.target === this.$element[ 0 ] ) {
		return false;
	}
};
/**
 * Check if the window has been initialized.
 *
 * Initialization occurs when a window is added to a manager (see #getManager).
 *
 * @return {boolean} Window has been initialized
 */
OO.ui.Window.prototype.isInitialized = function () {
	// The manager reference is set via setManager() when the window is added
	return !!this.manager;
};
/**
 * Check if the window is visible.
 *
 * @return {boolean} Window is visible
 */
OO.ui.Window.prototype.isVisible = function () {
	// Windows start hidden (see constructor) until opened by the manager
	return this.visible;
};
/**
 * Check if the window is opening.
 *
 * This method is a wrapper around the window manager's
 * {@link OO.ui.WindowManager#isOpening isOpening} method.
 * Assumes the window is attached to a manager (see #isInitialized).
 *
 * @return {boolean} Window is opening
 */
OO.ui.Window.prototype.isOpening = function () {
	return this.manager.isOpening( this );
};
/**
 * Check if the window is closing.
 *
 * This method is a wrapper around the window manager's
 * {@link OO.ui.WindowManager#isClosing isClosing} method.
 * Assumes the window is attached to a manager (see #isInitialized).
 *
 * @return {boolean} Window is closing
 */
OO.ui.Window.prototype.isClosing = function () {
	return this.manager.isClosing( this );
};
/**
 * Check if the window is opened.
 *
 * This method is a wrapper around the window manager's
 * {@link OO.ui.WindowManager#isOpened isOpened} method.
 * Assumes the window is attached to a manager (see #isInitialized).
 *
 * @return {boolean} Window is opened
 */
OO.ui.Window.prototype.isOpened = function () {
	return this.manager.isOpened( this );
};
/**
 * Get the window manager.
 *
 * All windows must be attached to a window manager, which is used to open
 * and close the window and control its presentation.
 *
 * @return {OO.ui.WindowManager} Manager of window (`null` until the window is
 *  added to a manager)
 */
OO.ui.Window.prototype.getManager = function () {
	return this.manager;
};
/**
 * Get the symbolic name of the window size (e.g., `small` or `medium`).
 *
 * Falls back to the manager's default size for unrecognized names, and to
 * `full` when the viewport is too narrow for the requested size.
 *
 * @return {string} Symbolic name of the size: `small`, `medium`, `large`, `larger`, `full`
 */
OO.ui.Window.prototype.getSize = function () {
	var statics = this.manager.constructor.static,
		sizes = statics.sizes,
		viewport = OO.ui.Element.static.getDimensions( this.getElementWindow() ),
		// Unknown size names fall back to the manager's default
		size = sizes[ this.size ] ? this.size : statics.defaultSize,
		availableWidth = viewport.rect.right - viewport.rect.left;
	// Too narrow to fit the requested size: go fullscreen instead
	if ( size !== 'full' && availableWidth < sizes[ size ].width ) {
		return 'full';
	}
	return size;
};
/**
 * Get the size properties associated with the current window size
 *
 * @return {Object} Size properties from the manager's static sizes map,
 *  e.g. `{ width: 500 }`
 */
OO.ui.Window.prototype.getSizeProperties = function () {
	return this.manager.constructor.static.sizes[ this.getSize() ];
};
/**
 * Disable transitions on window's frame for the duration of the callback function, then enable them
 * back.
 *
 * @private
 * @param {Function} callback Function to call while transitions are disabled
 */
OO.ui.Window.prototype.withoutSizeTransitions = function ( callback ) {
	// Temporarily resize the frame so getBodyHeight() can use scrollHeight measurements.
	// Disable transitions first, otherwise we'll get values from when the window was animating.
	// We need to build the transition CSS properties using these specific properties since
	// Firefox doesn't return anything useful when asked just for 'transition'.
	var oldTransition = this.$frame.css( 'transition-property' ) + ' ' +
		this.$frame.css( 'transition-duration' ) + ' ' +
		this.$frame.css( 'transition-timing-function' ) + ' ' +
		this.$frame.css( 'transition-delay' );
	this.$frame.css( 'transition', 'none' );
	callback();
	// Force reflow to make sure the style changes done inside callback
	// really are not transitioned: reading height() flushes pending styles
	this.$frame.height();
	this.$frame.css( 'transition', oldTransition );
};
/**
* Get the height of the full window contents (i.e., the window head, body and foot together).
*
* What constitutes the head, body, and foot varies depending on the window type.
* A {@link OO.ui.MessageDialog message dialog} displays a title and message in its body,
* and any actions in the foot. A {@link OO.ui.ProcessDialog process dialog} displays a title
* and special actions in the head, and dialog content in the body.
*
* To get just the height of the dialog body, use the #getBodyHeight method.
*
* @return {number} The height of the window contents (the dialog head, body and foot) in pixels
*/
OO.ui.Window.prototype.getContentHeight = function () {
	var bodyHeight,
		win = this,
		bodyStyleObj = this.$body[ 0 ].style,
		frameStyleObj = this.$frame[ 0 ].style;
	// Temporarily resize the frame so getBodyHeight() can use scrollHeight measurements.
	// Disable transitions first, otherwise we'll get values from when the window was animating.
	this.withoutSizeTransitions( function () {
		// Save inline styles so they can be restored after measuring.
		var oldHeight = frameStyleObj.height,
			oldPosition = bodyStyleObj.position;
		frameStyleObj.height = '1px';
		// Force body to resize to new width
		bodyStyleObj.position = 'relative';
		bodyHeight = win.getBodyHeight();
		// Restore the original inline styles.
		frameStyleObj.height = oldHeight;
		bodyStyleObj.position = oldPosition;
	} );
	return (
		// Add buffer for border
		( this.$frame.outerHeight() - this.$frame.innerHeight() ) +
		// Use combined heights of children
		( this.$head.outerHeight( true ) + bodyHeight + this.$foot.outerHeight( true ) )
	);
};
/**
* Get the height of the window body.
*
* To get the height of the full window contents (the window body, head, and foot together),
* use #getContentHeight.
*
* When this function is called, the window will temporarily have been resized
* to height=1px, so .scrollHeight measurements can be taken accurately.
*
* @return {number} Height of the window body in pixels
*/
OO.ui.Window.prototype.getBodyHeight = function () {
	// scrollHeight reflects the full content height; the frame has been
	// collapsed to 1px by the caller so this measurement is accurate.
	var bodyNode = this.$body[ 0 ];
	return bodyNode.scrollHeight;
};
/**
* Get the directionality of the frame (right-to-left or left-to-right).
*
* @return {string} Directionality: `'ltr'` or `'rtl'`
*/
OO.ui.Window.prototype.getDir = function () {
	// Default to left-to-right when no directionality can be detected.
	var dir = OO.ui.Element.static.getDir( this.$content );
	return dir || 'ltr';
};
/**
* Get the 'setup' process.
*
* The setup process is used to set up a window for use in a particular context, based on the `data`
* argument. This method is called during the opening phase of the window’s lifecycle (before the
* opening animation). You can add elements to the window in this process or set their default
* values.
*
* Override this method to add additional steps to the ‘setup’ process the parent method provides
* using the {@link OO.ui.Process#first first} and {@link OO.ui.Process#next next} methods
* of OO.ui.Process.
*
* To add window content that persists between openings, you may wish to use the #initialize method
* instead.
*
* @param {Object} [data] Window opening data
* @return {OO.ui.Process} Setup process
*/
OO.ui.Window.prototype.getSetupProcess = function () {
	// Base implementation: an empty process. Subclasses add their own steps.
	var process = new OO.ui.Process();
	return process;
};
/**
* Get the ‘ready’ process.
*
* The ready process is used to ready a window for use in a particular context, based on the `data`
* argument. This method is called during the opening phase of the window’s lifecycle, after the
* window has been {@link #getSetupProcess setup} (after the opening animation). You can focus
* elements in the window in this process, or open their dropdowns.
*
* Override this method to add additional steps to the ‘ready’ process the parent method
* provides using the {@link OO.ui.Process#first first} and {@link OO.ui.Process#next next}
* methods of OO.ui.Process.
*
* @param {Object} [data] Window opening data
* @return {OO.ui.Process} Ready process
*/
OO.ui.Window.prototype.getReadyProcess = function () {
	// Base implementation: an empty process. Subclasses add their own steps.
	var process = new OO.ui.Process();
	return process;
};
/**
* Get the 'hold' process.
*
* The hold process is used to keep a window from being used in a particular context, based on the
* `data` argument. This method is called during the closing phase of the window’s lifecycle (before
* the closing animation). You can close dropdowns of elements in the window in this process, if
* they do not get closed automatically.
*
* Override this method to add additional steps to the 'hold' process the parent method provides
* using the {@link OO.ui.Process#first first} and {@link OO.ui.Process#next next} methods
* of OO.ui.Process.
*
* @param {Object} [data] Window closing data
* @return {OO.ui.Process} Hold process
*/
OO.ui.Window.prototype.getHoldProcess = function () {
	// Base implementation: an empty process. Subclasses add their own steps.
	var process = new OO.ui.Process();
	return process;
};
/**
* Get the ‘teardown’ process.
*
* The teardown process is used to teardown a window after use. During teardown, user interactions
* within the window are conveyed and the window is closed, based on the `data` argument. This
* method is called during the closing phase of the window’s lifecycle (after the closing
* animation). You can remove elements in the window in this process or clear their values.
*
* Override this method to add additional steps to the ‘teardown’ process the parent method provides
* using the {@link OO.ui.Process#first first} and {@link OO.ui.Process#next next} methods
* of OO.ui.Process.
*
* @param {Object} [data] Window closing data
* @return {OO.ui.Process} Teardown process
*/
OO.ui.Window.prototype.getTeardownProcess = function () {
	// Base implementation: an empty process. Subclasses add their own steps.
	var process = new OO.ui.Process();
	return process;
};
/**
* Set the window manager.
*
* This will cause the window to initialize. Calling it more than once will cause an error.
*
* @param {OO.ui.WindowManager} manager Manager for this window
* @throws {Error} An error is thrown if the method is called more than once
* @chainable
* @return {OO.ui.Window} The window, for chaining
*/
OO.ui.Window.prototype.setManager = function ( manager ) {
	// A window may be attached to exactly one manager for its lifetime.
	var alreadyAttached = !!this.manager;
	if ( alreadyAttached ) {
		throw new Error( 'Cannot set window manager, window already has a manager' );
	}
	this.manager = manager;
	// Attaching to a manager is what triggers one-time initialization.
	this.initialize();
	return this;
};
/**
* Set the window size by symbolic name (e.g., 'small' or 'medium')
*
* @param {string} size Symbolic name of size: `small`, `medium`, `large`, `larger` or
* `full`
* @chainable
* @return {OO.ui.Window} The window, for chaining
*/
OO.ui.Window.prototype.setSize = function ( size ) {
	// Record the requested symbolic size, then let the manager re-apply
	// concrete dimensions immediately.
	this.size = size;
	this.updateSize();
	return this;
};
/**
* Update the window size.
*
* @throws {Error} An error is thrown if the window is not attached to a window manager
* @chainable
* @return {OO.ui.Window} The window, for chaining
*/
OO.ui.Window.prototype.updateSize = function () {
	var manager = this.manager;
	if ( !manager ) {
		throw new Error( 'Cannot update window size, must be attached to a manager' );
	}
	// Sizing is delegated to the manager, which knows the viewport constraints.
	manager.updateWindowSize( this );
	return this;
};
/**
* Set window dimensions. This method is called by the {@link OO.ui.WindowManager window manager}
* when the window is opening. In general, setDimensions should not be called directly.
*
* To set the size of the window, use the #setSize method.
*
* @param {Object} dim CSS dimension properties
* @param {string|number} [dim.width] Width
* @param {string|number} [dim.minWidth] Minimum width
* @param {string|number} [dim.maxWidth] Maximum width
* @param {string|number} [dim.height] Height, omit to set based on height of contents
* @param {string|number} [dim.minHeight] Minimum height
* @param {string|number} [dim.maxHeight] Maximum height
* @chainable
* @return {OO.ui.Window} The window, for chaining
*/
OO.ui.Window.prototype.setDimensions = function ( dim ) {
	var height,
		win = this,
		styleObj = this.$frame[ 0 ].style;
	// Calculate the height we need to set using the correct width
	if ( dim.height === undefined ) {
		// No explicit height requested: measure the content at the target
		// width, with transitions suppressed so measurements are stable.
		this.withoutSizeTransitions( function () {
			var oldWidth = styleObj.width;
			win.$frame.css( 'width', dim.width || '' );
			height = win.getContentHeight();
			// Restore the pre-measurement width.
			styleObj.width = oldWidth;
		} );
	} else {
		height = dim.height;
	}
	// Apply all dimension properties at once; empty string clears a property.
	this.$frame.css( {
		width: dim.width || '',
		minWidth: dim.minWidth || '',
		maxWidth: dim.maxWidth || '',
		height: height || '',
		minHeight: dim.minHeight || '',
		maxHeight: dim.maxHeight || ''
	} );
	return this;
};
/**
* Initialize window contents.
*
* Before the window is opened for the first time, #initialize is called so that content that
* persists between openings can be added to the window.
*
* To set up a window with new content each time the window opens, use #getSetupProcess.
*
* @throws {Error} An error is thrown if the window is not attached to a window manager
* @chainable
* @return {OO.ui.Window} The window, for chaining
*/
OO.ui.Window.prototype.initialize = function () {
	if ( !this.manager ) {
		throw new Error( 'Cannot initialize window, must be attached to a manager' );
	}
	// Properties
	// Head, body and foot containers; what goes in each varies by window type.
	this.$head = $( '<div>' );
	this.$body = $( '<div>' );
	this.$foot = $( '<div>' );
	this.$document = $( this.getElementDocument() );
	// Events
	this.$element.on( 'mousedown', this.onMouseDown.bind( this ) );
	// Initialization
	this.$head.addClass( 'oo-ui-window-head' );
	this.$body.addClass( 'oo-ui-window-body' );
	this.$foot.addClass( 'oo-ui-window-foot' );
	// Append in display order: head, body, foot.
	this.$content.append( this.$head, this.$body, this.$foot );
	return this;
};
/**
* Called when someone tries to focus the hidden element at the end of the dialog.
* Sends focus back to the start of the dialog.
*
* @param {jQuery.Event} event Focus event
*/
OO.ui.Window.prototype.onFocusTrapFocused = function ( event ) {
	// Hitting the "before" trap means focus moved backwards out of the content.
	var searchBackwards = this.$focusTrapBefore.is( event.target ),
		focusable = OO.ui.findFocusable( this.$content, searchBackwards );
	if ( !focusable ) {
		// There's nothing focusable inside the content. As a fallback,
		// this.$content is focusable, and focusing it will keep our focus
		// properly trapped. It's not a *meaningful* focus, since it's just
		// the content-div for the Window, but it's better than letting focus
		// escape into the page.
		this.$content.trigger( 'focus' );
		return;
	}
	// Wrap focus around to the focusable element at the front or back of the
	// content, depending on which trap was hit.
	focusable.focus();
};
/**
* Open the window.
*
* This method is a wrapper around a call to the window
* manager’s {@link OO.ui.WindowManager#openWindow openWindow} method.
*
* To customize the window each time it opens, use #getSetupProcess or #getReadyProcess.
*
* @param {Object} [data] Window opening data
* @return {OO.ui.WindowInstance} See OO.ui.WindowManager#openWindow
* @throws {Error} An error is thrown if the window is not attached to a window manager
*/
OO.ui.Window.prototype.open = function ( data ) {
	var manager = this.manager;
	if ( !manager ) {
		throw new Error( 'Cannot open window, must be attached to a manager' );
	}
	// The manager owns the opening lifecycle; delegate entirely.
	return manager.openWindow( this, data );
};
/**
* Close the window.
*
* This method is a wrapper around a call to the window
* manager’s {@link OO.ui.WindowManager#closeWindow closeWindow} method.
*
* The window's #getHoldProcess and #getTeardownProcess methods are called during the closing
* phase of the window’s lifecycle and can be used to specify closing behavior each time
* the window closes.
*
* @param {Object} [data] Window closing data
* @return {OO.ui.WindowInstance} See OO.ui.WindowManager#closeWindow
* @throws {Error} An error is thrown if the window is not attached to a window manager
*/
OO.ui.Window.prototype.close = function ( data ) {
	var manager = this.manager;
	if ( !manager ) {
		throw new Error( 'Cannot close window, must be attached to a manager' );
	}
	// The manager owns the closing lifecycle; delegate entirely.
	return manager.closeWindow( this, data );
};
/**
* Setup window.
*
* This is called by OO.ui.WindowManager during window opening (before the animation), and should
* not be called directly by other systems.
*
* @param {Object} [data] Window opening data
* @return {jQuery.Promise} Promise resolved when window is setup
*/
OO.ui.Window.prototype.setup = function ( data ) {
	var win = this;
	// Make the window visible before running the setup process.
	this.toggle( true );
	// Install the focus traps that keep keyboard focus inside the window.
	this.focusTrapHandler = OO.ui.bind( this.onFocusTrapFocused, this );
	this.$focusTraps.on( 'focus', this.focusTrapHandler );
	return this.getSetupProcess( data ).execute().then( function () {
		win.updateSize();
		// Force redraw by asking the browser to measure the elements' widths
		win.$element.addClass( 'oo-ui-window-active oo-ui-window-setup' ).width();
		win.$content.addClass( 'oo-ui-window-content-setup' ).width();
	} );
};
/**
* Ready window.
*
* This is called by OO.ui.WindowManager during window opening (after the animation), and should not
* be called directly by other systems.
*
* @param {Object} [data] Window opening data
* @return {jQuery.Promise} Promise resolved when window is ready
*/
OO.ui.Window.prototype.ready = function ( data ) {
	var win = this;
	// Move focus into the window before running the ready process, which may
	// then focus a more specific element.
	this.$content.trigger( 'focus' );
	return this.getReadyProcess( data ).execute().then( function () {
		// Force redraw by asking the browser to measure the elements' widths
		win.$element.addClass( 'oo-ui-window-ready' ).width();
		win.$content.addClass( 'oo-ui-window-content-ready' ).width();
	} );
};
/**
* Hold window.
*
* This is called by OO.ui.WindowManager during window closing (before the animation), and should
* not be called directly by other systems.
*
* @param {Object} [data] Window closing data
* @return {jQuery.Promise} Promise resolved when window is held
*/
OO.ui.Window.prototype.hold = function ( data ) {
	var win = this;
	return this.getHoldProcess( data ).execute().then( function () {
		// Get the focused element within the window's content
		var $focus = win.$content.find(
			OO.ui.Element.static.getDocument( win.$content ).activeElement
		);
		// Blur the focused element
		if ( $focus.length ) {
			$focus[ 0 ].blur();
		}
		// Force redraw by asking the browser to measure the elements' widths
		win.$element.removeClass( 'oo-ui-window-ready oo-ui-window-setup' ).width();
		win.$content.removeClass( 'oo-ui-window-content-ready oo-ui-window-content-setup' ).width();
	} );
};
/**
* Teardown window.
*
* This is called by OO.ui.WindowManager during window closing (after the animation), and should not
* be called directly by other systems.
*
* @param {Object} [data] Window closing data
* @return {jQuery.Promise} Promise resolved when window is torn down
*/
OO.ui.Window.prototype.teardown = function ( data ) {
	var win = this;
	return this.getTeardownProcess( data ).execute().then( function () {
		// Force redraw by asking the browser to measure the elements' widths
		win.$element.removeClass( 'oo-ui-window-active' ).width();
		// Remove the focus traps installed in #setup, then hide the window.
		win.$focusTraps.off( 'focus', win.focusTrapHandler );
		win.toggle( false );
	} );
};
/**
* The Dialog class serves as the base class for the other types of dialogs.
* Unless extended to include controls, the rendered dialog box is a simple window
* that users can close by hitting the Escape key. Dialog windows are used with OO.ui.WindowManager,
* which opens, closes, and controls the presentation of the window. See the
* [OOUI documentation on MediaWiki] [1] for more information.
*
* @example
* // A simple dialog window.
* function MyDialog( config ) {
* MyDialog.super.call( this, config );
* }
* OO.inheritClass( MyDialog, OO.ui.Dialog );
* MyDialog.static.name = 'myDialog';
* MyDialog.prototype.initialize = function () {
* MyDialog.super.prototype.initialize.call( this );
* this.content = new OO.ui.PanelLayout( { padded: true, expanded: false } );
* this.content.$element.append( '<p>A simple dialog window. Press Escape key to ' +
* 'close.</p>' );
* this.$body.append( this.content.$element );
* };
* MyDialog.prototype.getBodyHeight = function () {
* return this.content.$element.outerHeight( true );
* };
* var myDialog = new MyDialog( {
* size: 'medium'
* } );
* // Create and append a window manager, which opens and closes the window.
* var windowManager = new OO.ui.WindowManager();
* $( document.body ).append( windowManager.$element );
* windowManager.addWindows( [ myDialog ] );
* // Open the window!
* windowManager.openWindow( myDialog );
*
* [1]: https://www.mediawiki.org/wiki/OOUI/Windows/Dialogs
*
* @abstract
* @class
* @extends OO.ui.Window
* @mixins OO.ui.mixin.PendingElement
*
* @constructor
* @param {Object} [config] Configuration options
*/
OO.ui.Dialog = function OoUiDialog( config ) {
	// Parent constructor
	OO.ui.Dialog.super.call( this, config );
	// Mixin constructors
	OO.ui.mixin.PendingElement.call( this );
	// Properties
	// Set of action widgets shown by the dialog (populated during setup).
	this.actions = new OO.ui.ActionSet();
	// Actions whose $element is currently appended; see #attachActions.
	this.attachedActions = [];
	// Symbolic name of the action currently being executed, or null.
	this.currentAction = null;
	// Bound once so the keydown listener can be removed during teardown.
	this.onDialogKeyDownHandler = this.onDialogKeyDown.bind( this );
	// Events
	this.actions.connect( this, {
		click: 'onActionClick',
		change: 'onActionsChange'
	} );
	// Initialization
	this.$element
		.addClass( 'oo-ui-dialog' )
		.attr( 'role', 'dialog' );
};
/* Setup */
// Dialog is a Window that can also display a pending (busy) state.
OO.inheritClass( OO.ui.Dialog, OO.ui.Window );
OO.mixinClass( OO.ui.Dialog, OO.ui.mixin.PendingElement );
/* Static Properties */
/**
 * Symbolic name of dialog.
 *
 * The dialog class must have a symbolic name in order to be registered with OO.Factory.
 * Please see the [OOUI documentation on MediaWiki] [3] for more information.
 *
 * [3]: https://www.mediawiki.org/wiki/OOUI/Windows/Window_managers
 *
 * @abstract
 * @static
 * @inheritable
 * @property {string}
 */
OO.ui.Dialog.static.name = '';
/**
 * The dialog title.
 *
 * The title can be specified as a plaintext string, a {@link OO.ui.mixin.LabelElement Label} node,
 * or a function that will produce a Label node or string. The title can also be specified with data
 * passed to the constructor (see #getSetupProcess). In this case, the static value will be
 * overridden.
 *
 * @abstract
 * @static
 * @inheritable
 * @property {jQuery|string|Function}
 */
OO.ui.Dialog.static.title = '';
/**
 * An array of configured {@link OO.ui.ActionWidget action widgets}.
 *
 * Actions can also be specified with data passed to the constructor (see #getSetupProcess). In this
 * case, the static value will be overridden. Please see the
 * [OOUI documentation on MediaWiki] [2] for more information.
 *
 * [2]: https://www.mediawiki.org/wiki/OOUI/Windows/Process_Dialogs#Action_sets
 *
 * @static
 * @inheritable
 * @property {Object[]}
 */
OO.ui.Dialog.static.actions = [];
/**
 * Close the dialog when the Escape key is pressed.
 *
 * @static
 * @abstract
 * @inheritable
 * @property {boolean}
 */
OO.ui.Dialog.static.escapable = true;
/* Methods */
/**
* Handle frame document key down events.
*
* @private
* @param {jQuery.Event} e Key down event
*/
OO.ui.Dialog.prototype.onDialogKeyDown = function ( e ) {
	var primaryActions,
		isEscape = e.which === OO.ui.Keys.ESCAPE,
		isSubmitCombo = e.which === OO.ui.Keys.ENTER && ( e.ctrlKey || e.metaKey );
	if ( isEscape && this.constructor.static.escapable ) {
		// Escape maps to the empty action, which closes without data.
		this.executeAction( '' );
		e.preventDefault();
		e.stopPropagation();
	} else if ( isSubmitCombo ) {
		// Ctrl/Cmd+Enter triggers the first usable primary action, if any.
		primaryActions = this.actions.get( { flags: 'primary', visible: true, disabled: false } );
		if ( primaryActions.length > 0 ) {
			this.executeAction( primaryActions[ 0 ].getAction() );
			e.preventDefault();
			e.stopPropagation();
		}
	}
};
/**
* Handle action click events.
*
* @private
* @param {OO.ui.ActionWidget} action Action that was clicked
*/
OO.ui.Dialog.prototype.onActionClick = function ( action ) {
	if ( this.isPending() ) {
		// Ignore clicks while a previous action is still being processed.
		return;
	}
	this.executeAction( action.getAction() );
};
/**
* Handle actions change event.
*
* @private
*/
OO.ui.Dialog.prototype.onActionsChange = function () {
	// Always detach the stale widgets first.
	this.detachActions();
	if ( this.isClosing() ) {
		// No point re-attaching while the dialog is going away.
		return;
	}
	this.attachActions();
	if ( !this.isOpening() ) {
		// If the dialog is currently opening, this will be called automatically soon.
		this.updateSize();
	}
};
/**
* Get the set of actions used by the dialog.
*
* @return {OO.ui.ActionSet}
*/
OO.ui.Dialog.prototype.getActions = function () {
	// Expose the ActionSet created in the constructor.
	return this.actions;
};
/**
* Get a process for taking action.
*
* When you override this method, you can create a new OO.ui.Process and return it, or add
* additional accept steps to the process the parent method provides using the
* {@link OO.ui.Process#first 'first'} and {@link OO.ui.Process#next 'next'} methods of
* OO.ui.Process.
*
* @param {string} [action] Symbolic name of action
* @return {OO.ui.Process} Action process
*/
OO.ui.Dialog.prototype.getActionProcess = function ( action ) {
	var process = new OO.ui.Process();
	process.next( function () {
		if ( !action ) {
			// An empty action always closes the dialog without data, which should always be
			// safe and make no changes
			this.close();
		}
	}, this );
	return process;
};
/**
* @inheritdoc
*
* @param {Object} [data] Dialog opening data
* @param {jQuery|string|Function|null} [data.title] Dialog title, omit to use
* the {@link #static-title static title}
* @param {Object[]} [data.actions] List of configuration options for each
* {@link OO.ui.ActionWidget action widget}, omit to use {@link #static-actions static actions}.
*/
OO.ui.Dialog.prototype.getSetupProcess = function ( data ) {
	data = data || {};
	// Parent method
	return OO.ui.Dialog.super.prototype.getSetupProcess.call( this, data )
		.next( function () {
			// Opening data overrides the static title/actions when provided.
			var config = this.constructor.static,
				actions = data.actions !== undefined ? data.actions : config.actions,
				title = data.title !== undefined ? data.title : config.title;
			this.title.setLabel( title ).setTitle( title );
			this.actions.add( this.getActionWidgets( actions ) );
			// Listen for Escape / Ctrl+Enter; removed again in getTeardownProcess.
			this.$element.on( 'keydown', this.onDialogKeyDownHandler );
		}, this );
};
/**
* @inheritdoc
*/
OO.ui.Dialog.prototype.getTeardownProcess = function ( data ) {
	// Parent method
	return OO.ui.Dialog.super.prototype.getTeardownProcess.call( this, data )
		.first( function () {
			// Undo what getSetupProcess did: remove the key handler and
			// clear the per-opening action state.
			this.$element.off( 'keydown', this.onDialogKeyDownHandler );
			this.actions.clear();
			this.currentAction = null;
		}, this );
};
/**
* @inheritdoc
*/
OO.ui.Dialog.prototype.initialize = function () {
	// Parent method
	OO.ui.Dialog.super.prototype.initialize.call( this );
	// Properties
	// Title label; its text is set from static/opening data in getSetupProcess.
	this.title = new OO.ui.LabelWidget();
	// Initialization
	this.$content.addClass( 'oo-ui-dialog-content' );
	// Associate the dialog with its title for assistive technology.
	this.$element.attr( 'aria-labelledby', this.title.getElementId() );
	// Show the pending (busy) state over the head area.
	this.setPendingElement( this.$head );
};
/**
* Get action widgets from a list of configs
*
* @param {Object[]} actions Action widget configs
* @return {OO.ui.ActionWidget[]} Action widgets
*/
OO.ui.Dialog.prototype.getActionWidgets = function ( actions ) {
	var dialog = this;
	// Build one widget per config, preserving order.
	return actions.map( function ( config ) {
		return dialog.getActionWidget( config );
	} );
};
/**
* Get action widget from config
*
* Override this method to change the action widget class used.
*
* @param {Object} config Action widget config
* @return {OO.ui.ActionWidget} Action widget
*/
OO.ui.Dialog.prototype.getActionWidget = function ( config ) {
	// Let subclasses adjust the config before the widget is constructed.
	var widgetConfig = this.getActionWidgetConfig( config );
	return new OO.ui.ActionWidget( widgetConfig );
};
/**
* Get action widget config
*
* Override this method to modify the action widget config
*
* @param {Object} config Initial action widget config
* @return {Object} Action widget config
*/
OO.ui.Dialog.prototype.getActionWidgetConfig = function ( config ) {
	// Base implementation is the identity; subclasses override to adjust
	// the config (e.g. MessageDialog forces framed: false).
	return config;
};
/**
* Attach action actions.
*
* @protected
*/
OO.ui.Dialog.prototype.attachActions = function () {
	// Remember the list of potentially attached actions
	// (the base class does not append any elements itself; subclasses do).
	this.attachedActions = this.actions.get();
};
/**
* Detach action actions.
*
* @protected
* @chainable
* @return {OO.ui.Dialog} The dialog, for chaining
*/
OO.ui.Dialog.prototype.detachActions = function () {
	// Detach every action element that may have been previously attached,
	// then forget the list.
	this.attachedActions.forEach( function ( action ) {
		action.$element.detach();
	} );
	this.attachedActions = [];
	return this;
};
/**
* Execute an action.
*
* @param {string} action Symbolic name of action to execute
* @return {jQuery.Promise} Promise resolved when action completes, rejected if it fails
*/
OO.ui.Dialog.prototype.executeAction = function ( action ) {
	var process;
	// Show the pending state for the duration of the action process.
	this.pushPending();
	this.currentAction = action;
	process = this.getActionProcess( action );
	return process.execute().always( this.popPending.bind( this ) );
};
/**
* MessageDialogs display a confirmation or alert message. By default, the rendered dialog box
* consists of a header that contains the dialog title, a body with the message, and a footer that
* contains any {@link OO.ui.ActionWidget action widgets}. The MessageDialog class is the only type
* of {@link OO.ui.Dialog dialog} that is usually instantiated directly.
*
* There are two basic types of message dialogs, confirmation and alert:
*
* - **confirmation**: the dialog title describes what a progressive action will do and the message
* provides more details about the consequences.
* - **alert**: the dialog title describes which event occurred and the message provides more
* information about why the event occurred.
*
* The MessageDialog class specifies two actions: ‘accept’, the primary
* action (e.g., ‘ok’) and ‘reject,’ the safe action (e.g., ‘cancel’). Both will close the window,
* passing along the selected action.
*
* For more information and examples, please see the [OOUI documentation on MediaWiki][1].
*
* @example
* // Example: Creating and opening a message dialog window.
* var messageDialog = new OO.ui.MessageDialog();
*
* // Create and append a window manager.
* var windowManager = new OO.ui.WindowManager();
* $( document.body ).append( windowManager.$element );
* windowManager.addWindows( [ messageDialog ] );
* // Open the window.
* windowManager.openWindow( messageDialog, {
* title: 'Basic message dialog',
* message: 'This is the message'
* } );
*
* [1]: https://www.mediawiki.org/wiki/OOUI/Windows/Message_Dialogs
*
* @class
* @extends OO.ui.Dialog
*
* @constructor
* @param {Object} [config] Configuration options
*/
OO.ui.MessageDialog = function OoUiMessageDialog( config ) {
	// Parent constructor
	OO.ui.MessageDialog.super.call( this, config );
	// Properties
	// Tri-state: null until fitActions first runs, then boolean; see
	// #toggleVerticalActionLayout.
	this.verticalActionLayout = null;
	// Initialization
	this.$element.addClass( 'oo-ui-messageDialog' );
};
/* Setup */
OO.inheritClass( OO.ui.MessageDialog, OO.ui.Dialog );
/* Static Properties */
/**
 * @static
 * @inheritdoc
 */
OO.ui.MessageDialog.static.name = 'message';
/**
 * @static
 * @inheritdoc
 */
OO.ui.MessageDialog.static.size = 'small';
/**
 * Dialog title.
 *
 * The title of a confirmation dialog describes what a progressive action will do. The
 * title of an alert dialog describes which event occurred.
 *
 * @static
 * @inheritable
 * @property {jQuery|string|Function|null}
 */
OO.ui.MessageDialog.static.title = null;
/**
 * The message displayed in the dialog body.
 *
 * A confirmation message describes the consequences of a progressive action. An alert
 * message describes why an event occurred.
 *
 * @static
 * @inheritable
 * @property {jQuery|string|Function|null}
 */
OO.ui.MessageDialog.static.message = null;
/**
 * @static
 * @inheritdoc
 */
OO.ui.MessageDialog.static.actions = [
	// Note that OO.ui.alert() and OO.ui.confirm() rely on these.
	{ action: 'accept', label: OO.ui.deferMsg( 'ooui-dialog-message-accept' ), flags: 'primary' },
	{ action: 'reject', label: OO.ui.deferMsg( 'ooui-dialog-message-reject' ), flags: 'safe' }
];
/* Methods */
/**
* Toggle action layout between vertical and horizontal.
*
* @private
* @param {boolean} [value] Layout actions vertically, omit to toggle
* @chainable
* @return {OO.ui.MessageDialog} The dialog, for chaining
*/
OO.ui.MessageDialog.prototype.toggleVerticalActionLayout = function ( value ) {
	// Omitted value means "flip"; otherwise coerce to boolean.
	var useVertical = value === undefined ? !this.verticalActionLayout : !!value;
	if ( useVertical !== this.verticalActionLayout ) {
		this.verticalActionLayout = useVertical;
		// The two classes are mutually exclusive.
		this.$actions
			.toggleClass( 'oo-ui-messageDialog-actions-vertical', useVertical )
			.toggleClass( 'oo-ui-messageDialog-actions-horizontal', !useVertical );
	}
	return this;
};
/**
* @inheritdoc
*/
OO.ui.MessageDialog.prototype.getActionProcess = function ( action ) {
	if ( !action ) {
		// No action name: fall back to the default close-without-data process.
		return OO.ui.MessageDialog.super.prototype.getActionProcess.call( this, action );
	}
	// Any named action ('accept' or 'reject') closes the dialog, passing the
	// selected action along to the closing promise.
	return new OO.ui.Process( function () {
		this.close( { action: action } );
	}, this );
};
/**
* @inheritdoc
*
* @param {Object} [data] Dialog opening data
* @param {jQuery|string|Function|null} [data.title] Description of the action being confirmed
* @param {jQuery|string|Function|null} [data.message] Description of the action's consequence
* @param {string} [data.size] Symbolic name of the dialog size, see OO.ui.Window
* @param {Object[]} [data.actions] List of OO.ui.ActionOptionWidget configuration options for each
* action item
*/
OO.ui.MessageDialog.prototype.getSetupProcess = function ( data ) {
	data = data || {};
	// Parent method
	return OO.ui.MessageDialog.super.prototype.getSetupProcess.call( this, data )
		.next( function () {
			// Opening data overrides the static title/message/size when provided.
			this.title.setLabel(
				data.title !== undefined ? data.title : this.constructor.static.title
			);
			this.message.setLabel(
				data.message !== undefined ? data.message : this.constructor.static.message
			);
			this.size = data.size !== undefined ? data.size : this.constructor.static.size;
		}, this );
};
/**
* @inheritdoc
*/
OO.ui.MessageDialog.prototype.getReadyProcess = function ( data ) {
	data = data || {};
	// Parent method
	return OO.ui.MessageDialog.super.prototype.getReadyProcess.call( this, data )
		.next( function () {
			// Focus the first action flagged 'primary', if there is one.
			var i,
				actionWidgets = this.actions.get();
			for ( i = 0; i < actionWidgets.length; i++ ) {
				if ( actionWidgets[ i ].getFlags().indexOf( 'primary' ) > -1 ) {
					actionWidgets[ i ].focus();
					break;
				}
			}
		}, this );
};
/**
* @inheritdoc
*/
OO.ui.MessageDialog.prototype.getBodyHeight = function () {
	var bodyHeight, oldOverflow,
		$scrollable = this.container.$element;
	// Hide overflow while measuring so a scrollbar doesn't skew the result.
	oldOverflow = $scrollable[ 0 ].style.overflow;
	$scrollable[ 0 ].style.overflow = 'hidden';
	OO.ui.Element.static.reconsiderScrollbars( $scrollable[ 0 ] );
	// The text panel (title + message) determines the body height.
	bodyHeight = this.text.$element.outerHeight( true );
	// Restore the original overflow style.
	$scrollable[ 0 ].style.overflow = oldOverflow;
	return bodyHeight;
};
/**
* @inheritdoc
*/
OO.ui.MessageDialog.prototype.setDimensions = function ( dim ) {
	var
		dialog = this,
		$scrollable = this.container.$element;
	OO.ui.MessageDialog.super.prototype.setDimensions.call( this, dim );
	// Twiddle the overflow property, otherwise an unnecessary scrollbar will be produced.
	// Need to do it after transition completes (250ms), add 50ms just in case.
	setTimeout( function () {
		var oldOverflow = $scrollable[ 0 ].style.overflow,
			activeElement = document.activeElement;
		$scrollable[ 0 ].style.overflow = 'hidden';
		OO.ui.Element.static.reconsiderScrollbars( $scrollable[ 0 ] );
		// Check reconsiderScrollbars didn't destroy our focus, as we
		// are doing this after the ready process.
		if ( activeElement && activeElement !== document.activeElement && activeElement.focus ) {
			activeElement.focus();
		}
		$scrollable[ 0 ].style.overflow = oldOverflow;
	}, 300 );
	// Fit actions once now, for the pre-transition layout...
	dialog.fitActions();
	// Wait for CSS transition to finish and do it again :(
	setTimeout( function () {
		dialog.fitActions();
	}, 300 );
	return this;
};
/**
* @inheritdoc
*/
OO.ui.MessageDialog.prototype.initialize = function () {
	// Parent method
	OO.ui.MessageDialog.super.prototype.initialize.call( this );
	// Properties
	// Container for the action widgets, placed in the foot.
	this.$actions = $( '<div>' );
	// Scrollable panel that holds the text panel.
	this.container = new OO.ui.PanelLayout( {
		scrollable: true, classes: [ 'oo-ui-messageDialog-container' ]
	} );
	// Padded panel that holds the title and the message.
	this.text = new OO.ui.PanelLayout( {
		padded: true, expanded: false, classes: [ 'oo-ui-messageDialog-text' ]
	} );
	this.message = new OO.ui.LabelWidget( {
		classes: [ 'oo-ui-messageDialog-message' ]
	} );
	// Initialization
	// Nesting: $body > container > text > (title, message); actions go in $foot.
	this.title.$element.addClass( 'oo-ui-messageDialog-title' );
	this.$content.addClass( 'oo-ui-messageDialog-content' );
	this.container.$element.append( this.text.$element );
	this.text.$element.append( this.title.$element, this.message.$element );
	this.$body.append( this.container.$element );
	this.$actions.addClass( 'oo-ui-messageDialog-actions' );
	this.$foot.append( this.$actions );
};
/**
* @inheritdoc
*/
OO.ui.MessageDialog.prototype.getActionWidgetConfig = function ( config ) {
	// Message dialog actions are always unframed; copy the config so the
	// caller's object is not mutated, then override the flag.
	var adjusted = $.extend( {}, config );
	adjusted.framed = false;
	return adjusted;
};
/**
* @inheritdoc
*/
OO.ui.MessageDialog.prototype.attachActions = function () {
var i, len, special, others;
// Parent method
OO.ui.MessageDialog.super.prototype.attachActions.call( this );
special = this.actions.getSpecial();
others = this.actions.getOthers();
if ( special.safe ) {
this.$actions.append( special.safe.$element );
special.safe.toggleFramed( true );
}
for ( i = 0, len = others.length; i < len; i++ ) {
this.$actions.append( others[ i ].$element );
others[ i ].toggleFramed( true );
}
if ( special.primary ) {
this.$actions.append( special.primary.$element );
special.primary.toggleFramed( true );
}
};
/**
* Fit action actions into columns or rows.
*
* Columns will be used if all labels can fit without overflow, otherwise rows will be used.
*
* @private
*/
OO.ui.MessageDialog.prototype.fitActions = function () {
var i, len, action,
previous = this.verticalActionLayout,
actions = this.actions.get();
// Detect clipping
this.toggleVerticalActionLayout( false );
for ( i = 0, len = actions.length; i < len; i++ ) {
action = actions[ i ];
if ( action.$element[ 0 ].scrollWidth > action.$element[ 0 ].clientWidth ) {
this.toggleVerticalActionLayout( true );
break;
}
}
// Move the body out of the way of the foot
this.$body.css( 'bottom', this.$foot.outerHeight( true ) );
if ( this.verticalActionLayout !== previous ) {
// We changed the layout, window height might need to be updated.
this.updateSize();
}
};
/**
* ProcessDialog windows encapsulate a {@link OO.ui.Process process} and all of the code necessary
* to complete it. If the process terminates with an error, a customizable {@link OO.ui.Error error
* interface} alerts users to the trouble, permitting the user to dismiss the error and try again
* when relevant. The ProcessDialog class is always extended and customized with the actions and
* content required for each process.
*
* The process dialog box consists of a header that visually represents the ‘working’ state of long
* processes with an animation. The header contains the dialog title as well as
* two {@link OO.ui.ActionWidget action widgets}: a ‘safe’ action on the left (e.g., ‘Cancel’) and
* a ‘primary’ action on the right (e.g., ‘Done’).
*
* Like other windows, the process dialog is managed by a
* {@link OO.ui.WindowManager window manager}.
* Please see the [OOUI documentation on MediaWiki][1] for more information and examples.
*
* @example
* // Example: Creating and opening a process dialog window.
* function MyProcessDialog( config ) {
* MyProcessDialog.super.call( this, config );
* }
* OO.inheritClass( MyProcessDialog, OO.ui.ProcessDialog );
*
* MyProcessDialog.static.name = 'myProcessDialog';
* MyProcessDialog.static.title = 'Process dialog';
* MyProcessDialog.static.actions = [
* { action: 'save', label: 'Done', flags: 'primary' },
* { label: 'Cancel', flags: 'safe' }
* ];
*
* MyProcessDialog.prototype.initialize = function () {
* MyProcessDialog.super.prototype.initialize.apply( this, arguments );
* this.content = new OO.ui.PanelLayout( { padded: true, expanded: false } );
* this.content.$element.append( '<p>This is a process dialog window. The header ' +
* 'contains the title and two buttons: \'Cancel\' (a safe action) on the left and ' +
* '\'Done\' (a primary action) on the right.</p>' );
* this.$body.append( this.content.$element );
* };
* MyProcessDialog.prototype.getActionProcess = function ( action ) {
* var dialog = this;
* if ( action ) {
* return new OO.ui.Process( function () {
* dialog.close( { action: action } );
* } );
* }
* return MyProcessDialog.super.prototype.getActionProcess.call( this, action );
* };
*
* var windowManager = new OO.ui.WindowManager();
* $( document.body ).append( windowManager.$element );
*
* var dialog = new MyProcessDialog();
* windowManager.addWindows( [ dialog ] );
* windowManager.openWindow( dialog );
*
* [1]: https://www.mediawiki.org/wiki/OOUI/Windows/Process_Dialogs
*
* @abstract
* @class
* @extends OO.ui.Dialog
*
* @constructor
* @param {Object} [config] Configuration options
*/
OO.ui.ProcessDialog = function OoUiProcessDialog( config ) {
// Parent constructor
OO.ui.ProcessDialog.super.call( this, config );
// Properties
this.fitOnOpen = false;
// Initialization
this.$element.addClass( 'oo-ui-processDialog' );
if ( OO.ui.isMobile() ) {
this.$element.addClass( 'oo-ui-isMobile' );
}
};
/* Setup */
OO.inheritClass( OO.ui.ProcessDialog, OO.ui.Dialog );
/* Methods */
/**
* Handle dismiss button click events.
*
* Hides errors.
*
* @private
*/
OO.ui.ProcessDialog.prototype.onDismissErrorButtonClick = function () {
this.hideErrors();
};
/**
* Handle retry button click events.
*
* Hides errors and then tries again.
*
* @private
*/
OO.ui.ProcessDialog.prototype.onRetryButtonClick = function () {
this.hideErrors();
this.executeAction( this.currentAction );
};
/**
* @inheritdoc
*/
OO.ui.ProcessDialog.prototype.initialize = function () {
// Parent method
OO.ui.ProcessDialog.super.prototype.initialize.call( this );
// Properties
this.$navigation = $( '<div>' );
this.$location = $( '<div>' );
this.$safeActions = $( '<div>' );
this.$primaryActions = $( '<div>' );
this.$otherActions = $( '<div>' );
this.dismissButton = new OO.ui.ButtonWidget( {
label: OO.ui.msg( 'ooui-dialog-process-dismiss' )
} );
this.retryButton = new OO.ui.ButtonWidget();
this.$errors = $( '<div>' );
this.$errorsTitle = $( '<div>' );
// Events
this.dismissButton.connect( this, {
click: 'onDismissErrorButtonClick'
} );
this.retryButton.connect( this, {
click: 'onRetryButtonClick'
} );
this.title.connect( this, {
labelChange: 'fitLabel'
} );
// Initialization
this.title.$element.addClass( 'oo-ui-processDialog-title' );
this.$location
.append( this.title.$element )
.addClass( 'oo-ui-processDialog-location' );
this.$safeActions.addClass( 'oo-ui-processDialog-actions-safe' );
this.$primaryActions.addClass( 'oo-ui-processDialog-actions-primary' );
this.$otherActions.addClass( 'oo-ui-processDialog-actions-other' );
this.$errorsTitle
.addClass( 'oo-ui-processDialog-errors-title' )
.text( OO.ui.msg( 'ooui-dialog-process-error' ) );
this.$errors
.addClass( 'oo-ui-processDialog-errors oo-ui-element-hidden' )
.append(
this.$errorsTitle,
$( '<div>' ).addClass( 'oo-ui-processDialog-errors-actions' ).append(
this.dismissButton.$element, this.retryButton.$element
)
);
this.$content
.addClass( 'oo-ui-processDialog-content' )
.append( this.$errors );
this.$navigation
.addClass( 'oo-ui-processDialog-navigation' )
// Note: Order of appends below is important. These are in the order
// we want tab to go through them. Display-order is handled entirely
// by CSS absolute-positioning. As such, primary actions like "done"
// should go first.
.append( this.$primaryActions, this.$location, this.$safeActions );
this.$head.append( this.$navigation );
this.$foot.append( this.$otherActions );
};
/**
* @inheritdoc
*/
OO.ui.ProcessDialog.prototype.getActionWidgetConfig = function ( config ) {
function checkFlag( flag ) {
return config.flags === flag ||
( Array.isArray( config.flags ) && config.flags.indexOf( flag ) !== -1 );
}
config = $.extend( { framed: true }, config );
if ( checkFlag( 'close' ) ) {
// Change close buttons to icon only.
$.extend( config, {
icon: 'close',
invisibleLabel: true
} );
} else if ( checkFlag( 'back' ) ) {
// Change back buttons to icon only.
$.extend( config, {
icon: 'previous',
invisibleLabel: true
} );
}
return config;
};
/**
* @inheritdoc
*/
OO.ui.ProcessDialog.prototype.attachActions = function () {
var i, len, other, special, others;
// Parent method
OO.ui.ProcessDialog.super.prototype.attachActions.call( this );
special = this.actions.getSpecial();
others = this.actions.getOthers();
if ( special.primary ) {
this.$primaryActions.append( special.primary.$element );
}
for ( i = 0, len = others.length; i < len; i++ ) {
other = others[ i ];
this.$otherActions.append( other.$element );
}
if ( special.safe ) {
this.$safeActions.append( special.safe.$element );
}
};
/**
* @inheritdoc
*/
OO.ui.ProcessDialog.prototype.executeAction = function ( action ) {
var dialog = this;
return OO.ui.ProcessDialog.super.prototype.executeAction.call( this, action )
.fail( function ( errors ) {
dialog.showErrors( errors || [] );
} );
};
/**
* @inheritdoc
*/
OO.ui.ProcessDialog.prototype.setDimensions = function () {
var dialog = this;
// Parent method
OO.ui.ProcessDialog.super.prototype.setDimensions.apply( this, arguments );
this.fitLabel();
// If there are many actions, they might be shown on multiple lines. Their layout can change
// when resizing the dialog and when changing the actions. Adjust the height of the footer to
// fit them.
dialog.$body.css( 'bottom', dialog.$foot.outerHeight( true ) );
// Wait for CSS transition to finish and do it again :(
setTimeout( function () {
dialog.$body.css( 'bottom', dialog.$foot.outerHeight( true ) );
}, 300 );
};
/**
* Fit label between actions.
*
* @private
* @chainable
* @return {OO.ui.MessageDialog} The dialog, for chaining
*/
OO.ui.ProcessDialog.prototype.fitLabel = function () {
var safeWidth, primaryWidth, biggerWidth, labelWidth, navigationWidth, leftWidth, rightWidth,
size = this.getSizeProperties();
if ( typeof size.width !== 'number' ) {
if ( this.isOpened() ) {
navigationWidth = this.$head.width() - 20;
} else if ( this.isOpening() ) {
if ( !this.fitOnOpen ) {
// Size is relative and the dialog isn't open yet, so wait.
// FIXME: This should ideally be handled by setup somehow.
this.manager.lifecycle.opened.done( this.fitLabel.bind( this ) );
this.fitOnOpen = true;
}
return;
} else {
return;
}
} else {
navigationWidth = size.width - 20;
}
safeWidth = this.$safeActions.width();
primaryWidth = this.$primaryActions.width();
biggerWidth = Math.max( safeWidth, primaryWidth );
labelWidth = this.title.$element.width();
if ( !OO.ui.isMobile() && 2 * biggerWidth + labelWidth < navigationWidth ) {
// We have enough space to center the label
leftWidth = rightWidth = biggerWidth;
} else {
// Let's hope we at least have enough space not to overlap, because we can't wrap
// the label.
if ( this.getDir() === 'ltr' ) {
leftWidth = safeWidth;
rightWidth = primaryWidth;
} else {
leftWidth = primaryWidth;
rightWidth = safeWidth;
}
}
this.$location.css( { paddingLeft: leftWidth, paddingRight: rightWidth } );
return this;
};
/**
* Handle errors that occurred during accept or reject processes.
*
* @private
* @param {OO.ui.Error[]|OO.ui.Error} errors Errors to be handled
*/
OO.ui.ProcessDialog.prototype.showErrors = function ( errors ) {
var i, len, actions,
items = [],
abilities = {},
recoverable = true,
warning = false;
if ( errors instanceof OO.ui.Error ) {
errors = [ errors ];
}
for ( i = 0, len = errors.length; i < len; i++ ) {
if ( !errors[ i ].isRecoverable() ) {
recoverable = false;
}
if ( errors[ i ].isWarning() ) {
warning = true;
}
items.push( new OO.ui.MessageWidget( {
type: 'error',
label: errors[ i ].getMessage()
} ).$element[ 0 ] );
}
this.$errorItems = $( items );
if ( recoverable ) {
abilities[ this.currentAction ] = true;
// Copy the flags from the first matching action.
actions = this.actions.get( { actions: this.currentAction } );
if ( actions.length ) {
this.retryButton.clearFlags().setFlags( actions[ 0 ].getFlags() );
}
} else {
abilities[ this.currentAction ] = false;
this.actions.setAbilities( abilities );
}
if ( warning ) {
this.retryButton.setLabel( OO.ui.msg( 'ooui-dialog-process-continue' ) );
} else {
this.retryButton.setLabel( OO.ui.msg( 'ooui-dialog-process-retry' ) );
}
this.retryButton.toggle( recoverable );
this.$errorsTitle.after( this.$errorItems );
this.$errors.removeClass( 'oo-ui-element-hidden' ).scrollTop( 0 );
};
/**
* Hide errors.
*
* @private
*/
OO.ui.ProcessDialog.prototype.hideErrors = function () {
this.$errors.addClass( 'oo-ui-element-hidden' );
if ( this.$errorItems ) {
this.$errorItems.remove();
this.$errorItems = null;
}
};
/**
* @inheritdoc
*/
OO.ui.ProcessDialog.prototype.getTeardownProcess = function ( data ) {
// Parent method
return OO.ui.ProcessDialog.super.prototype.getTeardownProcess.call( this, data )
.first( function () {
// Make sure to hide errors.
this.hideErrors();
this.fitOnOpen = false;
}, this );
};
/**
* @class OO.ui
*/
/**
* Lazy-initialize and return a global OO.ui.WindowManager instance, used by OO.ui.alert and
* OO.ui.confirm.
*
* @private
* @return {OO.ui.WindowManager}
*/
OO.ui.getWindowManager = function () {
if ( !OO.ui.windowManager ) {
OO.ui.windowManager = new OO.ui.WindowManager();
$( document.body ).append( OO.ui.windowManager.$element );
OO.ui.windowManager.addWindows( [ new OO.ui.MessageDialog() ] );
}
return OO.ui.windowManager;
};
/**
* Display a quick modal alert dialog, using a OO.ui.MessageDialog. While the dialog is open, the
* rest of the page will be dimmed out and the user won't be able to interact with it. The dialog
* has only one action button, labelled "OK", clicking it will simply close the dialog.
*
* A window manager is created automatically when this function is called for the first time.
*
* @example
* OO.ui.alert( 'Something happened!' ).done( function () {
* console.log( 'User closed the dialog.' );
* } );
*
* OO.ui.alert( 'Something larger happened!', { size: 'large' } );
*
* @param {jQuery|string} text Message text to display
* @param {Object} [options] Additional options, see OO.ui.MessageDialog#getSetupProcess
* @return {jQuery.Promise} Promise resolved when the user closes the dialog
*/
OO.ui.alert = function ( text, options ) {
return OO.ui.getWindowManager().openWindow( 'message', $.extend( {
message: text,
actions: [ OO.ui.MessageDialog.static.actions[ 0 ] ]
}, options ) ).closed.then( function () {
return undefined;
} );
};
/**
* Display a quick modal confirmation dialog, using a OO.ui.MessageDialog. While the dialog is open,
* the rest of the page will be dimmed out and the user won't be able to interact with it. The
* dialog has two action buttons, one to confirm an operation (labelled "OK") and one to cancel it
* (labelled "Cancel").
*
* A window manager is created automatically when this function is called for the first time.
*
* @example
* OO.ui.confirm( 'Are you sure?' ).done( function ( confirmed ) {
* if ( confirmed ) {
* console.log( 'User clicked "OK"!' );
* } else {
* console.log( 'User clicked "Cancel" or closed the dialog.' );
* }
* } );
*
* @param {jQuery|string} text Message text to display
* @param {Object} [options] Additional options, see OO.ui.MessageDialog#getSetupProcess
* @return {jQuery.Promise} Promise resolved when the user closes the dialog. If the user chose to
* confirm, the promise will resolve to boolean `true`; otherwise, it will resolve to boolean
* `false`.
*/
OO.ui.confirm = function ( text, options ) {
return OO.ui.getWindowManager().openWindow( 'message', $.extend( {
message: text
}, options ) ).closed.then( function ( data ) {
return !!( data && data.action === 'accept' );
} );
};
/**
* Display a quick modal prompt dialog, using a OO.ui.MessageDialog. While the dialog is open,
* the rest of the page will be dimmed out and the user won't be able to interact with it. The
* dialog has a text input widget and two action buttons, one to confirm an operation
* (labelled "OK") and one to cancel it (labelled "Cancel").
*
* A window manager is created automatically when this function is called for the first time.
*
* @example
* OO.ui.prompt( 'Choose a line to go to', {
* textInput: { placeholder: 'Line number' }
* } ).done( function ( result ) {
* if ( result !== null ) {
* console.log( 'User typed "' + result + '" then clicked "OK".' );
* } else {
* console.log( 'User clicked "Cancel" or closed the dialog.' );
* }
* } );
*
* @param {jQuery|string} text Message text to display
* @param {Object} [options] Additional options, see OO.ui.MessageDialog#getSetupProcess
* @param {Object} [options.textInput] Additional options for text input widget,
* see OO.ui.TextInputWidget
* @return {jQuery.Promise} Promise resolved when the user closes the dialog. If the user chose to
* confirm, the promise will resolve with the value of the text input widget; otherwise, it will
* resolve to `null`.
*/
OO.ui.prompt = function ( text, options ) {
var instance,
manager = OO.ui.getWindowManager(),
textInput = new OO.ui.TextInputWidget( ( options && options.textInput ) || {} ),
textField = new OO.ui.FieldLayout( textInput, {
align: 'top',
label: text
} );
instance = manager.openWindow( 'message', $.extend( {
message: textField.$element
}, options ) );
// TODO: This is a little hacky, and could be done by extending MessageDialog instead.
instance.opened.then( function () {
textInput.on( 'enter', function () {
manager.getCurrentWindow().close( { action: 'accept' } );
} );
textInput.focus();
} );
return instance.closed.then( function ( data ) {
return data && data.action === 'accept' ? textInput.getValue() : null;
} );
};
}( OO ) );
//# sourceMappingURL=oojs-ui-windows.js.map.json<|fim▁end|> | |
<|file_name|>YTPlayerStateEvent.java<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package me.mast3rplan.phantombot.event.ytplayer;
import me.mast3rplan.phantombot.twitchwsirc.Channel;
import me.mast3rplan.phantombot.ytplayer.YTPlayerState;
public class YTPlayerStateEvent extends YTPlayerEvent {
private final YTPlayerState state;
public YTPlayerStateEvent(YTPlayerState state) {
this.state = state;
}
public YTPlayerStateEvent(YTPlayerState state, Channel channel) {
super(channel);
this.state = state;
}
public YTPlayerState getState() {
return state;
}
public int getStateId() {
return state.i;
}
}<|fim▁end|> | * Copyright (C) 2017 phantombot.tv
* |
<|file_name|>saxutils.py<|end_file_name|><|fim▁begin|>"""
A library of useful helper classes to the saxlib classes, for the
convenience of application and driver writers.
$Id: saxutils.py,v 1.35 2004/03/20 07:46:04 fdrake Exp $
"""
import os, urlparse, urllib2, types
import handler
import xmlreader
import sys, _exceptions, saxlib
try:
_StringTypes = [types.StringType, types.UnicodeType]
except AttributeError: # 1.5 compatibility:UnicodeType not defined
_StringTypes = [types.StringType]
def __dict_replace(s, d):
"""Replace substrings of a string using a dictionary."""
for key, value in d.items():
s = s.replace(key, value)
return s
def escape(data, entities={}):
"""Escape &, <, and > in a string of data.
You can escape other strings of data by passing a dictionary as
the optional entities parameter. The keys and values must all be
strings; each key will be replaced with its corresponding value.
"""
data = data.replace("&", "&")
data = data.replace("<", "<")
data = data.replace(">", ">")
if entities:
data = __dict_replace(data, entities)
return data
def unescape(data, entities={}):
"""Unescape &, <, and > in a string of data.
You can unescape other strings of data by passing a dictionary as
the optional entities parameter. The keys and values must all be
strings; each key will be replaced with its corresponding value.
"""
data = data.replace("<", "<")
data = data.replace(">", ">")
if entities:
data = __dict_replace(data, entities)
# must do ampersand last
return data.replace("&", "&")
def quoteattr(data, entities={}):
"""Escape and quote an attribute value.
Escape &, <, and > in a string of data, then quote it for use as
an attribute value. The \" character will be escaped as well, if
necessary.
You can escape other strings of data by passing a dictionary as
the optional entities parameter. The keys and values must all be
strings; each key will be replaced with its corresponding value.
"""
data = escape(data, entities)
if '"' in data:
if "'" in data:
data = '"%s"' % data.replace('"', """)
else:
data = "'%s'" % data
else:
data = '"%s"' % data
return data
# --- DefaultHandler
class DefaultHandler(handler.EntityResolver, handler.DTDHandler,
handler.ContentHandler, handler.ErrorHandler):
"""Default base class for SAX2 event handlers. Implements empty
methods for all callback methods, which can be overridden by
application implementors. Replaces the deprecated SAX1 HandlerBase
class."""
# --- Location
class Location:
"""Represents a location in an XML entity. Initialized by being passed
a locator, from which it reads off the current location, which is then
stored internally."""
def __init__(self, locator):
self.__col = locator.getColumnNumber()
self.__line = locator.getLineNumber()
self.__pubid = locator.getPublicId()
self.__sysid = locator.getSystemId()
def getColumnNumber(self):
return self.__col
def getLineNumber(self):
return self.__line
def getPublicId(self):
return self.__pubid
def getSystemId(self):
return self.__sysid
def __str__(self):
if self.__line is None:
line = "?"
else:
line = self.__line
if self.__col is None:
col = "?"
else:
col = self.__col
return "%s:%s:%s" % (
self.__sysid or self.__pubid or "<unknown>",
line, col)
# --- ErrorPrinter
class ErrorPrinter:
"A simple class that just prints error messages to standard out."
def __init__(self, level=0, outfile=sys.stderr):
self._level = level
self._outfile = outfile
def warning(self, exception):
if self._level <= 0:
self._outfile.write("WARNING in %s: %s\n" %
(self.__getpos(exception),
exception.getMessage()))
def error(self, exception):
if self._level <= 1:
self._outfile.write("ERROR in %s: %s\n" %
(self.__getpos(exception),
exception.getMessage()))
def fatalError(self, exception):
if self._level <= 2:
self._outfile.write("FATAL ERROR in %s: %s\n" %
(self.__getpos(exception),
exception.getMessage()))
def __getpos(self, exception):
if isinstance(exception, _exceptions.SAXParseException):
return "%s:%s:%s" % (exception.getSystemId(),
exception.getLineNumber(),
exception.getColumnNumber())
else:
return "<unknown>"
# --- ErrorRaiser
class ErrorRaiser:
"A simple class that just raises the exceptions it is passed."
def __init__(self, level = 0):
self._level = level
def error(self, exception):
if self._level <= 1:
raise exception
def fatalError(self, exception):
if self._level <= 2:
raise exception
def warning(self, exception):
if self._level <= 0:
raise exception
# --- AttributesImpl now lives in xmlreader
from xmlreader import AttributesImpl
# --- XMLGenerator is the SAX2 ContentHandler for writing back XML
import codecs
def _outputwrapper(stream,encoding):
writerclass = codecs.lookup(encoding)[3]
return writerclass(stream)
if hasattr(codecs, "register_error"):
def writetext(stream, text, entities={}):
stream.errors = "xmlcharrefreplace"
stream.write(escape(text, entities))
stream.errors = "strict"
else:
def writetext(stream, text, entities={}):
text = escape(text, entities)
try:
stream.write(text)
except UnicodeError:
for c in text:
try:
stream.write(c)
except UnicodeError:
stream.write(u"&#%d;" % ord(c))
def writeattr(stream, text):
countdouble = text.count('"')
if countdouble:
countsingle = text.count("'")
if countdouble <= countsingle:
entities = {'"': """}
quote = '"'
else:
entities = {"'": "'"}
quote = "'"
else:
entities = {}
quote = '"'
stream.write(quote)
writetext(stream, text, entities)
stream.write(quote)
class XMLGenerator(handler.ContentHandler):
GENERATED_PREFIX = "pyxml.sax.saxutils.prefix%s"
def __init__(self, out=None, encoding="iso-8859-1"):
if out is None:
import sys
out = sys.stdout
handler.ContentHandler.__init__(self)
self._out = _outputwrapper(out,encoding)
self._ns_contexts = [{}] # contains uri -> prefix dicts
self._current_context = self._ns_contexts[-1]
self._undeclared_ns_maps = []
self._encoding = encoding
self._generated_prefix_ctr = 0
return
# ContentHandler methods
def startDocument(self):
self._out.write('<?xml version="1.0" encoding="%s"?>\n' %
self._encoding)
def startPrefixMapping(self, prefix, uri):
self._ns_contexts.append(self._current_context.copy())
self._current_context[uri] = prefix
self._undeclared_ns_maps.append((prefix, uri))
def endPrefixMapping(self, prefix):
self._current_context = self._ns_contexts[-1]
del self._ns_contexts[-1]
def startElement(self, name, attrs):
self._out.write('<' + name)
for (name, value) in attrs.items():
self._out.write(' %s=' % name)
writeattr(self._out, value)
self._out.write('>')
def endElement(self, name):
self._out.write('</%s>' % name)
def startElementNS(self, name, qname, attrs):
if name[0] is None:
name = name[1]
elif self._current_context[name[0]] is None:
# default namespace
name = name[1]
else:
name = self._current_context[name[0]] + ":" + name[1]
self._out.write('<' + name)
for k,v in self._undeclared_ns_maps:
if k is None:
self._out.write(' xmlns="%s"' % (v or ''))
else:
self._out.write(' xmlns:%s="%s"' % (k,v))
self._undeclared_ns_maps = []
for (name, value) in attrs.items():
if name[0] is None:
name = name[1]
elif self._current_context[name[0]] is None:
# default namespace
#If an attribute has a nsuri but not a prefix, we must
#create a prefix and add a nsdecl
prefix = self.GENERATED_PREFIX % self._generated_prefix_ctr
self._generated_prefix_ctr = self._generated_prefix_ctr + 1
name = prefix + ':' + name[1]
self._out.write(' xmlns:%s=%s' % (prefix, quoteattr(name[0])))
self._current_context[name[0]] = prefix
else:
name = self._current_context[name[0]] + ":" + name[1]
self._out.write(' %s=' % name)
writeattr(self._out, value)
self._out.write('>')
def endElementNS(self, name, qname):
# XXX: if qname is not None, we better use it.
# Python 2.0b2 requires us to use the recorded prefix for
# name[0], though
if name[0] is None:
qname = name[1]
elif self._current_context[name[0]] is None:
qname = name[1]
else:
qname = self._current_context[name[0]] + ":" + name[1]
self._out.write('</%s>' % qname)
def characters(self, content):
writetext(self._out, content)
def ignorableWhitespace(self, content):
self._out.write(content)
def processingInstruction(self, target, data):
self._out.write('<?%s %s?>' % (target, data))
class LexicalXMLGenerator(XMLGenerator, saxlib.LexicalHandler):
"""A XMLGenerator that also supports the LexicalHandler interface"""
def __init__(self, out=None, encoding="iso-8859-1"):
XMLGenerator.__init__(self, out, encoding)
self._in_cdata = 0
def characters(self, content):
if self._in_cdata:
self._out.write(content.replace(']]>', ']]>]]><![CDATA['))
else:
self._out.write(escape(content))
# LexicalHandler methods
# (we only support the most important ones and inherit the rest)
def startDTD(self, name, public_id, system_id):
self._out.write('<!DOCTYPE %s' % name)
if public_id:
self._out.write(' PUBLIC %s %s' % (
quoteattr(public_id or ""), quoteattr(system_id or "")
))
elif system_id:
self._out.write(' SYSTEM %s' % quoteattr(system_id or ""))
def endDTD(self):
self._out.write('>')
def comment(self, content):
self._out.write('<!--')
self._out.write(content)
self._out.write('-->')
def startCDATA(self):
self._in_cdata = 1
self._out.write('<![CDATA[')
def endCDATA(self):
self._in_cdata = 0
self._out.write(']]>')
# --- ContentGenerator is the SAX1 DocumentHandler for writing back XML
class ContentGenerator(XMLGenerator):
def characters(self, str, start, end):
# In SAX1, characters receives start and end; in SAX2, it receives
# a string. For plain strings, we may want to use a buffer object.
return XMLGenerator.characters(self, str[start:start+end])
# --- XMLFilterImpl
class XMLFilterBase(saxlib.XMLFilter):
"""This class is designed to sit between an XMLReader and the
client application's event handlers. By default, it does nothing
but pass requests up to the reader and events on to the handlers
unmodified, but subclasses can override specific methods to modify
the event stream or the configuration requests as they pass
through."""
# ErrorHandler methods
def error(self, exception):
self._err_handler.error(exception)
def fatalError(self, exception):
self._err_handler.fatalError(exception)
def warning(self, exception):
self._err_handler.warning(exception)
# ContentHandler methods
def setDocumentLocator(self, locator):
self._cont_handler.setDocumentLocator(locator)
def startDocument(self):
self._cont_handler.startDocument()
def endDocument(self):
self._cont_handler.endDocument()
def startPrefixMapping(self, prefix, uri):
self._cont_handler.startPrefixMapping(prefix, uri)
def endPrefixMapping(self, prefix):
self._cont_handler.endPrefixMapping(prefix)
def startElement(self, name, attrs):
self._cont_handler.startElement(name, attrs)
def endElement(self, name):
self._cont_handler.endElement(name)
def startElementNS(self, name, qname, attrs):
self._cont_handler.startElementNS(name, qname, attrs)
def endElementNS(self, name, qname):
self._cont_handler.endElementNS(name, qname)
def characters(self, content):
self._cont_handler.characters(content)
def ignorableWhitespace(self, chars):
self._cont_handler.ignorableWhitespace(chars)
def processingInstruction(self, target, data):
self._cont_handler.processingInstruction(target, data)
def skippedEntity(self, name):
self._cont_handler.skippedEntity(name)
# DTDHandler methods
<|fim▁hole|> def notationDecl(self, name, publicId, systemId):
self._dtd_handler.notationDecl(name, publicId, systemId)
def unparsedEntityDecl(self, name, publicId, systemId, ndata):
self._dtd_handler.unparsedEntityDecl(name, publicId, systemId, ndata)
# EntityResolver methods
def resolveEntity(self, publicId, systemId):
self._ent_handler.resolveEntity(publicId, systemId)
# XMLReader methods
def parse(self, source):
self._parent.setContentHandler(self)
self._parent.setErrorHandler(self)
self._parent.setEntityResolver(self)
self._parent.setDTDHandler(self)
self._parent.parse(source)
def setLocale(self, locale):
self._parent.setLocale(locale)
def getFeature(self, name):
return self._parent.getFeature(name)
def setFeature(self, name, state):
self._parent.setFeature(name, state)
def getProperty(self, name):
return self._parent.getProperty(name)
def setProperty(self, name, value):
self._parent.setProperty(name, value)
# FIXME: remove this backward compatibility hack when not needed anymore
XMLFilterImpl = XMLFilterBase
# --- BaseIncrementalParser
class BaseIncrementalParser(xmlreader.IncrementalParser):
"""This class implements the parse method of the XMLReader
interface using the feed, close and reset methods of the
IncrementalParser interface as a convenience to SAX 2.0 driver
writers."""
def parse(self, source):
source = prepare_input_source(source)
self.prepareParser(source)
self._cont_handler.startDocument()
# FIXME: what about char-stream?
inf = source.getByteStream()
buffer = inf.read(16384)
while buffer != "":
self.feed(buffer)
buffer = inf.read(16384)
self.close()
self.reset()
self._cont_handler.endDocument()
def prepareParser(self, source):
"""This method is called by the parse implementation to allow
the SAX 2.0 driver to prepare itself for parsing."""
raise NotImplementedError("prepareParser must be overridden!")
# --- Utility functions
def prepare_input_source(source, base = ""):
"""This function takes an InputSource and an optional base URL and
returns a fully resolved InputSource object ready for reading."""
if type(source) in _StringTypes:
source = xmlreader.InputSource(source)
elif hasattr(source, "read"):
f = source
source = xmlreader.InputSource()
source.setByteStream(f)
if hasattr(f, "name"):
source.setSystemId(f.name)
if source.getByteStream() is None:
sysid = source.getSystemId()
if os.path.isfile(sysid):
basehead = os.path.split(os.path.normpath(base))[0]
source.setSystemId(os.path.join(basehead, sysid))
f = open(sysid, "rb")
else:
source.setSystemId(urlparse.urljoin(base, sysid))
f = urllib2.urlopen(source.getSystemId())
source.setByteStream(f)
return source
# ===========================================================================
#
# DEPRECATED SAX 1.0 CLASSES
#
# ===========================================================================
# --- AttributeMap
class AttributeMap:
    """An implementation of AttributeList that takes an (attr,val) hash
    and uses it to implement the AttributeList interface."""

    def __init__(self, map):
        self.map = map

    def getLength(self):
        "Number of attributes held in the map."
        return len(self.map.keys())

    def getName(self, i):
        "Attribute name at index i, or None when the index is out of range."
        try:
            return self.map.keys()[i]
        except IndexError:
            return None

    def getType(self, i):
        "Every attribute is reported with type CDATA."
        return "CDATA"

    def getValue(self, i):
        "Look up a value by integer index or by attribute name; None when absent."
        try:
            if type(i) == types.IntType:
                return self.map[self.getName(i)]
            return self.map[i]
        except KeyError:
            return None

    def __len__(self):
        return len(self.map)

    def __getitem__(self, key):
        # Integer subscripts yield names; string subscripts yield values.
        if type(key) == types.IntType:
            return self.map.keys()[key]
        return self.map[key]

    def items(self):
        return self.map.items()

    def keys(self):
        return self.map.keys()

    def has_key(self, key):
        return self.map.has_key(key)

    def get(self, key, alternative=None):
        return self.map.get(key, alternative)

    def copy(self):
        "Return a new AttributeMap over a shallow copy of the hash."
        return AttributeMap(self.map.copy())

    def values(self):
        return self.map.values()
# --- Event broadcasting object
class EventBroadcaster:
    """Takes a list of objects and forwards any method calls received
    to all objects in the list. The attribute list holds the list and
    can freely be modified by clients."""

    class Event:
        "Helper objects that represent event methods."

        def __init__(self, list, name):
            self.list = list
            self.name = name

        def __call__(self, *rest):
            # Invoke each listener's method directly. The deprecated apply()
            # builtin used here previously was removed in Python 3 and the
            # direct-call form is equivalent in Python 2 as well.
            for obj in self.list:
                getattr(obj, self.name)(*rest)

    def __init__(self, list):
        # NOTE: the parameter name shadows the builtin, but it is part of the
        # public signature (callers may pass it by keyword), so it is kept.
        self.list = list

    def __getattr__(self, name):
        # Any unknown attribute access produces a forwarding Event object.
        return self.Event(self.list, name)

    def __repr__(self):
        return "<EventBroadcaster instance at %d>" % id(self)
# --- ESIS document handler
import saxlib
class ESISDocHandler(saxlib.HandlerBase):
    "A SAX document handler that produces naive ESIS output."

    def __init__(self, writer=sys.stdout):
        self.writer = writer

    def processingInstruction(self, target, remainder):
        """Receive an event signalling that a processing instruction
        has been found."""
        self.writer.write("?" + target + " " + remainder + "\n")

    def startElement(self, name, amap):
        "Emit a '(' line for the element, then one 'A' line per attribute."
        self.writer.write("(" + name + "\n")
        for attr_name in amap.keys():
            self.writer.write("A" + attr_name + " " + amap[attr_name] + "\n")

    def endElement(self, name):
        "Emit a ')' line marking the end of an element."
        self.writer.write(")" + name + "\n")

    def characters(self, data, start_ix, length):
        "Emit a '-' line carrying the character-data slice."
        self.writer.write("-" + data[start_ix:start_ix + length] + "\n")
# --- XML canonizer
class Canonizer(saxlib.HandlerBase):
    "A SAX document handler that produces canonized XML output."

    def __init__(self, writer=sys.stdout):
        self.elem_level = 0  # depth inside the document element
        self.writer = writer

    def processingInstruction(self, target, remainder):
        # The XML declaration is not part of the canonical form.
        if not target == "xml":
            self.writer.write("<?" + target + " " + remainder + "?>")

    def startElement(self, name, amap):
        self.writer.write("<" + name)
        # Canonical XML requires attributes in sorted order.
        a_names = amap.keys()
        a_names.sort()
        for a_name in a_names:
            self.writer.write(" " + a_name + "=\"")
            self.write_data(amap[a_name])
            self.writer.write("\"")
        self.writer.write(">")
        self.elem_level = self.elem_level + 1

    def endElement(self, name):
        self.writer.write("</" + name + ">")
        self.elem_level = self.elem_level - 1

    def ignorableWhitespace(self, data, start_ix, length):
        self.characters(data, start_ix, length)

    def characters(self, data, start_ix, length):
        # Character data outside the document element is dropped.
        if self.elem_level > 0:
            self.write_data(data[start_ix:start_ix + length])

    def write_data(self, data):
        "Writes datachars to writer, escaped per canonical-XML rules."
        # BUG FIX: these replacements had degraded into no-ops (for example
        # replace("&", "&")) because the entity references were decoded by a
        # text-extraction step. Canonical XML escapes markup characters and
        # whitespace controls as character/entity references; '&' must be
        # escaped first so later substitutions are not double-escaped.
        data = data.replace("&", "&amp;")
        data = data.replace("<", "&lt;")
        data = data.replace("\"", "&quot;")
        data = data.replace(">", "&gt;")
        data = data.replace(chr(9), "&#9;")
        data = data.replace(chr(10), "&#10;")
        data = data.replace(chr(13), "&#13;")
        self.writer.write(data)
# --- mllib
class mllib:
    """A re-implementation of the htmllib, sgmllib and xmllib interfaces as a
    SAX DocumentHandler."""

    # Unsupported:
    # - setnomoretags
    # - setliteral
    # - translate_references
    # - handle_xml
    # - handle_doctype
    # - handle_charref
    # - handle_entityref
    # - handle_comment
    # - handle_cdata
    # - tag_attributes

    def __init__(self):
        self.reset()

    def reset(self):
        "Discard all state and build a fresh parser/handler pair."
        import saxexts  # only used here
        self.parser = saxexts.XMLParserFactory.make_parser()
        self.handler = mllib.Handler(self.parser, self)
        self.handler.reset()

    def feed(self, data):
        self.parser.feed(data)

    def close(self):
        self.parser.close()

    def get_stack(self):
        return self.handler.get_stack()

    # --- Handler methods (to be overridden)

    def handle_starttag(self, name, method, atts):
        method(atts)

    def handle_endtag(self, name, method):
        method()

    def handle_data(self, data):
        pass

    def handle_proc(self, target, data):
        pass

    def unknown_starttag(self, name, atts):
        pass

    def unknown_endtag(self, name):
        pass

    def syntax_error(self, message):
        pass

    # --- The internal handler class

    class Handler(saxlib.DocumentHandler, saxlib.ErrorHandler):
        """An internal class to handle SAX events and translate them to mllib
        events."""

        def __init__(self, driver, handler):
            self.driver = driver
            self.driver.setDocumentHandler(self)
            self.driver.setErrorHandler(self)
            self.handler = handler
            self.reset()

        def get_stack(self):
            return self.stack

        def reset(self):
            self.stack = []

        # --- DocumentHandler methods

        def characters(self, ch, start, length):
            self.handler.handle_data(ch[start:start + length])

        def endElement(self, name):
            # Dispatch to end_<name> when the client defines it, otherwise
            # fall back to the unknown-endtag hook.
            end_method = getattr(self.handler, "end_" + name, None)
            if end_method is not None:
                self.handler.handle_endtag(name, end_method)
            else:
                self.handler.unknown_endtag(name)
            del self.stack[-1]

        def ignorableWhitespace(self, ch, start, length):
            self.handler.handle_data(ch[start:start + length])

        def processingInstruction(self, target, data):
            self.handler.handle_proc(target, data)

        def startElement(self, name, atts):
            self.stack.append(name)
            # Dispatch to start_<name> when the client defines it, otherwise
            # fall back to the unknown-starttag hook.
            start_method = getattr(self.handler, "start_" + name, None)
            if start_method is not None:
                self.handler.handle_starttag(name, start_method, atts)
            else:
                self.handler.unknown_starttag(name, atts)

        # --- ErrorHandler methods

        def error(self, exception):
            self.handler.syntax_error(str(exception))

        def fatalError(self, exception):
            raise RuntimeError(str(exception))
<|file_name|>AlertsFragment.java<|end_file_name|><|fim▁begin|>package com.wisecityllc.cookedapp.fragments;
import android.app.Activity;
import android.support.v4.app.Fragment;
import android.net.Uri;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.parse.ParseQueryAdapter;
import com.segment.analytics.Analytics;
import com.wisecityllc.cookedapp.R;
import com.wisecityllc.cookedapp.adapters.AlertWallAdapter;
import com.wisecityllc.cookedapp.parseClasses.Message;
import java.util.List;
/**
* A simple {@link Fragment} subclass.
* Activities that contain this fragment must implement the
* {@link AlertsFragment.OnFragmentInteractionListener} interface
* to handle interaction events.
* Use the {@link AlertsFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class AlertsFragment extends Fragment {
public static final String ALERTS_SCREEN = "AlertsScreen";
private boolean mHasMadeInitialLoad = false;
private AlertWallAdapter mAlertsAdapter;
private ListView mAlertsListView;
private TextView mNoAlertsTextView;
private ProgressBar mLoadingIndicator;
public static AlertsFragment newInstance() {
AlertsFragment fragment = new AlertsFragment();
return fragment;
}
public AlertsFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
return inflater.inflate(R.layout.fragment_alerts, container, false);
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
mLoadingIndicator = (ProgressBar) view.findViewById(R.id.alerts_fragment_loading_indicator);
mAlertsAdapter = new AlertWallAdapter(this.getActivity());
mAlertsAdapter.addOnQueryLoadListener(new ParseQueryAdapter.OnQueryLoadListener<Message>() {
@Override
public void onLoading() {
mAlertsListView.setVisibility(View.GONE);
mLoadingIndicator.setVisibility(View.VISIBLE);
mNoAlertsTextView.setVisibility(View.GONE);
}
@Override
public void onLoaded(List<Message> list, Exception e) {
mLoadingIndicator.setVisibility(View.GONE);
mNoAlertsTextView.setVisibility(e != null || list == null || list.isEmpty() ? View.VISIBLE : View.GONE);
mAlertsListView.setVisibility(View.VISIBLE);
}
});
if(mAlertsListView == null){
mAlertsListView = (ListView)view.findViewById(R.id.alerts_list_view);
}
<|fim▁hole|>
// mAlertsListView.setOnItemClickListener(this);
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
try {
} catch (ClassCastException e) {
throw new ClassCastException(activity.toString()
+ " must implement OnFragmentInteractionListener");
}
}
@Override
public void onDetach() {
super.onDetach();
}
/**
* This interface must be implemented by activities that contain this
* fragment to allow an interaction in this fragment to be communicated
* to the activity and potentially other fragments contained in that
* activity.
* <p/>
* See the Android Training lesson <a href=
* "http://developer.android.com/training/basics/fragments/communicating.html"
* >Communicating with Other Fragments</a> for more information.
*/
public interface OnFragmentInteractionListener {
// TODO: Update argument type and name
public void onFragmentInteraction(Uri uri);
}
@Override
public void setUserVisibleHint(boolean isVisibleToUser) {
super.setUserVisibleHint(isVisibleToUser);
if(isVisibleToUser) {
// Has become visible
Analytics.with(getActivity()).screen(null, ALERTS_SCREEN);
// Delay our loading until we become visible
if(mHasMadeInitialLoad == false && mAlertsAdapter != null) {
mAlertsListView.setAdapter(mAlertsAdapter);
mHasMadeInitialLoad = true;
}
}
}
}<|fim▁end|> |
mNoAlertsTextView = (TextView) view.findViewById(R.id.alerts_fragment_no_messages_text_view); |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.