prompt large_stringlengths 70 991k | completion large_stringlengths 0 1.02k |
|---|---|
<|file_name|>pgoapi.py<|end_file_name|><|fim▁begin|>from logging import getLogger
from yarl import URL
from aiohttp import BasicAuth
try:
from aiosocks import Socks4Auth, Socks5Auth
except ImportError:
class Socks4Auth(Exception):
def __init__(*args, **kwargs):
raise ImportError(
'You must install aiosocks to use a SOCKS proxy.')
Socks5Auth = Socks4Auth
from . import __title__, __version__
from .rpc_api import RpcApi, RpcState
from .auth_ptc import AuthPtc
from .auth_google import AuthGoogle
from .hash_server import HashServer
from .exceptions import AuthTokenExpiredException, InvalidCredentialsException, NoPlayerPositionSetException, ServerApiEndpointRedirectException
from .pogoprotos.networking.requests.request_type_pb2 import RequestType
from .pogoprotos.networking.platform.platform_request_type_pb2 import PlatformRequestType
class PGoApi:
    """Client facade for the Pokemon Go RPC API.

    Holds authentication, player position, proxy and device configuration,
    and builds PGoApiRequest objects that perform the actual RPC calls.
    """
    log = getLogger(__name__)
    log.info('%s v%s', __title__, __version__)

    def __init__(self, lat=None, lon=None, alt=None, proxy=None, device_info=None):
        self.auth_provider = None
        self.state = RpcState()
        self._api_endpoint = 'https://pgorelease.nianticlabs.com/plfe/rpc'
        self.latitude = lat
        self.longitude = lon
        self.altitude = alt
        # proxy_auth must be set before assigning self.proxy: the proxy
        # setter below may overwrite it based on the proxy URL's scheme.
        self.proxy_auth = None
        self.proxy = proxy
        self.device_info = device_info

    async def set_authentication(self, provider='ptc', username=None, password=None, timeout=10, locale='en_US', refresh_token=None):
        """Create the auth provider and perform the login.

        For 'google' with a refresh_token, returns a new access token
        instead of doing a username/password login.

        Raises:
            InvalidCredentialsException: for any provider other than
                'ptc' or 'google'.
        """
        if provider == 'ptc':
            self.auth_provider = AuthPtc(
                username,
                password,
                proxy=self._proxy,
                proxy_auth=self.proxy_auth,
                timeout=timeout)
        elif provider == 'google':
            self.auth_provider = AuthGoogle(
                proxy=self._proxy, refresh_token=refresh_token)
            if refresh_token:
                # Token refresh short-circuits the credential login below.
                return await self.auth_provider.get_access_token()
        else:
            raise InvalidCredentialsException(
                "Invalid authentication provider - only ptc/google available.")
        await self.auth_provider.user_login(username, password)

    def set_position(self, lat, lon, alt=None):
        """Update the player position (latitude, longitude, optional altitude)."""
        self.log.debug('Set Position - Lat: %s Lon: %s Alt: %s', lat, lon, alt)
        self.latitude = lat
        self.longitude = lon
        self.altitude = alt

    def create_request(self):
        """Return a fresh PGoApiRequest bound to this client."""
        return PGoApiRequest(self)

    @staticmethod
    def activate_hash_server(hash_token, conn_limit=300):
        """Configure the hashing service token and open its session pool."""
        HashServer.set_token(hash_token)
        HashServer.activate_session(conn_limit)

    @property
    def position(self):
        # (latitude, longitude, altitude) tuple; members may be None if unset.
        return self.latitude, self.longitude, self.altitude

    @property
    def api_endpoint(self):
        return self._api_endpoint

    @api_endpoint.setter
    def api_endpoint(self, api_url):
        """Accept either a full https URL or a bare host, appending '/rpc'."""
        if api_url.startswith("https"):
            self._api_endpoint = URL(api_url)
        else:
            self._api_endpoint = URL('https://' + api_url + '/rpc')

    @property
    def proxy(self):
        return self._proxy

    @proxy.setter
    def proxy(self, proxy):
        """Parse the proxy URL and derive proxy_auth from its scheme.

        Raises:
            ValueError: if the URL carries credentials but the scheme is
                not http, socks5 or socks4.
            ImportError: (via the Socks*Auth stubs) if aiosocks is not
                installed and a SOCKS proxy is requested.
        """
        if proxy is None:
            self._proxy = proxy
        else:
            self._proxy = URL(proxy)
            if self._proxy.user:
                scheme = self._proxy.scheme
                if scheme == 'http':
                    self.proxy_auth = BasicAuth(
                        self._proxy.user, self._proxy.password)
                elif scheme == 'socks5':
                    self.proxy_auth = Socks5Auth(
                        self._proxy.user, self._proxy.password)
                elif scheme == 'socks4':
                    # SOCKS4 has no password field, only a user id.
                    self.proxy_auth = Socks4Auth(self._proxy.user)
                else:
                    raise ValueError(
                        'Proxy protocol must be http, socks5, or socks4.')

    @property
    def start_time(self):
        # Session start time tracked by the RPC state object.
        return self.state.start_time

    def __getattr__(self, func):
        """Expose every RequestType as a one-shot async method.

        e.g. `await api.get_player()` builds a request, queues GET_PLAYER
        and executes it immediately.
        """
        async def function(**kwargs):
            request = self.create_request()
            getattr(request, func)(**kwargs)
            return await request.call()
        if func.upper() in RequestType.keys():
            return function
        else:
            raise AttributeError('{} not known.'.format(func))
class PGoApiRequest:
    """Builder for a batched RPC call against the API endpoint.

    Request and platform-request entries are queued via dynamic attribute
    access (see __getattr__) and sent together by call().
    """
    log = getLogger(__name__)

    def __init__(self, parent):
        self.__parent__ = parent
        self._req_method_list = []
        self._req_platform_list = []

    async def call(self):
        """Execute the queued requests.

        Retries transparently on expired access tokens and on server
        endpoint redirects.

        Raises:
            NoPlayerPositionSetException: if the parent has no lat/lon set.
        """
        parent = self.__parent__
        auth_provider = parent.auth_provider
        position = parent.position
        # Explicit check instead of `assert`, which is stripped under -O.
        if position[0] is None or position[1] is None:
            raise NoPlayerPositionSetException('No position set.')
        request = RpcApi(auth_provider, parent.state)
        while True:
            try:
                response = await request.request(
                    parent.api_endpoint, self._req_method_list,
                    self._req_platform_list, position, parent.device_info,
                    parent._proxy, parent.proxy_auth)
                break
            except AuthTokenExpiredException:
                self.log.info('Access token rejected! Requesting new one...')
                await auth_provider.get_access_token(force_refresh=True)
            except ServerApiEndpointRedirectException as e:
                self.log.debug('API endpoint redirect... re-executing call')
                parent.api_endpoint = e.endpoint
        # Cleanup after call execution. Bug fix: the platform list was
        # previously left populated, so a reused request object would
        # resend stale platform requests.
        self._req_method_list = []
        self._req_platform_list = []
        return response

    def list_curr_methods(self):
        """Print the queued request types with their numeric values."""
        for entry in self._req_method_list:
            # Entries are either a bare RequestType value or a
            # (value, kwargs) tuple when arguments were supplied; the
            # original crashed on the tuple form.
            value = entry[0] if isinstance(entry, tuple) else entry
            print("{} ({})".format(RequestType.Name(value), value))

    def __getattr__(self, func):
        """Queue any RequestType/PlatformRequestType by name.

        Returns a callable that appends the request (with optional kwargs)
        and returns self so calls can be chained.
        """
        func = func.upper()

        def function(**kwargs):
            self.log.debug('Creating a new request...')
            try:
                if func in RequestType.keys():
                    if kwargs:
                        self._req_method_list.append((RequestType.Value(func), kwargs))
                        self.log.debug("Arguments of '%s': \n\r%s", func, kwargs)
                    else:
                        self._req_method_list.append(RequestType.Value(func))
                        self.log.debug("Adding '%s' to RPC request", func)
                elif func in PlatformRequestType.keys():
                    if kwargs:
                        self._req_platform_list.append((PlatformRequestType.Value(func), kwargs))
                        self.log.debug("Arguments of '%s': \n\r%s", func, kwargs)
                    else:
                        self._req_platform_list.append(PlatformRequestType.Value(func))
                        self.log.debug("Adding '%s' to RPC request", func)
            except ValueError:
                raise AttributeError('{} not known.'.format(func))
            return self
        return function
<|file_name|>0031_premise_weight.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add an integer `weight` field (default 0) to the premise model."""

    dependencies = [
        ('premises', '0030_report_reason'),
    ]

    operations = [
        migrations.AddField(
            model_name='premise',
            name='weight',
            field=models.IntegerField(default=0),
            preserve_default=True,
        ),
    ]  # restored: the closing bracket of `operations` was truncated
<|file_name|>dashboard.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Render the dashboard tiles for the currently selected application revision.
// Reads the global `dashboardProperties`, filters entries by runtime, and
// injects the generated markup into the #dashboards element via jQuery.
function loadDashboards() {
    var dashboards = dashboardProperties.dashboards;
    var dashboard_list = '';
    for (var i=0; i<dashboards.length; i++) {
        var showDashboard = false;
        // A first runtime of "all" makes the dashboard visible for every runtime;
        // otherwise it is shown only when the selected revision's runtime matches.
        if (dashboards[i].runtimes[0] == "all") {
            showDashboard = true;
        } else {
            for (var j=0; j<dashboards[i].runtimes.length; j++) {
                if (dashboards[i].runtimes[j] == selectedApplicationRevision.runtimeId) {
                    showDashboard = true;
                    break;
                }
            }
        }
        var isAvailable = (dashboards[i].isAvailable == 'true');
        // NOTE(review): eval() resolves the utility object named in config
        // (e.g. "OperationalDashboardTypeUtil"). The config must be trusted;
        // a lookup map would be a safer dispatch mechanism.
        var url = dataAnalyticsServerUrl + dashboards[i].dashboardContext + eval(dashboards[i].dashboardTypeUtil).getQueryString();
        if (showDashboard) {
            var dashboard = '' +
                '<div class="col-xs-12 col-md-12 col-lg-12" data-toggle="tooltip" title="' + dashboards[i].title + '">' +
                '<a class="block-anch" href="' + url + ' " onclick="return ' + isAvailable +'" target="_blank">' +
                '<div class="block-monitoring wrapper">';
            if (!isAvailable) {
                // Unavailable dashboards still render, but with a ribbon and a
                // link whose onclick returns false (see isAvailable above).
                dashboard += '<div class="ribbon-wrapper"><div class="ribbon">Available Soon</div></div>';
            }
            dashboard += '<h3 class="ellipsis"><i class="fw fw-dashboard fw-lg icon"></i>' + dashboards[i].title + '</h3>' +
                '</div>' +
                '</a>' +
                '</div>';
            dashboard_list += dashboard;
        }
    }
    $("#dashboards").html(dashboard_list);
}
// DashboardTypeUtil interface: each dashboard type supplies a
// getQueryString() implementation that builds the URL suffix appended
// to the dashboard context in loadDashboards().
var DashboardTypeUtil = {
    getQueryString: function () {}
};
// define classes
var OperationalDashboardTypeUtil = function () {};
var HttpMonitoringDashboardTypeUtil = function () {};
var ESBAnalyticsDashboardTypeUtil = function () {};<|fim▁hole|>// extend the DashboardTypeUtil interface
OperationalDashboardTypeUtil.prototype = Object.create(DashboardTypeUtil);
HttpMonitoringDashboardTypeUtil.prototype = Object.create(DashboardTypeUtil);
ESBAnalyticsDashboardTypeUtil.prototype = Object.create(DashboardTypeUtil);
// actual implementation goes here
// Operational dashboard: shared per-application dashboard, identified by
// "<app>_<revision>_<hash>".
OperationalDashboardTypeUtil.prototype.getQueryString = function () {
    var dashboardId = applicationName + "_" + selectedRevision + "_" + selectedApplicationRevision.hashId;
    return "/t/" + tenantDomain + "/dashboards/operational-dashboard/?shared=true&id=" + dashboardId;
};
// HTTP monitoring dashboard: no extra query parameters beyond the context.
HttpMonitoringDashboardTypeUtil.prototype.getQueryString = function () {
    return "?";
};
// ESB analytics dashboard: shared view covering the last hour.
ESBAnalyticsDashboardTypeUtil.prototype.getQueryString = function () {
    var oneHourMillis = 3600000;
    var timeTo = new Date().getTime();
    var timeFrom = timeTo - oneHourMillis;
    return "/t/" + tenantDomain + "/dashboards/esb-analytics/?shared=true&timeFrom=" + timeFrom + "&timeTo=" + timeTo;
};
<|file_name|>mount.py<|end_file_name|><|fim▁begin|>import sys
import socket
import fcntl
import struct
import random
import os
import shutil
import subprocess
import time
import csv
import ipaddress
# Run `python3 -m unittest discover` in this dir to execute tests
# fstab options for NFS mounts: hard+TCP with retries so transient storage
# outages block rather than fail I/O.
default_mount_options_nfs = "nfs hard,nointr,proto=tcp,mountproto=tcp,retry=30 0 0"
# CIFS/SMB options for Azure Files mounts; uid/gid 1001 presumably matches the
# provisioned VM user — TODO confirm against the VM image.
default_mount_options_cifs = "dir_mode=0777,file_mode=0777,serverino,nofail,uid=1001,gid=1001,vers=3.0"
def get_ip_address():
    """Return this machine's outbound IPv4 address, or 127.0.0.1 on failure."""
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
        try:
            # A UDP connect() sends no packets; the target only needs to be
            # routable for the OS to select a source interface/address.
            s.connect(("10.255.255.255", 1))
            return s.getsockname()[0]
        except OSError:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; socket failures raise OSError.
            return "127.0.0.1"
def ip_as_int(ip):
    """Return the integer representation of an IP address string."""
    parsed = ipaddress.ip_address(ip)
    return int(parsed)
def remove_lines_containing(file, contains):
    """Rewrite *file* in place, dropping lines containing *contains* and blank lines."""
    with open(file, "r+") as handle:
        kept = [line for line in handle.readlines()
                if contains not in line and line != "\n"]
        handle.seek(0)
        handle.writelines(kept)
        # Discard any leftover tail from the original, longer content.
        handle.truncate()
def print_help():
    """Print usage examples for both supported mount types."""
    usage_lines = (
        "For example 'sudo python mount.py nfs '10.20.0.1:/folder1/nfsfolder2,10.20.0.1:/folder1/nfsfolder2'",
        "or 'sudo python mount.py azurefiles <storage-account-name>,<share-name>,<storage-account-key>'",
    )
    for line in usage_lines:
        print(line)
def install_apt_package(package):
    """Install an apt package, re-raising CalledProcessError on failure.

    Requires root privileges; callers wrap this in retryFunc because a
    concurrent apt instance on the machine makes the install fail.
    """
    try:
        print("Attempt to install {}".format(package))
        subprocess.check_call(["apt", "install", package, "-y"])
        print("Install completed successfully")
    except subprocess.CalledProcessError as e:
        # Log and propagate so the retry wrapper can decide what to do.
        print("Failed install {} error: {}".format(package, e))
        raise
# main allows the mount script to be executable
def main():
    """CLI entry point: expects argv[1] = mount type, argv[2] = mount data."""
    if len(sys.argv) < 3:
        print("Expected arg1: 'mount_type' and arg2 'mount_data'")
        print_help()
        exit(1)
    mount_type = str(sys.argv[1])
    mount_data = str(sys.argv[2])
    mount_share(mount_type, mount_data)
# mount_share allows it to be invoked from other python scripts
def mount_share(mount_type, mount_data):
    """Validate args, write fstab entries for the share, then mount with retries.

    mount_type: 'nfs' or 'azurefiles' (case-insensitive).
    mount_data: CSV of NFS endpoints, or '<account>,<share>,<key>' for
        Azure Files.
    Exits the process on invalid arguments or I/O failure.
    """
    if mount_type.lower() != "nfs" and mount_type.lower() != "azurefiles":
        print("Expected first arg to be either 'nfs' or 'azurefiles'")
        print_help()
        exit(1)
    if mount_data == "":
        print(
            """Expected second arg to be the mounting data. For NFS, this should be a CSV of IPs/FQDNS for the NFS servers with NFSExported dirs.
For example, '10.20.0.1:/folder1/nfsfolder2,10.20.0.1:/folder1/nfsfolder2'
For azure files this should be the azure files connection details."""
        )
        print_help()
        exit(2)
    # Bug fix: previously printed sys.argv[1]/[2], which is wrong (or crashes)
    # when this function is invoked from another script rather than the CLI.
    print("Mounting type: {}".format(mount_type))
    print("Mounting data: {}".format(mount_data))
    mount_point_permissions = 0o0777  # Todo: What permissions does this really need?
    primary_mount_folder = "/media/primarynfs"
    seconday_mount_folder_prefix = "/media/secondarynfs"
    fstab_file_path = "/etc/fstab"
    try:
        # Create folder to mount to
        if not os.path.exists(primary_mount_folder):
            os.makedirs(primary_mount_folder)
            os.chmod(primary_mount_folder, mount_point_permissions)
        # Make a backup of the fstab config in case we go wrong
        shutil.copy(fstab_file_path, "/etc/fstab-mountscriptbackup")
        # Clear existing NFS mount data to make script idempotent
        remove_lines_containing(fstab_file_path, primary_mount_folder)
        remove_lines_containing(fstab_file_path, seconday_mount_folder_prefix)
        if mount_type.lower() == "azurefiles":
            mount_azurefiles(fstab_file_path, mount_data, primary_mount_folder)
        if mount_type.lower() == "nfs":
            mount_nfs(fstab_file_path, mount_data, primary_mount_folder, mount_point_permissions)
    except IOError as e:
        print("I/O error({0})".format(e))
        exit(1)
    except:
        print("Unexpected error:{0}".format, sys.exc_info())
        raise
    print("Done editing fstab ... attempting mount")

    def mount_all():
        # `mount -a` applies every fstab entry, including the ones just added.
        subprocess.check_call(["mount", "-a"])
    retryFunc("mount shares", mount_all, 100)
def retryFunc(desc, funcToRetry, maxRetries):
    """Run funcToRetry until it succeeds, up to maxRetries attempts.

    Retry mounting for a while to handle the race where the VM exists
    before storage, or a temporary issue with storage. Backoff grows
    linearly (attempt * 3 seconds). Exits the process with code 3 when
    all attempts fail.

    Only subprocess.CalledProcessError triggers a retry; other exceptions
    propagate immediately.
    """
    print("Attempting, with retries, to: {}".format(desc))
    retryExponentialFactor = 3
    # Bug fix: the original looped over range(1, maxRetries), so the
    # `i == maxRetries` give-up branch was unreachable and only
    # maxRetries - 1 attempts were ever made.
    for i in range(1, maxRetries + 1):
        try:
            print("Attempt #{}".format(str(i)))
            funcToRetry()
        except subprocess.CalledProcessError as e:
            print("Failed:{0}".format(e))
            if i == maxRetries:
                print("Failed after max retries")
                exit(3)
            retry_in = i * retryExponentialFactor
            print("retrying in {0}secs".format(retry_in))
            time.sleep(retry_in)
        else:
            print("Succeeded to: {0} after {1} retries".format(desc, i))
            break
def mount_nfs(fstab_file_path, mount_data, primary_mount_folder, mount_point_permissions):
    """Append fstab entries for a cluster of NFS endpoints.

    One endpoint (chosen deterministically from this VM's IP) becomes the
    primary mount; the rest are mounted under /media/secondarynfsN.
    The loop body writing secondary mounts was reconstructed here — it had
    been scrambled in the source.
    """
    # Other apt instances on the machine may be doing an install;
    # this means ours will fail, so we retry to ensure success.
    def install_nfs():
        install_apt_package("nfs-common")
    retryFunc("install nfs-common", install_nfs, 20)
    ips = mount_data.split(",")
    print("Found ips:{}".format(",".join(ips)))
    # Deterministically select a primary node from the available
    # servers for this vm to use. By using the ip as a seed this ensures
    # re-running will get the same node as primary.
    # This enables spreading the load across multiple storage servers in a
    # cluster like `Avere` or `Gluster` for higher throughput.
    current_ip = get_ip_address()
    current_ip_int = ip_as_int(current_ip)
    print("Using ip as int: {0} for random seed".format((current_ip_int)))
    random.seed(current_ip_int)
    random_node = random.randint(0, len(ips) - 1)
    primary = ips[random_node]
    ips.remove(primary)
    secondarys = ips
    print("Primary node selected: {}".format(primary))
    print("Secondary nodes selected: {}".format(",".join(secondarys)))
    with open(fstab_file_path, "a") as file:
        print("Mounting primary")
        file.write(
            "\n{} {} {}".format(
                primary.strip(), primary_mount_folder, default_mount_options_nfs
            )
        )
        print("Mounting secondarys")
        number = 0
        for ip in secondarys:
            number = number + 1
            folder = "/media/secondarynfs" + str(number)
            if not os.path.exists(folder):
                os.makedirs(folder)
                os.chmod(folder, mount_point_permissions)
            file.write(
                "\n{} {} {}".format(ip.strip(), folder, default_mount_options_nfs)
            )
def mount_azurefiles(fstab_file_path, mount_data, primary_mount_folder):
    """Append an fstab CIFS entry for an Azure Files share.

    mount_data must be '<account>,<share>,<key>'; exits with code 1 otherwise.
    NOTE(review): the storage-account key ends up in plain text in fstab.
    """
    # Other apt instances on the machine may be doing an install
    # this means ours will fail so we retry to ensure success
    def install_cifs():
        install_apt_package("cifs-utils")
    retryFunc("install cifs-utils", install_cifs, 20)
    params = mount_data.split(",")
    if len(params) != 3:
        print("Wrong params for azure files mount, expected 3 as CSV")
        print_help()
        exit(1)
    account_name = params[0]
    share_name = params[1]
    account_key = params[2]
    with open(fstab_file_path, "a") as file:
        print("Mounting primary")
        # //<account>.file.core.windows.net/<share> <mountpoint> cifs <options>
        file.write(
            "\n//{0}.file.core.windows.net/{1} {2} cifs username={0},password={3},{4}".format(
                account_name,
                share_name,
                primary_mount_folder,
                account_key,
                default_mount_options_cifs,
            )
        )
# Script entry point; restored — the guard body had been clobbered by
# misplaced lines from mount_nfs.
if __name__ == "__main__":
    main()
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use expectest::prelude::*;
use maplit::*;
use serde_json::json;
use crate::models::{headers_from_json, Interaction, OptionalBody, PactSpecification, Consumer, Provider, ReadWritePact, write_pact};
use crate::models::content_types::JSON;
use crate::models::provider_states::ProviderState;
use crate::models::v4::{from_json, interaction_from_json, V4Interaction, V4Pact};
use crate::models::v4::http_parts::{HttpRequest, HttpResponse};
use crate::models::v4::http_parts::body_from_json;
use std::{io, env, fs};
use std::fs::File;
use std::io::Read;
// A V4 synchronous HTTP request with no "method" key must default to GET.
#[test]
fn synchronous_http_request_from_json_defaults_to_get() {
  let request_json : serde_json::Value = serde_json::from_str(r#"
    {
      "path": "/",
      "query": "",
      "headers": {}
    }
   "#).unwrap();
  let request = HttpRequest::from_json(&request_json);
  expect!(request.method).to(be_equal_to("GET"));
}
// A V4 synchronous HTTP request with no "path" key must default to "/".
#[test]
fn synchronous_http_request_from_json_defaults_to_root_for_path() {
  let request_json : serde_json::Value = serde_json::from_str(r#"
    {
      "method": "PUT",
      "query": "",
      "headers": {}
    }
   "#).unwrap();
  let request = HttpRequest::from_json(&request_json);
  assert_eq!(request.path, "/".to_string());
}
// A V4 synchronous HTTP response with no "status" key must default to 200.
#[test]
fn synchronous_http_response_from_json_defaults_to_status_200() {
  let response_json : serde_json::Value = serde_json::from_str(r#"
    {
      "headers": {}
    }
   "#).unwrap();
  let response = HttpResponse::from_json(&response_json);
  assert_eq!(response.status, 200);
}
#[test]
fn synchronous_http_request_content_type_falls_back_the_content_type_header_and_then_the_contents() {
let request_json = json!({
"headers": {},
"body": {
"content": "string"
}
});
let request = HttpRequest::from_json(&request_json);
expect!(request.body.content_type().unwrap()).to(be_equal_to("text/plain"));
let request_json = json!({
"headers": {
"Content-Type": ["text/html"]
},
"body": {
"content": "string"
}
});
let request = HttpRequest::from_json(&request_json);
expect!(request.body.content_type().unwrap()).to(be_equal_to("text/html"));
let request_json = json!({
"headers": {
"Content-Type": ["application/json; charset=UTF-8"]
},
"body": {
"content": "string"
}
});
let request = HttpRequest::from_json(&request_json);
expect!(request.body.content_type().unwrap()).to(be_equal_to("application/json;charset=utf-8"));
let request_json = json!({
"headers": {
"CONTENT-TYPE": ["application/json; charset=UTF-8"]
},
"body": {
"content": "string"
}
});
let request = HttpRequest::from_json(&request_json);
expect!(request.body.content_type().unwrap()).to(be_equal_to("application/json;charset=utf-8"));
let request_json = json!({
"body": {
"content": { "json": true }
}
});
let request = HttpRequest::from_json(&request_json);
expect!(request.body.content_type().unwrap()).to(be_equal_to("application/json"));
}
#[test]
fn loading_interaction_from_json() {
let interaction_json = json!({
"type": "Synchronous/HTTP",
"description": "String",
"providerStates": [{ "name": "provider state" }]
});
let interaction = interaction_from_json("", 0, &interaction_json).unwrap();
expect!(interaction.description()).to(be_equal_to("String"));
expect!(interaction.provider_states()).to(be_equal_to(vec![
ProviderState { name: "provider state".into(), params: hashmap!{} } ]));
}
// Without a "description" key the interaction description falls back to
// "Interaction <index>".
#[test]
fn defaults_to_number_if_no_description() {
  let interaction_json = json!({
    "type": "Synchronous/HTTP"
  });
  let interaction = interaction_from_json("", 0, &interaction_json).unwrap();
  expect!(interaction.description()).to(be_equal_to("Interaction 0"));
}
// Without a "providerStates" key the interaction has no provider states.
#[test]
fn defaults_to_empty_if_no_provider_state() {
  let interaction_json = json!({
    "type": "Synchronous/HTTP"
  });
  let interaction = interaction_from_json("", 0, &interaction_json).unwrap();
  expect!(interaction.provider_states().iter()).to(be_empty());
}
// An explicit JSON null for "providerStates" is treated the same as absent:
// no provider states on the parsed interaction.
#[test]
fn defaults_to_none_if_provider_state_null() {
  let interaction_json = json!({
    "type": "Synchronous/HTTP",
    "description": "String",
    "providerStates": null
  });
  let interaction = interaction_from_json("", 0, &interaction_json).unwrap();
  expect!(interaction.provider_states().iter()).to(be_empty());
}
// An empty JSON object parses to a V4 pact with default consumer/provider
// names, no interactions and no metadata.
#[test]
fn load_empty_pact() {
  let pact_json = json!({});
  let pact = from_json("", &pact_json).unwrap();
  expect!(pact.provider().name).to(be_equal_to("provider"));
  expect!(pact.consumer().name).to(be_equal_to("consumer"));
  expect!(pact.interactions().iter()).to(have_count(0));
  expect!(pact.metadata().iter()).to(have_count(0));
  expect!(pact.specification_version()).to(be_equal_to(PactSpecification::V4));
}
#[test]
fn load_basic_pact() {
let pact_json = json!({
"provider": {
"name": "Alice Service"
},
"consumer": {
"name": "Consumer"
},
"interactions": [
{
"type": "Synchronous/HTTP",
"description": "a retrieve Mallory request",
"request": {
"method": "GET",
"path": "/mallory",
"query": "name=ron&status=good"
},
"response": {
"status": 200,
"headers": {
"Content-Type": "text/html"
},
"body": {
"content": "\"That is some good Mallory.\""
}
}
}
]
});
let pact = from_json("", &pact_json).unwrap();
expect!(&pact.provider().name).to(be_equal_to("Alice Service"));
expect!(&pact.consumer().name).to(be_equal_to("Consumer"));
expect!(pact.interactions().iter()).to(have_count(1));
let interaction = pact.interactions()[0];
expect!(interaction.description()).to(be_equal_to("a retrieve Mallory request"));
expect!(interaction.provider_states().iter()).to(be_empty());
expect!(pact.specification_version()).to(be_equal_to(PactSpecification::V4));
expect!(pact.metadata().iter()).to(have_count(0));
let v4pact = pact.as_v4_pact().unwrap();
match &v4pact.interactions[0] {
V4Interaction::SynchronousHttp { request, response, .. } => {
expect!(request).to(be_equal_to(&HttpRequest {
method: "GET".into(),
path: "/mallory".into(),
query: Some(hashmap!{ "name".to_string() => vec!["ron".to_string()], "status".to_string() => vec!["good".to_string()] }),
headers: None,
body: OptionalBody::Missing,
.. HttpRequest::default()
}));
expect!(response).to(be_equal_to(&HttpResponse {
status: 200,
headers: Some(hashmap!{ "Content-Type".to_string() => vec!["text/html".to_string()] }),
body: OptionalBody::Present("\"That is some good Mallory.\"".into(), Some("text/html".into())),
.. HttpResponse::default()
}));
}
_ => panic!("Was expecting an HTTP pact")
}
}
#[test]
fn load_pact_encoded_query_string() {
let pact_json = json!({
"provider" : {
"name" : "test_provider"
},
"consumer" : {
"name" : "test_consumer"
},
"interactions" : [ {
"type": "Synchronous/HTTP",
"description" : "test interaction",
"request" : {
"query" : "datetime=2011-12-03T10%3A15%3A30%2B01%3A00&description=hello+world%21"
},
"response" : {
"status" : 200
}
} ],
"metadata" : {
"pactSpecification" : {
"version" : "4.0"
}
}
});
let pact = from_json("", &pact_json).unwrap();
expect!(pact.interactions().iter()).to(have_count(1));
let v4pact = pact.as_v4_pact().unwrap();
match &v4pact.interactions[0] {
V4Interaction::SynchronousHttp { request, .. } => {
expect!(&request.query).to(be_equal_to(
&Some(hashmap!{ "datetime".to_string() => vec!["2011-12-03T10:15:30+01:00".to_string()],
"description".to_string() => vec!["hello world!".to_string()] })));
}
_ => panic!("Was expecting an HTTP pact")
}
}
#[test]
fn load_pact_converts_methods_to_uppercase() {
let pact_json = json!({
"interactions" : [ {
"type": "Synchronous/HTTP",
"description" : "test interaction",
"request" : {
"method" : "get"
},
"response" : {
"status" : 200
}
} ],
"metadata" : {}
});
let pact = from_json("", &pact_json).unwrap();
expect!(pact.interactions().iter()).to(have_count(1));
let v4pact = pact.as_v4_pact().unwrap();
match &v4pact.interactions[0] {
V4Interaction::SynchronousHttp { request, .. } => {
expect!(&request.method).to(be_equal_to("GET"));
}
_ => panic!("Was expecting an HTTP pact")
}
}
// A default HttpRequest serialises to just the method and path.
#[test]
fn http_request_to_json_with_defaults() {
  let request = HttpRequest::default();
  expect!(request.to_json().to_string()).to(
    be_equal_to("{\"method\":\"GET\",\"path\":\"/\"}"));
}
// Lower-case methods are upper-cased on serialisation.
#[test]
fn http_request_to_json_converts_methods_to_upper_case() {
  let request = HttpRequest { method: "post".into(), .. HttpRequest::default() };
  expect!(request.to_json().to_string()).to(be_equal_to("{\"method\":\"POST\",\"path\":\"/\"}"));
}
#[test]
fn http_request_to_json_with_a_query() {
let request = HttpRequest { query: Some(hashmap!{
s!("a") => vec![s!("1"), s!("2")],
s!("b") => vec![s!("3")]
}), .. HttpRequest::default() };
expect!(request.to_json().to_string()).to(
be_equal_to(r#"{"method":"GET","path":"/","query":{"a":["1","2"],"b":["3"]}}"#)
);
}
#[test]
fn http_request_to_json_with_headers() {
let request = HttpRequest { headers: Some(hashmap!{
s!("HEADERA") => vec![s!("VALUEA")],
s!("HEADERB") => vec![s!("VALUEB1, VALUEB2")]
}), .. HttpRequest::default() };
expect!(request.to_json().to_string()).to(
be_equal_to(r#"{"headers":{"HEADERA":["VALUEA"],"HEADERB":["VALUEB1, VALUEB2"]},"method":"GET","path":"/"}"#)
);
}
#[test]
fn http_request_to_json_with_json_body() {
let request = HttpRequest { headers: Some(hashmap!{
s!("Content-Type") => vec![s!("application/json")]
}), body: OptionalBody::Present(r#"{"key": "value"}"#.into(), Some("application/json".into())), .. HttpRequest::default() };
expect!(request.to_json().to_string()).to(
be_equal_to(r#"{"body":{"content":{"key":"value"},"contentType":"application/json","encoded":false},"headers":{"Content-Type":["application/json"]},"method":"GET","path":"/"}"#)
);
}
#[test]
fn http_request_to_json_with_non_json_body() {
let request = HttpRequest { headers: Some(hashmap!{ s!("Content-Type") => vec![s!("text/plain")] }),
body: OptionalBody::Present("This is some text".into(), Some("text/plain".into())), .. HttpRequest::default() };
expect!(request.to_json().to_string()).to(
be_equal_to(r#"{"body":{"content":"This is some text","contentType":"text/plain","encoded":false},"headers":{"Content-Type":["text/plain"]},"method":"GET","path":"/"}"#)
);
}
// OptionalBody::Empty serialises as a body with an empty "content" string.
#[test]
fn http_request_to_json_with_empty_body() {
  let request = HttpRequest { body: OptionalBody::Empty, .. HttpRequest::default() };
  expect!(request.to_json().to_string()).to(
    be_equal_to(r#"{"body":{"content":""},"method":"GET","path":"/"}"#)
  );
}
// OptionalBody::Null omits the body key entirely from the JSON.
#[test]
fn http_request_to_json_with_null_body() {
  let request = HttpRequest { body: OptionalBody::Null, .. HttpRequest::default() };
  expect!(request.to_json().to_string()).to(
    be_equal_to(r#"{"method":"GET","path":"/"}"#)
  );
}
// A default HttpResponse serialises to just the status.
#[test]
fn http_response_to_json_with_defaults() {
  let response = HttpResponse::default();
  expect!(response.to_json().to_string()).to(be_equal_to("{\"status\":200}"));
}
#[test]
fn http_response_to_json_with_headers() {
let response = HttpResponse { headers: Some(hashmap!{
s!("HEADERA") => vec![s!("VALUEA")],
s!("HEADERB") => vec![s!("VALUEB1, VALUEB2")]
}), .. HttpResponse::default() };
expect!(response.to_json().to_string()).to(
be_equal_to(r#"{"headers":{"HEADERA":["VALUEA"],"HEADERB":["VALUEB1, VALUEB2"]},"status":200}"#)
);
}
#[test]
fn http_response_to_json_with_json_body() {
let response = HttpResponse { headers: Some(hashmap!{
s!("Content-Type") => vec![s!("application/json")]
}), body: OptionalBody::Present(r#"{"key": "value"}"#.into(), Some("application/json".into())), .. HttpResponse::default() };
expect!(response.to_json().to_string()).to(
be_equal_to(r#"{"body":{"content":{"key":"value"},"contentType":"application/json","encoded":false},"headers":{"Content-Type":["application/json"]},"status":200}"#)
);
}
#[test]
fn http_response_to_json_with_non_json_body() {
let response = HttpResponse { headers: Some(hashmap!{ s!("Content-Type") => vec![s!("text/plain")] }),
body: OptionalBody::Present("This is some text".into(), "text/plain".parse().ok()), .. HttpResponse::default() };
expect!(response.to_json().to_string()).to(
be_equal_to(r#"{"body":{"content":"This is some text","contentType":"text/plain","encoded":false},"headers":{"Content-Type":["text/plain"]},"status":200}"#)
);
}
// OptionalBody::Empty on a response serialises as an empty "content" string.
#[test]
fn http_response_to_json_with_empty_body() {
  let response = HttpResponse { body: OptionalBody::Empty, .. HttpResponse::default() };
  expect!(response.to_json().to_string()).to(
    be_equal_to(r#"{"body":{"content":""},"status":200}"#)
  );
}
// OptionalBody::Null on a response omits the body key entirely.
#[test]
fn http_response_to_json_with_null_body() {
  let response = HttpResponse { body: OptionalBody::Null, .. HttpResponse::default() };
  expect!(response.to_json().to_string()).to(
    be_equal_to(r#"{"status":200}"#)
  );
}
#[test]
fn interaction_from_json_sets_the_id_if_loaded_from_broker() {
let json = json!({
"type": "Synchronous/HTTP",
"_id": "123456789",
"description": "Test Interaction",
"request": {
"method": "GET",
"path": "/"
},
"response": {
"status": 200
}
});
let interaction = interaction_from_json("", 0, &json).unwrap();
let id = match interaction {
V4Interaction::SynchronousHttp { id, .. } => id,
V4Interaction::AsynchronousMessages { id, .. } => id
};
expect!(id).to(be_some().value("123456789".to_string()));
}
// Read the entire pact file into a String (UTF-8), propagating I/O errors.
fn read_pact_file(file: &str) -> io::Result<String> {
  let mut contents = String::new();
  File::open(file)?.read_to_string(&mut contents)?;
  Ok(contents)
}
#[test]
fn write_pact_test() {
let pact = V4Pact { consumer: Consumer { name: s!("write_pact_test_consumer") },
provider: Provider { name: s!("write_pact_test_provider") },
interactions: vec![
V4Interaction::SynchronousHttp {
id: None,
key: None,
description: s!("Test Interaction"),
provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
request: Default::default(),
response: Default::default()
}
],
.. V4Pact::default() };
let mut dir = env::temp_dir();
let x = rand::random::<u16>();
dir.push(format!("pact_test_{}", x));
dir.push(pact.default_file_name());
let result = write_pact(&pact, &dir, PactSpecification::V4, true);
let pact_file = read_pact_file(dir.as_path().to_str().unwrap()).unwrap_or_default();
fs::remove_dir_all(dir.parent().unwrap()).unwrap_or(());
expect!(result).to(be_ok());
expect!(pact_file).to(be_equal_to(format!(r#"{{
"consumer": {{
"name": "write_pact_test_consumer"
}},
"interactions": [
{{
"description": "Test Interaction",
"key": "53d3170820ad2160",
"providerStates": [
{{
"name": "Good state to be in"
}}
],
"request": {{
"method": "GET",
"path": "/"
}},
"response": {{
"status": 200
}},
"type": "Synchronous/HTTP"
}}
],
"metadata": {{
"pactRust": {{
"version": "{}"
}},
"pactSpecification": {{
"version": "4.0"
}}
}},
"provider": {{
"name": "write_pact_test_provider"
}}
}}"#, super::PACT_RUST_VERSION.unwrap())));
}
// #[test]
// fn write_pact_test_should_merge_pacts() {
// let pact = RequestResponsePact { consumer: Consumer { name: s!("merge_consumer") },
// provider: Provider { name: s!("merge_provider") },
// interactions: vec![
// RequestResponseInteraction {
// description: s!("Test Interaction 2"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// }
// ],
// metadata: btreemap!{},
// specification_version: PactSpecification::V1_1
// };
// let pact2 = RequestResponsePact { consumer: Consumer { name: s!("merge_consumer") },
// provider: Provider { name: s!("merge_provider") },
// interactions: vec![
// RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// }
// ],
// metadata: btreemap!{},
// specification_version: PactSpecification::V1_1
// };
// let mut dir = env::temp_dir();<|fim▁hole|>// let result = pact.write_pact(dir.as_path(), PactSpecification::V2);
// let result2 = pact2.write_pact(dir.as_path(), PactSpecification::V2);
//
// let pact_file = read_pact_file(dir.as_path().to_str().unwrap()).unwrap_or(s!(""));
// fs::remove_dir_all(dir.parent().unwrap()).unwrap_or(());
//
// expect!(result).to(be_ok());
// expect!(result2).to(be_ok());
// expect!(pact_file).to(be_equal_to(format!(r#"{{
// "consumer": {{
// "name": "merge_consumer"
// }},
// "interactions": [
// {{
// "description": "Test Interaction",
// "providerState": "Good state to be in",
// "request": {{
// "method": "GET",
// "path": "/"
// }},
// "response": {{
// "status": 200
// }}
// }},
// {{
// "description": "Test Interaction 2",
// "providerState": "Good state to be in",
// "request": {{
// "method": "GET",
// "path": "/"
// }},
// "response": {{
// "status": 200
// }}
// }}
// ],
// "metadata": {{
// "pactRust": {{
// "version": "{}"
// }},
// "pactSpecification": {{
// "version": "2.0.0"
// }}
// }},
// "provider": {{
// "name": "merge_provider"
// }}
// }}"#, super::VERSION.unwrap())));
// }
//
// #[test]
// fn write_pact_test_should_not_merge_pacts_with_conflicts() {
// let pact = RequestResponsePact { consumer: Consumer { name: s!("write_pact_test_consumer") },
// provider: Provider { name: s!("write_pact_test_provider") },
// interactions: vec![
// RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// }
// ],
// metadata: btreemap!{},
// specification_version: PactSpecification::V1_1
// };
// let pact2 = RequestResponsePact { consumer: Consumer { name: s!("write_pact_test_consumer") },
// provider: Provider { name: s!("write_pact_test_provider") },
// interactions: vec![
// RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// response: Response { status: 400, .. Response::default() },
// .. RequestResponseInteraction::default()
// }
// ],
// metadata: btreemap!{},
// specification_version: PactSpecification::V1_1
// };
// let mut dir = env::temp_dir();
// let x = rand::random::<u16>();
// dir.push(format!("pact_test_{}", x));
// dir.push(pact.default_file_name());
//
// let result = pact.write_pact(dir.as_path(), PactSpecification::V2);
// let result2 = pact2.write_pact(dir.as_path(), PactSpecification::V2);
//
// let pact_file = read_pact_file(dir.as_path().to_str().unwrap()).unwrap_or(s!(""));
// fs::remove_dir_all(dir.parent().unwrap()).unwrap_or(());
//
// expect!(result).to(be_ok());
// expect!(result2).to(be_err());
// expect!(pact_file).to(be_equal_to(format!(r#"{{
// "consumer": {{
// "name": "write_pact_test_consumer"
// }},
// "interactions": [
// {{
// "description": "Test Interaction",
// "providerState": "Good state to be in",
// "request": {{
// "method": "GET",
// "path": "/"
// }},
// "response": {{
// "status": 200
// }}
// }}
// ],
// "metadata": {{
// "pactRust": {{
// "version": "{}"
// }},
// "pactSpecification": {{
// "version": "2.0.0"
// }}
// }},
// "provider": {{
// "name": "write_pact_test_provider"
// }}
// }}"#, super::VERSION.unwrap())));
// }
//
// #[test]
// fn pact_merge_does_not_merge_different_consumers() {
// let pact = RequestResponsePact { consumer: Consumer { name: s!("test_consumer") },
// provider: Provider { name: s!("test_provider") },
// interactions: vec![],
// metadata: btreemap!{},
// specification_version: PactSpecification::V1
// };
// let pact2 = RequestResponsePact { consumer: Consumer { name: s!("test_consumer2") },
// provider: Provider { name: s!("test_provider") },
// interactions: vec![],
// metadata: btreemap!{},
// specification_version: PactSpecification::V1_1
// };
// expect!(pact.merge(&pact2)).to(be_err());
// }
//
// #[test]
// fn pact_merge_does_not_merge_different_providers() {
// let pact = RequestResponsePact { consumer: Consumer { name: s!("test_consumer") },
// provider: Provider { name: s!("test_provider") },
// interactions: vec![],
// metadata: btreemap!{},
// specification_version: PactSpecification::V1_1
// };
// let pact2 = RequestResponsePact { consumer: Consumer { name: s!("test_consumer") },
// provider: Provider { name: s!("test_provider2") },
// interactions: vec![],
// metadata: btreemap!{},
// specification_version: PactSpecification::V1_1
// };
// expect!(pact.merge(&pact2)).to(be_err());
// }
//
// #[test]
// fn pact_merge_does_not_merge_where_there_are_conflicting_interactions() {
// let pact = RequestResponsePact { consumer: Consumer { name: s!("test_consumer") },
// provider: Provider { name: s!("test_provider") },
// interactions: vec![
// RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// }
// ],
// metadata: btreemap!{},
// specification_version: PactSpecification::V1_1
// };
// let pact2 = RequestResponsePact { consumer: Consumer { name: s!("test_consumer") },
// provider: Provider { name: s!("test_provider") },
// interactions: vec![
// RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// request: Request { path: s!("/other"), .. Request::default() },
// .. RequestResponseInteraction::default()
// }
// ],
// metadata: btreemap!{},
// specification_version: PactSpecification::V1_1
// };
// expect!(pact.merge(&pact2)).to(be_err());
// }
//
// #[test]
// fn pact_merge_removes_duplicates() {
// let pact = RequestResponsePact { consumer: Consumer { name: s!("test_consumer") },
// provider: Provider { name: s!("test_provider") },
// interactions: vec![
// RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// }
// ],
// .. RequestResponsePact::default()
// };
// let pact2 = RequestResponsePact { consumer: Consumer { name: s!("test_consumer") },
// provider: Provider { name: s!("test_provider") },
// interactions: vec![
// RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// },
// RequestResponseInteraction {
// description: s!("Test Interaction 2"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// }
// ],
// .. RequestResponsePact::default()
// };
//
// let merged_pact = pact.merge(&pact2);
// expect!(merged_pact.clone()).to(be_ok());
// expect!(merged_pact.clone().unwrap().interactions.len()).to(be_equal_to(2));
//
// let merged_pact2 = pact.merge(&pact.clone());
// expect!(merged_pact2.clone()).to(be_ok());
// expect!(merged_pact2.clone().unwrap().interactions.len()).to(be_equal_to(1));
// }
//
// #[test]
// fn interactions_do_not_conflict_if_they_have_different_descriptions() {
// let interaction1 = RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// };
// let interaction2 = RequestResponseInteraction {
// description: s!("Test Interaction 2"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// };
// expect!(interaction1.conflicts_with(&interaction2).iter()).to(be_empty());
// }
//
// #[test]
// fn interactions_do_not_conflict_if_they_have_different_provider_states() {
// let interaction1 = RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// };
// let interaction2 = RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Bad state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// };
// expect!(interaction1.conflicts_with(&interaction2).iter()).to(be_empty());
// }
//
// #[test]
// fn interactions_do_not_conflict_if_they_have_the_same_requests_and_responses() {
// let interaction1 = RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// };
// let interaction2 = RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// };
// expect!(interaction1.conflicts_with(&interaction2).iter()).to(be_empty());
// }
//
// #[test]
// fn interactions_conflict_if_they_have_different_requests() {
// let interaction1 = RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// };
// let interaction2 = RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// request: Request { method: s!("POST"), .. Request::default() },
// .. RequestResponseInteraction::default()
// };
// expect!(interaction1.conflicts_with(&interaction2).iter()).to_not(be_empty());
// }
//
// #[test]
// fn interactions_conflict_if_they_have_different_responses() {
// let interaction1 = RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// .. RequestResponseInteraction::default()
// };
// let interaction2 = RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// response: Response { status: 400, .. Response::default() },
// .. RequestResponseInteraction::default()
// };
// expect!(interaction1.conflicts_with(&interaction2).iter()).to_not(be_empty());
// }
fn hash<T: Hash>(t: &T) -> u64 {
let mut s = DefaultHasher::new();
t.hash(&mut s);
s.finish()
}
#[test]
fn hash_for_http_request() {
let request1 = HttpRequest::default();
let request2 = HttpRequest { method: s!("POST"), .. HttpRequest::default() };
let request3 = HttpRequest { headers: Some(hashmap!{
s!("H1") => vec![s!("A")]
}), .. HttpRequest::default() };
let request4 = HttpRequest { headers: Some(hashmap!{
s!("H1") => vec![s!("B")]
}), .. HttpRequest::default() };
expect!(hash(&request1)).to(be_equal_to(hash(&request1)));
expect!(hash(&request3)).to(be_equal_to(hash(&request3)));
expect!(hash(&request1)).to_not(be_equal_to(hash(&request2)));
expect!(hash(&request3)).to_not(be_equal_to(hash(&request4)));
}
#[test]
fn hash_for_http_response() {
let response1 = HttpResponse::default();
let response2 = HttpResponse { status: 400, .. HttpResponse::default() };
let response3 = HttpResponse { headers: Some(hashmap!{
s!("H1") => vec![s!("A")]
}), .. HttpResponse::default() };
let response4 = HttpResponse { headers: Some(hashmap!{
s!("H1") => vec![s!("B")]
}), .. HttpResponse::default() };
expect!(hash(&response1)).to(be_equal_to(hash(&response1)));
expect!(hash(&response3)).to(be_equal_to(hash(&response3)));
expect!(hash(&response1)).to_not(be_equal_to(hash(&response2)));
expect!(hash(&response3)).to_not(be_equal_to(hash(&response4)));
}
// #[test]
// fn write_pact_test_with_matchers() {
// let pact = RequestResponsePact { consumer: Consumer { name: s!("write_pact_test_consumer") },
// provider: Provider { name: s!("write_pact_test_provider") },
// interactions: vec![
// RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// request: Request {
// matching_rules: matchingrules!{
// "body" => {
// "$" => [ MatchingRule::Type ]
// }
// },
// .. Request::default()
// },
// .. RequestResponseInteraction::default()
// }
// ],
// .. RequestResponsePact::default() };
// let mut dir = env::temp_dir();
// let x = rand::random::<u16>();
// dir.push(format!("pact_test_{}", x));
// dir.push(pact.default_file_name());
//
// let result = pact.write_pact(dir.as_path(), PactSpecification::V2);
//
// let pact_file = read_pact_file(dir.as_path().to_str().unwrap()).unwrap_or(s!(""));
// fs::remove_dir_all(dir.parent().unwrap()).unwrap_or(());
//
// expect!(result).to(be_ok());
// expect!(pact_file).to(be_equal_to(format!(r#"{{
// "consumer": {{
// "name": "write_pact_test_consumer"
// }},
// "interactions": [
// {{
// "description": "Test Interaction",
// "providerState": "Good state to be in",
// "request": {{
// "matchingRules": {{
// "$.body": {{
// "match": "type"
// }}
// }},
// "method": "GET",
// "path": "/"
// }},
// "response": {{
// "status": 200
// }}
// }}
// ],
// "metadata": {{
// "pactRust": {{
// "version": "{}"
// }},
// "pactSpecification": {{
// "version": "2.0.0"
// }}
// }},
// "provider": {{
// "name": "write_pact_test_provider"
// }}
// }}"#, super::VERSION.unwrap())));
// }
//
// #[test]
// fn write_pact_v3_test_with_matchers() {
// let pact = RequestResponsePact { consumer: Consumer { name: s!("write_pact_test_consumer_v3") },
// provider: Provider { name: s!("write_pact_test_provider_v3") },
// interactions: vec![
// RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// request: Request {
// matching_rules: matchingrules!{
// "body" => {
// "$" => [ MatchingRule::Type ]
// },
// "header" => {
// "HEADER_A" => [ MatchingRule::Include(s!("ValA")), MatchingRule::Include(s!("ValB")) ]
// }
// },
// .. Request::default()
// },
// .. RequestResponseInteraction::default()
// }
// ],
// .. RequestResponsePact::default() };
// let mut dir = env::temp_dir();
// let x = rand::random::<u16>();
// dir.push(format!("pact_test_{}", x));
// dir.push(pact.default_file_name());
//
// let result = pact.write_pact(dir.as_path(), PactSpecification::V3);
//
// let pact_file = read_pact_file(dir.as_path().to_str().unwrap()).unwrap_or(s!(""));
// fs::remove_dir_all(dir.parent().unwrap()).unwrap_or(());
//
// expect!(result).to(be_ok());
// expect!(pact_file).to(be_equal_to(format!(r#"{{
// "consumer": {{
// "name": "write_pact_test_consumer_v3"
// }},
// "interactions": [
// {{
// "description": "Test Interaction",
// "providerStates": [
// {{
// "name": "Good state to be in"
// }}
// ],
// "request": {{
// "matchingRules": {{
// "body": {{
// "$": {{
// "combine": "AND",
// "matchers": [
// {{
// "match": "type"
// }}
// ]
// }}
// }},
// "header": {{
// "HEADER_A": {{
// "combine": "AND",
// "matchers": [
// {{
// "match": "include",
// "value": "ValA"
// }},
// {{
// "match": "include",
// "value": "ValB"
// }}
// ]
// }}
// }}
// }},
// "method": "GET",
// "path": "/"
// }},
// "response": {{
// "status": 200
// }}
// }}
// ],
// "metadata": {{
// "pactRust": {{
// "version": "{}"
// }},
// "pactSpecification": {{
// "version": "3.0.0"
// }}
// }},
// "provider": {{
// "name": "write_pact_test_provider_v3"
// }}
// }}"#, super::VERSION.unwrap())));
// }
#[test]
fn body_from_json_returns_missing_if_there_is_no_body() {
let json = json!({});
let body = body_from_json(&json, "body", &None);
expect!(body).to(be_equal_to(OptionalBody::Missing));
}
#[test]
fn body_from_json_returns_null_if_the_body_is_null() {
let json = json!({
"path": "/",
"query": "",
"headers": {},
"body": null
});
let body = body_from_json(&json, "body", &None);
expect!(body).to(be_equal_to(OptionalBody::Null));
}
#[test]
fn body_from_json_returns_json_string_if_the_body_is_json_but_not_a_string() {
let json = json!({
"path": "/",
"query": "",
"headers": {},
"body": {
"content": {
"test": true
}
}
});
let body = body_from_json(&json, "body", &None);
expect!(body).to(be_equal_to(OptionalBody::Present("{\"test\":true}".into(),
Some(JSON.clone()))));
}
#[test]
fn body_from_json_returns_empty_if_the_body_is_an_empty_string() {
let json = json!({
"path": "/",
"query": "",
"headers": {},
"body": {
"content": ""
}
});
let body = body_from_json(&json, "body", &None);
expect!(body).to(be_equal_to(OptionalBody::Empty));
}
#[test]
fn body_from_json_returns_the_body_if_the_body_is_a_string() {
let json = json!({
"path": "/",
"query": "",
"headers": {},
"body": {
"content": "<?xml version=\"1.0\"?> <body></body>"
}
});
let body = body_from_json(&json, "body", &None);
expect!(body).to(be_equal_to(
OptionalBody::Present("<?xml version=\"1.0\"?> <body></body>".into(),
Some("application/xml".into()))));
}
#[test]
fn body_from_text_plain_type_returns_the_same_formatted_body() {
let json = json!({
"path": "/",
"query": "",
"headers": {"Content-Type": "text/plain"},
"body": {
"content": "\"This is a string\""
}
});
let headers = headers_from_json(&json);
let body = body_from_json(&json, "body", &headers);
expect!(body).to(be_equal_to(OptionalBody::Present("\"This is a string\"".into(), Some("text/plain".into()))));
}
#[test]
fn body_from_text_html_type_returns_the_same_formatted_body() {
let json = json!({
"path": "/",
"query": "",
"headers": {"Content-Type": "text/html"},
"body": {
"content": "\"This is a string\""
}
});
let headers = headers_from_json(&json);
let body = body_from_json(&json, "body", &headers);
expect!(body).to(be_equal_to(OptionalBody::Present("\"This is a string\"".into(), Some("text/html".into()))));
}
#[test]
fn body_from_json_returns_the_a_json_formatted_body_if_the_body_is_a_string_and_encoding_is_json() {
let json = json!({
"body": {
"content": "This is actually a JSON string",
"contentType": "application/json",
"encoded": "json"
}
});
let body = body_from_json(&json, "body", &None);
expect!(body).to(be_equal_to(OptionalBody::Present("\"This is actually a JSON string\"".into(), Some("application/json".into()))));
}
#[test]
fn body_from_json_returns_the_raw_body_if_there_is_no_encoded_value() {
let json = json!({
"path": "/",
"query": "",
"headers": {"Content-Type": "application/json"},
"body": {
"content": "{\"test\":true}"
}
});
let headers = headers_from_json(&json);
let body = body_from_json(&json, "body", &headers);
expect!(body).to(be_equal_to(OptionalBody::Present("{\"test\":true}".into(), Some("application/json".into()))));
}
// #[test]
// fn write_v3_pact_test() {
// let pact = RequestResponsePact { consumer: Consumer { name: s!("write_pact_test_consumer") },
// provider: Provider { name: s!("write_pact_test_provider") },
// interactions: vec![
// RequestResponseInteraction {
// description: s!("Test Interaction"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// request: Request {
// query: Some(hashmap!{
// s!("a") => vec![s!("1"), s!("2"), s!("3")],
// s!("b") => vec![s!("bill"), s!("bob")],
// }),
// .. Request::default()
// },
// .. RequestResponseInteraction::default()
// }
// ],
// .. RequestResponsePact::default() };
// let mut dir = env::temp_dir();
// let x = rand::random::<u16>();
// dir.push(format!("pact_test_{}", x));
// dir.push(pact.default_file_name());
//
// let result = pact.write_pact(dir.as_path(), PactSpecification::V3);
//
// let pact_file = read_pact_file(dir.as_path().to_str().unwrap()).unwrap_or(s!(""));
// fs::remove_dir_all(dir.parent().unwrap()).unwrap_or(());
//
// expect!(result).to(be_ok());
// expect!(pact_file).to(be_equal_to(format!(r#"{{
// "consumer": {{
// "name": "write_pact_test_consumer"
// }},
// "interactions": [
// {{
// "description": "Test Interaction",
// "providerStates": [
// {{
// "name": "Good state to be in"
// }}
// ],
// "request": {{
// "method": "GET",
// "path": "/",
// "query": {{
// "a": [
// "1",
// "2",
// "3"
// ],
// "b": [
// "bill",
// "bob"
// ]
// }}
// }},
// "response": {{
// "status": 200
// }}
// }}
// ],
// "metadata": {{
// "pactRust": {{
// "version": "{}"
// }},
// "pactSpecification": {{
// "version": "3.0.0"
// }}
// }},
// "provider": {{
// "name": "write_pact_test_provider"
// }}
// }}"#, super::VERSION.unwrap())));
// }
//
// #[test]
// fn write_pact_test_with_generators() {
// let pact = RequestResponsePact { consumer: Consumer { name: s!("write_pact_test_consumer") },
// provider: Provider { name: s!("write_pact_test_provider") },
// interactions: vec![
// RequestResponseInteraction {
// description: s!("Test Interaction with generators"),
// provider_states: vec![ProviderState { name: s!("Good state to be in"), params: hashmap!{} }],
// request: Request {
// generators: generators!{
// "BODY" => {
// "$" => Generator::RandomInt(1, 10)
// },
// "HEADER" => {
// "A" => Generator::RandomString(20)
// }
// },
// .. Request::default()
// },
// .. RequestResponseInteraction::default()
// }
// ],
// .. RequestResponsePact::default() };
// let mut dir = env::temp_dir();
// let x = rand::random::<u16>();
// dir.push(format!("pact_test_{}", x));
// dir.push(pact.default_file_name());
//
// let result = pact.write_pact(dir.as_path(), PactSpecification::V3);
//
// let pact_file = read_pact_file(dir.as_path().to_str().unwrap()).unwrap_or(s!(""));
// fs::remove_dir_all(dir.parent().unwrap()).unwrap_or(());
//
// expect!(result).to(be_ok());
// expect!(pact_file).to(be_equal_to(format!(r#"{{
// "consumer": {{
// "name": "write_pact_test_consumer"
// }},
// "interactions": [
// {{
// "description": "Test Interaction with generators",
// "providerStates": [
// {{
// "name": "Good state to be in"
// }}
// ],
// "request": {{
// "generators": {{
// "body": {{
// "$": {{
// "max": 10,
// "min": 1,
// "type": "RandomInt"
// }}
// }},
// "header": {{
// "A": {{
// "size": 20,
// "type": "RandomString"
// }}
// }}
// }},
// "method": "GET",
// "path": "/"
// }},
// "response": {{
// "status": 200
// }}
// }}
// ],
// "metadata": {{
// "pactRust": {{
// "version": "{}"
// }},
// "pactSpecification": {{
// "version": "3.0.0"
// }}
// }},
// "provider": {{
// "name": "write_pact_test_provider"
// }}
// }}"#, super::VERSION.unwrap())));
// }<|fim▁end|> | // let x = rand::random::<u16>();
// dir.push(format!("pact_test_{}", x));
// dir.push(pact.default_file_name());
// |
<|file_name|>inventory.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+<|fim▁hole|># | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# ails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
def paint_icon_inventory(what, row, tags, customer_vars):
if (what == "host" or row.get("service_check_command","").startswith("check_mk_active-cmk_inv!")) \
and inventory.has_inventory(row["host_name"]):
return link_to_view(html.render_icon('inv', _("Show Hardware/Software-Inventory of this host")),
row, 'inv_host' )
multisite_icons.append({
'host_columns': [ "name" ],
'paint': paint_icon_inventory,
})<|fim▁end|> | # | ____ _ _ __ __ _ __ | |
<|file_name|>DefaultScene.cpp<|end_file_name|><|fim▁begin|>///////////////////////////////////////////////////////////////////////////////
// This source file is part of Hect.
//
// Copyright (c) 2016 Colin Hill
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
///////////////////////////////////////////////////////////////////////////////
#include "DefaultScene.h"
#include "Hect/Runtime/Platform.h"
#include "Hect/Scene/Systems/InputSystem.h"
#include "Hect/Scene/Systems/PhysicsSystem.h"
#include "Hect/Scene/Systems/TransformSystem.h"
#include "Hect/Scene/Systems/BoundingBoxSystem.h"
#include "Hect/Scene/Systems/CameraSystem.h"
#include "Hect/Scene/Systems/DebugSystem.h"
#include "Hect/Scene/Systems/InterfaceSystem.h"
using namespace hect;
DefaultScene::DefaultScene(Engine& engine) :
Scene(engine),
_interface_system(*this, engine.asset_cache(), engine.platform(), engine.renderer(), engine.vector_renderer()),
_debug_system(*this, engine.asset_cache(), _interface_system),
_input_system(*this, engine.platform(), engine.settings()),
_camera_system(*this),
_bounding_box_system(*this, _debug_system),
_transform_system(*this, _bounding_box_system),
_physics_system(*this, _transform_system),
_scene_renderer(engine.asset_cache(), engine.task_pool())
{
Platform& platform = engine.platform();
if (platform.has_keyboard())
{
Keyboard& keyboard = platform.keyboard();
keyboard.register_listener(*this);
}
}
<|fim▁hole|>void DefaultScene::pre_tick(Seconds time_step)
{
Scene::refresh();
_input_system.update_axes(time_step);
_debug_system.clear_enqueued_debug_geometry();
}
void DefaultScene::post_tick(Seconds time_step)
{
_physics_system.wait_for_simulation_task();
_physics_system.sync_with_simulation();
_physics_system.begin_simulation_task(engine().task_pool(), time_step);
_transform_system.update_committed_transforms();
_camera_system.update_all_cameras();
_interface_system.tick_all_interfaces(time_step);
if (_debug_rendering_enabled)
{
_bounding_box_system.render_debug_geometry();
}
Scene::refresh();
}
void DefaultScene::tick(Seconds time_step)
{
pre_tick(time_step);
post_tick(time_step);
}
void DefaultScene::render(RenderTarget& target)
{
Renderer& renderer = engine().renderer();
_scene_renderer.render(*this, _camera_system, renderer, target);
_interface_system.render_all_interfaces();
}
void DefaultScene::receive_event(const KeyboardEvent& event)
{
if (event.is_key_down(Key::F5))
{
_debug_rendering_enabled = !_debug_rendering_enabled;
}
}
InterfaceSystem& DefaultScene::interface_system()
{
return _interface_system;
}
DebugSystem& DefaultScene::debug_system()
{
return _debug_system;
}
InputSystem& DefaultScene::input_system()
{
return _input_system;
}
CameraSystem& DefaultScene::camera_system()
{
return _camera_system;
}
BoundingBoxSystem& DefaultScene::bounding_box_system()
{
return _bounding_box_system;
}
TransformSystem& DefaultScene::transform_system()
{
return _transform_system;
}
PhysicsSystem& DefaultScene::physics_system()
{
return _physics_system;
}<|fim▁end|> | |
<|file_name|>commands.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Module for pull and push command."""
import abc
from base64 import b64decode
import six
from six.moves import configparser
from ..core.api import API
from ..core.commands import AbstractCommand
from ..core.models.terminal import clean_order
from .client.controllers import ApiController
from .client.cryptor import RNCryptor
from ..core.storage.strategies import RelatedGetStrategy, SyncSaveStrategy
@six.add_metaclass(abc.ABCMeta)
class CloudSynchronizationCommand(AbstractCommand):
"""Base class for pull and push commands."""
def extend_parser(self, parser):
"""Add more arguments to parser."""
parser.add_argument('-p', '--password', metavar='PASSWORD')
return parser
@abc.abstractmethod
def process_sync(self, api_controller):
"""Do sync staff here."""
pass
def take_action(self, parsed_args):
"""Process CLI call."""
encryption_salt = b64decode(self.config.get('User', 'salt'))
hmac_salt = b64decode(self.config.get('User', 'hmac_salt'))
password = parsed_args.password
if password is None:<|fim▁hole|> cryptor = RNCryptor()
cryptor.password = password
cryptor.encryption_salt = encryption_salt
cryptor.hmac_salt = hmac_salt
controller = ApiController(self.storage, self.config, cryptor)
with self.storage:
self.process_sync(controller)
def validate_password(self, password):
"""Raise an error when password invalid."""
username = self.config.get('User', 'username')
API().login(username, password)
class PushCommand(CloudSynchronizationCommand):
"""Push data to Termius cloud."""
get_strategy = RelatedGetStrategy
save_strategy = SyncSaveStrategy
def process_sync(self, api_controller):
"""Push outdated local instances."""
try:
api_controller.put_setting()
api_controller.post_bulk()
except (configparser.NoSectionError, configparser.NoOptionError):
self.log.error('Call pull at first.')
else:
self.log.info('Push data to Termius cloud.')
class PullCommand(CloudSynchronizationCommand):
"""Pull data from Termius cloud."""
save_strategy = SyncSaveStrategy
def process_sync(self, api_controller):
"""Pull updated remote instances."""
api_controller.get_settings()
api_controller.get_bulk()
self.log.info('Pull data from Termius cloud.')
class FullCleanCommand(CloudSynchronizationCommand):
"""Pull, delete all data and push to Termius cloud."""
get_strategy = RelatedGetStrategy
save_strategy = SyncSaveStrategy
supported_models = clean_order
def process_sync(self, api_controller):
"""Pull updated remote instances."""
api_controller.get_bulk()
with self.storage:
self.full_clean()
api_controller.post_bulk()
self.log.info('Full clean data from Termius cloud.')
def full_clean(self):
"""Remove all local and remote instances."""
for model in self.supported_models:
self.log.info('Start cleaning %s...', model)
instances = self.storage.get_all(model)
for i in instances:
self.storage.delete(i)
self.log.info('Complete cleaning')
class CryptoCommand(CloudSynchronizationCommand):
"""Command for crypting and decrypting text."""
def extend_parser(self, parser):
"""Add more arguments to parser."""
super(CryptoCommand, self).extend_parser(parser)
parser.add_argument(
'-d', '--decrypt',
action='store_const', const='decrypt',
dest='operation'
)
parser.add_argument(
'-e', '--encrypt',
action='store_const', const='encrypt',
dest='operation'
)
parser.add_argument('text', nargs=1, metavar='TEXT', action='store')
return parser
def process_sync(self, api_controller):
"""Do sync staff here."""
pass
def take_action(self, parsed_args):
"""Process decrypt and encrypt text."""
encryption_salt = b64decode(self.config.get('User', 'salt'))
hmac_salt = b64decode(self.config.get('User', 'hmac_salt'))
password = parsed_args.password
if password is None:
password = self.prompt_password()
self.validate_password(password)
cryptor = RNCryptor()
cryptor.password = password
cryptor.encryption_salt = encryption_salt
cryptor.hmac_salt = hmac_salt
for i in parsed_args.text:
result_text = getattr(cryptor, parsed_args.operation)(i)
self.app.stdout.write('{}\n'.format(result_text))<|fim▁end|> | password = self.prompt_password()
self.validate_password(password) |
<|file_name|>movie-result.directive.js<|end_file_name|><|fim▁begin|>angular.module('movieApp')
.directive('movieResult', function () {
var directive = {
restrict: 'E',
replace: true,
scope: {
result: '=result'
},
template: [
'<div class="row">',
'<div class="col-sm-4">',
'<img ng-src="{{result.Poster}}" alt="{{result.Title}}" width="220px">',
'</div>',
'<div class="col-sm-8">',
'<h3>{{result.Title}}</h3>',
'<p>{{result.Plot}}</p>',
'<p><strong>Director:</strong> {{result.Director}}</p>',
'<p><strong>Actors:</strong> {{result.Actors}}</p>',
'<p><strong>Released:</strong> {{result.Released}} ({{result.Released | fromNow}})</p>',
'<p><strong>Genre:</strong> {{result.Genre}}</p>',
'</div>',
'</div>'
].join('')<|fim▁hole|>});<|fim▁end|> | };
return directive; |
<|file_name|>solver060.rs<|end_file_name|><|fim▁begin|>// COPYRIGHT (C) 2017 barreiro. All Rights Reserved.
// Rust solvers for Project Euler problems
use std::collections::HashMap;
use euler::algorithm::long::{concatenation, pow_10, square};
use euler::algorithm::prime::{generator_wheel, miller_rabin, prime_sieve};
use euler::Solver;
// The primes 3, 7, 109, and 673, are quite remarkable. By taking any two primes and concatenating them in any order the result will always be prime.
// For example, taking 7 and 109, both 7109 and 1097 are prime. The sum of these four primes, 792, represents the lowest sum for a set of four primes with this property.
//
// Find the lowest sum for a set of five primes for which any two primes concatenate to produce another prime.
pub struct Solver060 {
pub n: isize
}
impl Default for Solver060 {
fn default() -> Self {
Solver060 { n: 5 }
}
}
impl Solver for Solver060 {
fn solve(&self) -> isize {
let (mut set, primes) = (vec![], generator_wheel().take_while(|&p| p < pow_10(self.n - 1)).collect::<Vec<_>>());
add_prime_to_set(&mut set, self.n as _, &primes, &mut HashMap::new());<|fim▁hole|>}
fn add_prime_to_set<'a>(set: &mut Vec<isize>, size: usize, primes: &'a [isize], cache: &mut HashMap<isize, Vec<&'a isize>>) -> bool {
let last_prime = *primes.last().unwrap();
let is_prime = |c| if c < last_prime {
primes.binary_search(&c).is_ok()
} else if c < square(last_prime) {
prime_sieve(c, primes)
} else {
miller_rabin(c)
};
let concatenation_list = |p| primes.iter().filter(|&&prime| prime > p && is_prime(concatenation(p, prime)) && is_prime(concatenation(prime, p))).collect::<Vec<_>>();
// Memoization of the prime concatenations for a 25% speedup, despite increasing code complexity significantly
set.last().iter().for_each(|&&p| { cache.entry(p).or_insert_with(|| concatenation_list(p)); });
// Closure that takes an element of the set and does the intersection with the concatenations of other elements.
// The outcome is the primes that form concatenations with all elements of the set. From there, try to increase the size of the set by recursion.
let candidates = |p| cache.get(p).unwrap().iter().filter(|&c| set.iter().all(|&s| s == *p || cache.get(&s).unwrap().binary_search(c).is_ok())).map(|&&s| s).collect();
set.last().map_or(primes.to_vec(), candidates).iter().any(|&c| {
set.push(c);
if set.len() >= size || add_prime_to_set(set, size, primes, cache) {
true
} else {
set.pop();
false
}
})
}<|fim▁end|> | set.iter().sum()
} |
<|file_name|>customwizardpage.cpp<|end_file_name|><|fim▁begin|>/**************************************************************************
**
** This file is part of Qt Creator
**
** Copyright (c) 2011 Nokia Corporation and/or its subsidiary(-ies).
**
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** No Commercial Usage
**
** This file contains pre-release code and may not be distributed.
** You may use this file in accordance with the terms and conditions
** contained in the Technology Preview License Agreement accompanying
** this package.
**
** GNU Lesser General Public License Usage
**
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** If you have questions regarding the use of this file, please contact
** Nokia at qt-info@nokia.com.
**
**************************************************************************/
#include "customwizardpage.h"
#include "customwizardparameters.h"
#include <utils/pathchooser.h>
#include <utils/qtcassert.h>
#include <QtCore/QRegExp>
#include <QtCore/QDebug>
#include <QtCore/QDir>
#include <QtGui/QWizardPage>
#include <QtGui/QFormLayout>
#include <QtGui/QVBoxLayout>
#include <QtGui/QLineEdit>
#include <QtGui/QLabel>
#include <QtGui/QRegExpValidator>
#include <QtGui/QComboBox>
#include <QtGui/QTextEdit>
#include <QtGui/QSpacerItem>
enum { debug = 0 };
namespace ProjectExplorer {
namespace Internal {
// ----------- TextFieldComboBox
TextFieldComboBox::TextFieldComboBox(QWidget *parent) :
QComboBox(parent)
{
setEditable(false);
connect(this, SIGNAL(currentIndexChanged(int)),
this, SLOT(slotCurrentIndexChanged(int)));
}
QString TextFieldComboBox::text() const
{
return valueAt(currentIndex());
}
void TextFieldComboBox::setText(const QString &s)
{
const int index = findData(QVariant(s), Qt::UserRole);
if (index != -1 && index != currentIndex())
setCurrentIndex(index);
}
void TextFieldComboBox::slotCurrentIndexChanged(int i)
{
emit text4Changed(valueAt(i));
}
void TextFieldComboBox::setItems(const QStringList &displayTexts,
const QStringList &values)
{
QTC_ASSERT(displayTexts.size() == values.size(), return)
clear();
addItems(displayTexts);
const int count = values.count();
for (int i = 0; i < count; i++)
setItemData(i, QVariant(values.at(i)), Qt::UserRole);
}
QString TextFieldComboBox::valueAt(int i) const
{
return i >= 0 && i < count() ? itemData(i, Qt::UserRole).toString() : QString();
}
// -------------- TextCheckBox
TextFieldCheckBox::TextFieldCheckBox(const QString &text, QWidget *parent) :
QCheckBox(text, parent),
m_trueText(QLatin1String("true")), m_falseText(QLatin1String("false"))
{
connect(this, SIGNAL(stateChanged(int)), this, SLOT(slotStateChanged(int)));
}
QString TextFieldCheckBox::text() const
{
return isChecked() ? m_trueText : m_falseText;
}
void TextFieldCheckBox::setText(const QString &s)
{
setChecked(s == m_trueText);
}
void TextFieldCheckBox::slotStateChanged(int cs)
{
emit textChanged(cs == Qt::Checked ? m_trueText : m_falseText);
}
// --------------- CustomWizardFieldPage
CustomWizardFieldPage::LineEditData::LineEditData(QLineEdit* le, const QString &defText) :
lineEdit(le), defaultText(defText)
{
}
CustomWizardFieldPage::TextEditData::TextEditData(QTextEdit* le, const QString &defText) :
textEdit(le), defaultText(defText)
{
}
CustomWizardFieldPage::CustomWizardFieldPage(const QSharedPointer<CustomWizardContext> &ctx,
const QSharedPointer<CustomWizardParameters> ¶meters,
QWidget *parent) :
QWizardPage(parent),
m_parameters(parameters),
m_context(ctx),
m_formLayout(new QFormLayout),
m_errorLabel(new QLabel)
{
QVBoxLayout *vLayout = new QVBoxLayout;
m_formLayout->setFieldGrowthPolicy(QFormLayout::ExpandingFieldsGrow);
if (debug)
qDebug() << Q_FUNC_INFO << parameters->fields.size();
foreach(const CustomWizardField &f, parameters->fields)
addField(f);
vLayout->addLayout(m_formLayout);
m_errorLabel->setVisible(false);
m_errorLabel->setStyleSheet(QLatin1String("background: red"));
vLayout->addItem(new QSpacerItem(0, 0, QSizePolicy::Ignored, QSizePolicy::MinimumExpanding));
vLayout->addWidget(m_errorLabel);
setLayout(vLayout);
}
CustomWizardFieldPage::~CustomWizardFieldPage()
{
}
void CustomWizardFieldPage::addRow(const QString &name, QWidget *w)
{
m_formLayout->addRow(name, w);
}
void CustomWizardFieldPage::showError(const QString &m)
{
m_errorLabel->setText(m);
m_errorLabel->setVisible(true);
}
void CustomWizardFieldPage::clearError()
{
m_errorLabel->setText(QString());
m_errorLabel->setVisible(false);
}
// Create widget a control based on the control attributes map
// and register it with the QWizard.
void CustomWizardFieldPage::addField(const CustomWizardField &field)\
{
// Register field, indicate mandatory by '*' (only when registering)
QString fieldName = field.name;
if (field.mandatory)
fieldName += QLatin1Char('*');
bool spansRow = false;
// Check known classes: QComboBox
const QString className = field.controlAttributes.value(QLatin1String("class"));
QWidget *fieldWidget = 0;
if (className == QLatin1String("QComboBox")) {
fieldWidget = registerComboBox(fieldName, field);
} else if (className == QLatin1String("QTextEdit")) {
fieldWidget = registerTextEdit(fieldName, field);
} else if (className == QLatin1String("Utils::PathChooser")) {
fieldWidget = registerPathChooser(fieldName, field);
} else if (className == QLatin1String("QCheckBox")) {
fieldWidget = registerCheckBox(fieldName, field.description, field);
spansRow = true; // Do not create a label for the checkbox.
} else {
fieldWidget = registerLineEdit(fieldName, field);
}
if (spansRow) {
m_formLayout->addRow(fieldWidget);
} else {
addRow(field.description, fieldWidget);
}
}
// Return the list of values and display texts for combo
static void comboChoices(const CustomWizardField::ControlAttributeMap &controlAttributes,
QStringList *values, QStringList *displayTexts)
{
typedef CustomWizardField::ControlAttributeMap::ConstIterator AttribMapConstIt;
values->clear();
displayTexts->clear();<|fim▁hole|> // display == value.
const AttribMapConstIt attribConstEnd = controlAttributes.constEnd();
const AttribMapConstIt choicesIt = controlAttributes.constFind(QLatin1String("combochoices"));
if (choicesIt != attribConstEnd) {
const QString &choices = choicesIt.value();
if (!choices.isEmpty())
*values = *displayTexts = choices.split(QLatin1Char(','));
return;
}
// From 2.2 on: Separate lists of value and text. Add all values found.
for (int i = 0; ; i++) {
const QString valueKey = CustomWizardField::comboEntryValueKey(i);
const AttribMapConstIt valueIt = controlAttributes.constFind(valueKey);
if (valueIt == attribConstEnd)
break;
values->push_back(valueIt.value());
const QString textKey = CustomWizardField::comboEntryTextKey(i);
displayTexts->push_back(controlAttributes.value(textKey));
}
}
QWidget *CustomWizardFieldPage::registerComboBox(const QString &fieldName,
const CustomWizardField &field)
{
TextFieldComboBox *combo = new TextFieldComboBox;
do { // Set up items and current index
QStringList values;
QStringList displayTexts;
comboChoices(field.controlAttributes, &values, &displayTexts);
combo->setItems(displayTexts, values);
bool ok;
const QString currentIndexS = field.controlAttributes.value(QLatin1String("defaultindex"));
if (currentIndexS.isEmpty())
break;
const int currentIndex = currentIndexS.toInt(&ok);
if (!ok || currentIndex < 0 || currentIndex >= combo->count())
break;
combo->setCurrentIndex(currentIndex);
} while (false);
registerField(fieldName, combo, "text", SIGNAL(text4Changed(QString)));
return combo;
} // QComboBox
QWidget *CustomWizardFieldPage::registerTextEdit(const QString &fieldName,
const CustomWizardField &field)
{
QTextEdit *textEdit = new QTextEdit;
registerField(fieldName, textEdit, "plainText", SIGNAL(textChanged(QString)));
const QString defaultText = field.controlAttributes.value(QLatin1String("defaulttext"));
m_textEdits.push_back(TextEditData(textEdit, defaultText));
return textEdit;
} // QTextEdit
QWidget *CustomWizardFieldPage::registerPathChooser(const QString &fieldName,
const CustomWizardField & /*field*/)
{
Utils::PathChooser *pathChooser = new Utils::PathChooser;
registerField(fieldName, pathChooser, "path", SIGNAL(changed(QString)));
return pathChooser;
} // Utils::PathChooser
QWidget *CustomWizardFieldPage::registerCheckBox(const QString &fieldName,
const QString &fieldDescription,
const CustomWizardField &field)
{
typedef CustomWizardField::ControlAttributeMap::const_iterator AttributeMapConstIt;
TextFieldCheckBox *checkBox = new TextFieldCheckBox(fieldDescription);
const bool defaultValue = field.controlAttributes.value(QLatin1String("defaultvalue")) == QLatin1String("true");
checkBox->setChecked(defaultValue);
const AttributeMapConstIt trueTextIt = field.controlAttributes.constFind(QLatin1String("truevalue"));
if (trueTextIt != field.controlAttributes.constEnd()) // Also set empty texts
checkBox->setTrueText(trueTextIt.value());
const AttributeMapConstIt falseTextIt = field.controlAttributes.constFind(QLatin1String("falsevalue"));
if (falseTextIt != field.controlAttributes.constEnd()) // Also set empty texts
checkBox->setFalseText(falseTextIt.value());
registerField(fieldName, checkBox, "text", SIGNAL(textChanged(QString)));
return checkBox;
}
QWidget *CustomWizardFieldPage::registerLineEdit(const QString &fieldName,
const CustomWizardField &field)
{
QLineEdit *lineEdit = new QLineEdit;
const QString validationRegExp = field.controlAttributes.value(QLatin1String("validator"));
if (!validationRegExp.isEmpty()) {
QRegExp re(validationRegExp);
if (re.isValid()) {
lineEdit->setValidator(new QRegExpValidator(re, lineEdit));
} else {
qWarning("Invalid custom wizard field validator regular expression %s.", qPrintable(validationRegExp));
}
}
registerField(fieldName, lineEdit, "text", SIGNAL(textEdited(QString)));
const QString defaultText = field.controlAttributes.value(QLatin1String("defaulttext"));
m_lineEdits.push_back(LineEditData(lineEdit, defaultText));
return lineEdit;
}
void CustomWizardFieldPage::initializePage()
{
QWizardPage::initializePage();
clearError();
// Note that the field mechanism will always restore the value
// set on it when entering the page, so, there is no point in
// trying to preserve user modifications of the text.
foreach(const LineEditData &led, m_lineEdits) {
if (!led.defaultText.isEmpty()) {
QString defaultText = led.defaultText;
CustomWizardContext::replaceFields(m_context->baseReplacements, &defaultText);
led.lineEdit->setText(defaultText);
}
}
foreach(const TextEditData &ted, m_textEdits) {
if (!ted.defaultText.isEmpty()) {
QString defaultText = ted.defaultText;
CustomWizardContext::replaceFields(m_context->baseReplacements, &defaultText);
ted.textEdit->setText(defaultText);
}
}
}
bool CustomWizardFieldPage::validatePage()
{
clearError();
// Check line edits with validators
foreach(const LineEditData &led, m_lineEdits) {
if (const QValidator *val = led.lineEdit->validator()) {
int pos = 0;
QString text = led.lineEdit->text();
if (val->validate(text, pos) != QValidator::Acceptable) {
led.lineEdit->setFocus();
return false;
}
}
}
// Any user validation rules -> Check all and display messages with
// place holders applied.
if (!m_parameters->rules.isEmpty()) {
const QMap<QString, QString> values = replacementMap(wizard(), m_context, m_parameters->fields);
QString message;
if (!CustomWizardValidationRule::validateRules(m_parameters->rules, values, &message)) {
showError(message);
return false;
}
}
return QWizardPage::validatePage();
}
QMap<QString, QString> CustomWizardFieldPage::replacementMap(const QWizard *w,
const QSharedPointer<CustomWizardContext> &ctx,
const FieldList &f)
{
QMap<QString, QString> fieldReplacementMap = ctx->baseReplacements;
foreach(const Internal::CustomWizardField &field, f) {
const QString value = w->field(field.name).toString();
fieldReplacementMap.insert(field.name, value);
}
// Insert paths for generator scripts.
fieldReplacementMap.insert(QLatin1String("Path"), QDir::toNativeSeparators(ctx->path));
fieldReplacementMap.insert(QLatin1String("TargetPath"), QDir::toNativeSeparators(ctx->targetPath));
return fieldReplacementMap;
}
// --------------- CustomWizardPage
CustomWizardPage::CustomWizardPage(const QSharedPointer<CustomWizardContext> &ctx,
const QSharedPointer<CustomWizardParameters> ¶meters,
QWidget *parent) :
CustomWizardFieldPage(ctx, parameters, parent),
m_pathChooser(new Utils::PathChooser)
{
addRow(tr("Path:"), m_pathChooser);
connect(m_pathChooser, SIGNAL(validChanged()), this, SIGNAL(completeChanged()));
}
QString CustomWizardPage::path() const
{
return m_pathChooser->path();
}
void CustomWizardPage::setPath(const QString &path)
{
m_pathChooser->setPath(path);
}
bool CustomWizardPage::isComplete() const
{
return m_pathChooser->isValid();
}
} // namespace Internal
} // namespace ProjectExplorer<|fim▁end|> | // Pre 2.2 Legacy: "combochoices" attribute with a comma-separated list, for |
<|file_name|>str-growth.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>
pub fn main() {
let mut s = ~"a";
s.push_char('b');
assert_eq!(s[0], 'a' as u8);
assert_eq!(s[1], 'b' as u8);
s.push_char('c');
s.push_char('d');
assert_eq!(s[0], 'a' as u8);
assert_eq!(s[1], 'b' as u8);
assert_eq!(s[2], 'c' as u8);
assert_eq!(s[3], 'd' as u8);
}<|fim▁end|> | |
<|file_name|>trayicon.cpp<|end_file_name|><|fim▁begin|>#include "trayicon.h"
#include <QApplication>
#include <QMenu>
#include <QTimer>
#include <conf/addressgroup.h>
#include <conf/appgroup.h>
#include <conf/confmanager.h>
#include <conf/firewallconf.h>
#include <form/controls/mainwindow.h>
#include <fortsettings.h>
#include <manager/hotkeymanager.h>
#include <manager/windowmanager.h>
#include <user/iniuser.h>
#include <util/guiutil.h>
#include <util/iconcache.h>
#include "traycontroller.h"
namespace {
const char *const eventSingleClick = "singleClick";
const char *const eventDoubleClick = "doubleClick";
const char *const eventMiddleClick = "middleClick";
const char *const actionShowPrograms = "Programs";
const char *const actionShowOptions = "Options";
const char *const actionShowStatistics = "Statistics";
const char *const actionShowTrafficGraph = "TrafficGraph";
const char *const actionSwitchFilterEnabled = "FilterEnabled";
const char *const actionSwitchStopTraffic = "StopTraffic";
const char *const actionSwitchStopInetTraffic = "StopInetTraffic";
const char *const actionSwitchAutoAllowPrograms = "AutoAllowPrograms";
QString clickNameByType(TrayIcon::ClickType clickType)
{
switch (clickType) {
case TrayIcon::SingleClick:
return eventSingleClick;
case TrayIcon::DoubleClick:
return eventDoubleClick;
case TrayIcon::MiddleClick:
return eventMiddleClick;
default:
return QString();
}
}
QString actionNameByType(TrayIcon::ActionType actionType)
{
switch (actionType) {
case TrayIcon::ActionShowPrograms:
return actionShowPrograms;
case TrayIcon::ActionShowOptions:
return actionShowOptions;
case TrayIcon::ActionShowStatistics:
return actionShowStatistics;
case TrayIcon::ActionShowTrafficGraph:<|fim▁hole|> return actionShowTrafficGraph;
case TrayIcon::ActionSwitchFilterEnabled:
return actionSwitchFilterEnabled;
case TrayIcon::ActionSwitchStopTraffic:
return actionSwitchStopTraffic;
case TrayIcon::ActionSwitchStopInetTraffic:
return actionSwitchStopInetTraffic;
case TrayIcon::ActionSwitchAutoAllowPrograms:
return actionSwitchAutoAllowPrograms;
default:
return QString();
}
}
TrayIcon::ActionType actionTypeByName(const QString &name)
{
if (name.isEmpty())
return TrayIcon::ActionNone;
if (name == actionShowPrograms)
return TrayIcon::ActionShowPrograms;
if (name == actionShowOptions)
return TrayIcon::ActionShowOptions;
if (name == actionShowStatistics)
return TrayIcon::ActionShowStatistics;
if (name == actionShowTrafficGraph)
return TrayIcon::ActionShowTrafficGraph;
if (name == actionSwitchFilterEnabled)
return TrayIcon::ActionSwitchFilterEnabled;
if (name == actionSwitchStopTraffic)
return TrayIcon::ActionSwitchStopTraffic;
if (name == actionSwitchStopInetTraffic)
return TrayIcon::ActionSwitchStopInetTraffic;
if (name == actionSwitchAutoAllowPrograms)
return TrayIcon::ActionSwitchAutoAllowPrograms;
return TrayIcon::ActionNone;
}
TrayIcon::ActionType defaultActionTypeByClick(TrayIcon::ClickType clickType)
{
switch (clickType) {
case TrayIcon::SingleClick:
return TrayIcon::ActionShowPrograms;
case TrayIcon::DoubleClick:
return TrayIcon::ActionShowOptions;
case TrayIcon::MiddleClick:
return TrayIcon::ActionShowStatistics;
default:
return TrayIcon::ActionNone;
}
}
void setActionCheckable(QAction *action, bool checked = false, const QObject *receiver = nullptr,
const char *member = nullptr)
{
action->setCheckable(true);
action->setChecked(checked);
if (receiver) {
QObject::connect(action, SIGNAL(toggled(bool)), receiver, member);
}
}
QAction *addAction(QWidget *widget, const QIcon &icon, const QString &text,
const QObject *receiver = nullptr, const char *member = nullptr, bool checkable = false,
bool checked = false)
{
auto action = new QAction(icon, text, widget);
if (receiver) {
QObject::connect(action, SIGNAL(triggered(bool)), receiver, member);
}
if (checkable) {
setActionCheckable(action, checked);
}
widget->addAction(action);
return action;
}
}
TrayIcon::TrayIcon(QObject *parent) :
QSystemTrayIcon(parent), m_trayTriggered(false), m_ctrl(new TrayController(this))
{
setupUi();
setupController();
connect(this, &QSystemTrayIcon::activated, this, &TrayIcon::onTrayActivated);
}
TrayIcon::~TrayIcon()
{
delete m_menu;
}
FortSettings *TrayIcon::settings() const
{
return ctrl()->settings();
}
ConfManager *TrayIcon::confManager() const
{
return ctrl()->confManager();
}
FirewallConf *TrayIcon::conf() const
{
return ctrl()->conf();
}
IniOptions *TrayIcon::ini() const
{
return ctrl()->ini();
}
IniUser *TrayIcon::iniUser() const
{
return ctrl()->iniUser();
}
HotKeyManager *TrayIcon::hotKeyManager() const
{
return ctrl()->hotKeyManager();
}
WindowManager *TrayIcon::windowManager() const
{
return ctrl()->windowManager();
}
void TrayIcon::onMouseClicked(TrayIcon::ClickType clickType)
{
QAction *action = m_clickActions[clickType];
if (action) {
action->trigger();
}
}
void TrayIcon::onTrayActivated(QSystemTrayIcon::ActivationReason reason)
{
switch (reason) {
case QSystemTrayIcon::Trigger:
m_trayTriggered = true;
QTimer::singleShot(QApplication::doubleClickInterval(), this, [&] {
if (m_trayTriggered) {
m_trayTriggered = false;
onMouseClicked(SingleClick);
}
});
break;
case QSystemTrayIcon::DoubleClick:
if (m_trayTriggered) {
m_trayTriggered = false;
onMouseClicked(DoubleClick);
}
break;
case QSystemTrayIcon::MiddleClick:
m_trayTriggered = false;
onMouseClicked(MiddleClick);
break;
case QSystemTrayIcon::Context:
m_trayTriggered = false;
showTrayMenu(QCursor::pos());
break;
default:
break;
}
}
void TrayIcon::updateTrayIcon(bool alerted)
{
const auto icon = alerted ? GuiUtil::overlayIcon(":/icons/sheild-96.png", ":/icons/error.png")
: IconCache::icon(":/icons/sheild-96.png");
this->setIcon(icon);
}
void TrayIcon::showTrayMenu(const QPoint &pos)
{
m_menu->popup(pos);
}
void TrayIcon::updateTrayMenu(bool onlyFlags)
{
if (!onlyFlags) {
updateAppGroupActions();
}
updateTrayMenuFlags();
updateHotKeys();
}
void TrayIcon::setupController()
{
connect(windowManager(), &WindowManager::optWindowChanged, this,
&TrayIcon::updateTrayMenuFlags);
connect(windowManager(), &WindowManager::graphWindowChanged, m_graphAction,
&QAction::setChecked);
connect(settings(), &FortSettings::passwordCheckedChanged, this,
&TrayIcon::updateTrayMenuFlags);
connect(ctrl(), &TrayController::retranslateUi, this, &TrayIcon::retranslateUi);
retranslateUi();
}
void TrayIcon::retranslateUi()
{
m_programsAction->setText(tr("Programs"));
m_optionsAction->setText(tr("Options"));
m_statisticsAction->setText(tr("Statistics"));
m_zonesAction->setText(tr("Zones"));
m_graphAction->setText(tr("Traffic Graph"));
m_filterEnabledAction->setText(tr("Filter Enabled"));
m_stopTrafficAction->setText(tr("Stop Traffic"));
m_stopInetTrafficAction->setText(tr("Stop Internet Traffic"));
m_autoAllowProgsAction->setText(tr("Auto-Allow New Programs"));
m_quitAction->setText(tr("Quit"));
}
void TrayIcon::setupUi()
{
this->setToolTip(QApplication::applicationDisplayName());
setupTrayMenu();
updateTrayMenu();
updateTrayIcon();
updateClickActions();
}
void TrayIcon::setupTrayMenu()
{
m_menu = new QMenu();
m_programsAction = addAction(m_menu, IconCache::icon(":/icons/application.png"), QString(),
windowManager(), SLOT(showProgramsWindow()));
addHotKey(m_programsAction, iniUser()->hotKeyPrograms());
m_optionsAction = addAction(m_menu, IconCache::icon(":/icons/cog.png"), QString(),
windowManager(), SLOT(showOptionsWindow()));
addHotKey(m_optionsAction, iniUser()->hotKeyOptions());
m_statisticsAction = addAction(m_menu, IconCache::icon(":/icons/chart_bar.png"), QString(),
windowManager(), SLOT(showStatisticsWindow()));
addHotKey(m_statisticsAction, iniUser()->hotKeyStatistics());
m_graphAction = addAction(m_menu, IconCache::icon(":/icons/action_log.png"), QString(),
windowManager(), SLOT(switchGraphWindow()), true, !!windowManager()->graphWindow());
addHotKey(m_graphAction, iniUser()->hotKeyGraph());
m_zonesAction = addAction(m_menu, IconCache::icon(":/icons/ip_class.png"), QString(),
windowManager(), SLOT(showZonesWindow()));
addHotKey(m_zonesAction, iniUser()->hotKeyZones());
m_menu->addSeparator();
m_filterEnabledAction =
addAction(m_menu, QIcon(), QString(), this, SLOT(switchTrayFlag(bool)), true);
addHotKey(m_filterEnabledAction, iniUser()->hotKeyFilter());
m_stopTrafficAction =
addAction(m_menu, QIcon(), QString(), this, SLOT(switchTrayFlag(bool)), true);
addHotKey(m_stopTrafficAction, iniUser()->hotKeyStopTraffic());
m_stopInetTrafficAction =
addAction(m_menu, QIcon(), QString(), this, SLOT(switchTrayFlag(bool)), true);
addHotKey(m_stopInetTrafficAction, iniUser()->hotKeyStopInetTraffic());
m_autoAllowProgsAction =
addAction(m_menu, QIcon(), QString(), this, SLOT(switchTrayFlag(bool)), true);
addHotKey(m_autoAllowProgsAction, iniUser()->hotKeyAllowAllNew());
m_menu->addSeparator();
for (int i = 0; i < MAX_APP_GROUP_COUNT; ++i) {
QAction *a = addAction(m_menu, QIcon(), QString(), this, SLOT(switchTrayFlag(bool)), true);
if (i < 12) {
const QString shortcutText =
iniUser()->hotKeyAppGroupModifiers() + "+F" + QString::number(i + 1);
addHotKey(a, shortcutText);
}
m_appGroupActions.append(a);
}
m_menu->addSeparator();
m_quitAction = addAction(m_menu, QIcon(), tr("Quit"), this, SLOT(quitProgram()));
addHotKey(m_quitAction, iniUser()->hotKeyQuit());
}
void TrayIcon::updateTrayMenuFlags()
{
const bool editEnabled = (!settings()->isPasswordRequired() && !windowManager()->optWindow());
m_filterEnabledAction->setEnabled(editEnabled);
m_stopTrafficAction->setEnabled(editEnabled);
m_stopInetTrafficAction->setEnabled(editEnabled);
m_autoAllowProgsAction->setEnabled(editEnabled);
m_filterEnabledAction->setChecked(conf()->filterEnabled());
m_stopTrafficAction->setChecked(conf()->stopTraffic());
m_stopInetTrafficAction->setChecked(conf()->stopInetTraffic());
m_autoAllowProgsAction->setChecked(conf()->allowAllNew());
int appGroupIndex = 0;
for (QAction *action : qAsConst(m_appGroupActions)) {
if (!action->isVisible())
break;
const bool appGroupEnabled = conf()->appGroupEnabled(appGroupIndex++);
action->setEnabled(editEnabled);
action->setChecked(appGroupEnabled);
}
}
void TrayIcon::updateAppGroupActions()
{
const int appGroupsCount = conf()->appGroups().count();
for (int i = 0; i < MAX_APP_GROUP_COUNT; ++i) {
QAction *action = m_appGroupActions.at(i);
QString menuLabel;
bool visible = false;
if (i < appGroupsCount) {
const AppGroup *appGroup = conf()->appGroups().at(i);
menuLabel = appGroup->menuLabel();
visible = true;
}
action->setText(menuLabel);
action->setVisible(visible);
action->setEnabled(visible);
}
}
void TrayIcon::saveTrayFlags()
{
conf()->setFilterEnabled(m_filterEnabledAction->isChecked());
conf()->setStopTraffic(m_stopTrafficAction->isChecked());
conf()->setStopInetTraffic(m_stopInetTrafficAction->isChecked());
conf()->setAllowAllNew(m_autoAllowProgsAction->isChecked());
int i = 0;
for (AppGroup *appGroup : conf()->appGroups()) {
const QAction *action = m_appGroupActions.at(i++);
appGroup->setEnabled(action->isChecked());
}
confManager()->saveFlags();
}
void TrayIcon::switchTrayFlag(bool checked)
{
if (iniUser()->confirmTrayFlags()) {
const auto action = qobject_cast<QAction *>(sender());
Q_ASSERT(action);
if (!windowManager()->showQuestionBox(
tr("Are you sure to switch the \"%1\"?").arg(action->text()))) {
action->setChecked(!checked);
return;
}
}
saveTrayFlags();
}
void TrayIcon::quitProgram()
{
if (iniUser()->confirmQuit()) {
if (!windowManager()->showQuestionBox(tr("Are you sure you want to quit the program?")))
return;
}
windowManager()->quitByCheckPassword();
}
void TrayIcon::addHotKey(QAction *action, const QString &shortcutText)
{
if (shortcutText.isEmpty())
return;
const QKeySequence shortcut = QKeySequence::fromString(shortcutText);
hotKeyManager()->addAction(action, shortcut);
}
void TrayIcon::updateHotKeys()
{
hotKeyManager()->setEnabled(iniUser()->hotKeyEnabled());
}
void TrayIcon::removeHotKeys()
{
hotKeyManager()->removeActions();
}
TrayIcon::ActionType TrayIcon::clickEventActionType(ClickType clickType) const
{
const QString eventName = clickNameByType(clickType);
const QString actionName = iniUser()->trayAction(eventName);
const ActionType actionType = actionTypeByName(actionName);
return (actionType != ActionNone) ? actionType : defaultActionTypeByClick(clickType);
}
void TrayIcon::setClickEventActionType(ClickType clickType, ActionType actionType)
{
const QString eventName = clickNameByType(clickType);
const QString actionName = actionNameByType(actionType);
iniUser()->setTrayAction(eventName, actionName);
updateClickActions();
}
void TrayIcon::updateClickActions()
{
m_clickActions[SingleClick] = clickActionFromIni(SingleClick);
m_clickActions[DoubleClick] = clickActionFromIni(DoubleClick);
m_clickActions[MiddleClick] = clickActionFromIni(MiddleClick);
}
QAction *TrayIcon::clickActionFromIni(ClickType clickType) const
{
const ActionType actionType = clickEventActionType(clickType);
return clickActionByType(actionType);
}
QAction *TrayIcon::clickActionByType(ActionType actionType) const
{
switch (actionType) {
case TrayIcon::ActionShowPrograms:
return m_programsAction;
case TrayIcon::ActionShowOptions:
return m_optionsAction;
case TrayIcon::ActionShowStatistics:
return m_statisticsAction;
case TrayIcon::ActionShowTrafficGraph:
return m_graphAction;
case TrayIcon::ActionSwitchFilterEnabled:
return m_filterEnabledAction;
case TrayIcon::ActionSwitchStopTraffic:
return m_stopTrafficAction;
case TrayIcon::ActionSwitchStopInetTraffic:
return m_stopInetTrafficAction;
case TrayIcon::ActionSwitchAutoAllowPrograms:
return m_autoAllowProgsAction;
}
return nullptr;
}<|fim▁end|> | |
<|file_name|>20.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export { Globe20 as default } from "../../"; |
<|file_name|>data.ts<|end_file_name|><|fim▁begin|>import { DataFrameDTO, FieldConfig } from './dataFrame';
import { DataTransformerConfig } from './transformations';
import { ApplyFieldOverrideOptions } from './fieldOverrides';
import { PanelPluginDataSupport } from '.';
import { DataTopic } from './query';
import { DataFrameType } from './dataFrameTypes';
export type KeyValue<T = any> = Record<string, T>;
/**
* Represent panel data loading state.
* @public
*/
export enum LoadingState {
NotStarted = 'NotStarted',
Loading = 'Loading',
Streaming = 'Streaming',
Done = 'Done',
Error = 'Error',
}
// Should be kept in sync with grafana-plugin-sdk-go/data/frame_meta.go
export const preferredVisualizationTypes = ['graph', 'table', 'logs', 'trace', 'nodeGraph'] as const;
export type PreferredVisualisationType = typeof preferredVisualizationTypes[number];
/**
* @public
*/
export interface QueryResultMeta {
type?: DataFrameType;
/** DatasSource Specific Values */
custom?: Record<string, any>;
/** Stats */
stats?: QueryResultMetaStat[];
/** Meta Notices */
notices?: QueryResultMetaNotice[];
<|fim▁hole|> preferredVisualisationType?: PreferredVisualisationType;
/** The path for live stream updates for this frame */
channel?: string;
/**
* Optionally identify which topic the frame should be assigned to.
* A value specified in the response will override what the request asked for.
*/
dataTopic?: DataTopic;
/**
* This is the raw query sent to the underlying system. All macros and templating
* as been applied. When metadata contains this value, it will be shown in the query inspector
*/
executedQueryString?: string;
/**
* A browsable path on the datasource
*/
path?: string;
/**
* defaults to '/'
*/
pathSeparator?: string;
/**
* Legacy data source specific, should be moved to custom
* */
alignmentPeriod?: number; // used by cloud monitoring
searchWords?: string[]; // used by log models and loki
limit?: number; // used by log models and loki
json?: boolean; // used to keep track of old json doc values
instant?: boolean;
}
export interface QueryResultMetaStat extends FieldConfig {
displayName: string;
value: number;
}
/**
* QueryResultMetaNotice is a structure that provides user notices for query result data
* @public
*/
export interface QueryResultMetaNotice {
/**
* Specify the notice severity
*/
severity: 'info' | 'warning' | 'error';
/**
* Notice descriptive text
*/
text: string;
/**
* An optional link that may be displayed in the UI.
* This value may be an absolute URL or relative to grafana root
*/
link?: string;
/**
* Optionally suggest an appropriate tab for the panel inspector
*/
inspect?: 'meta' | 'error' | 'data' | 'stats';
}
/**
* @public
*/
export interface QueryResultBase {
/**
* Matches the query target refId
*/
refId?: string;
/**
* Used by some backend data sources to communicate back info about the execution (generated sql, timing)
*/
meta?: QueryResultMeta;
}
export interface Labels {
[key: string]: string;
}
export interface Column {
text: string; // For a Column, the 'text' is the field name
filterable?: boolean;
unit?: string;
custom?: Record<string, any>;
}
export interface TableData extends QueryResultBase {
name?: string;
columns: Column[];
rows: any[][];
type?: string;
}
export type TimeSeriesValue = number | null;
export type TimeSeriesPoints = TimeSeriesValue[][];
export interface TimeSeries extends QueryResultBase {
target: string;
/**
* If name is manually configured via an alias / legend pattern
*/
title?: string;
datapoints: TimeSeriesPoints;
unit?: string;
tags?: Labels;
}
export enum NullValueMode {
Null = 'null',
Ignore = 'connected',
AsZero = 'null as zero',
}
/**
* Describes and API for exposing panel specific data configurations.
*/
export interface DataConfigSource {
configRev?: number;
getDataSupport: () => PanelPluginDataSupport;
getTransformations: () => DataTransformerConfig[] | undefined;
getFieldOverrideOptions: () => ApplyFieldOverrideOptions | undefined;
snapshotData?: DataFrameDTO[];
}<|fim▁end|> | /** Used to track transformation ids that where part of the processing */
transformations?: string[];
/** Currently used to show results in Explore only in preferred visualisation option */ |
<|file_name|>RequestTrackingHostValveTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2009 Martin Grotzke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package de.javakaffee.web.msm;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
import static org.testng.Assert.assertEquals;
import java.io.IOException;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.Nonnull;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import org.apache.catalina.Context;
import org.apache.catalina.Host;
import org.apache.catalina.Valve;
import org.apache.catalina.connector.Request;
import org.apache.catalina.connector.Response;
import org.apache.tomcat.util.http.ServerCookie;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import de.javakaffee.web.msm.MemcachedSessionService.SessionManager;
/**
* Test the {@link RequestTrackingHostValve}.
*
* @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a>
* @version $Id$
*/
public abstract class RequestTrackingHostValveTest {
protected MemcachedSessionService _service;
private RequestTrackingHostValve _sessionTrackerValve;
private Valve _nextValve;
private Request _request;
private Response _response;
@BeforeMethod
public void setUp() throws Exception {
_service = mock( MemcachedSessionService.class );
_request = mock( Request.class );
_response = mock( Response.class );
final Context _contextContainer = mock(Context.class);
final Host _hostContainer = mock(Host.class);
final SessionManager _manager = mock(SessionManager.class);
when(_service.getManager()).thenReturn(_manager);
when(_manager.getContext()).thenReturn(_contextContainer);
when(_contextContainer.getParent()).thenReturn(_hostContainer);
when(_contextContainer.getPath()).thenReturn("/");
_sessionTrackerValve = createSessionTrackerValve();
_nextValve = mock( Valve.class );
_sessionTrackerValve.setNext( _nextValve );
_sessionTrackerValve.setContainer(_hostContainer);
when(_request.getRequestURI()).thenReturn( "/someRequest");<|fim▁hole|>
when(_request.getNote(eq(RequestTrackingHostValve.REQUEST_PROCESSED))).thenReturn(Boolean.TRUE);
when(_request.getNote(eq(RequestTrackingHostValve.SESSION_ID_CHANGED))).thenReturn(Boolean.FALSE);
}
@Nonnull
protected RequestTrackingHostValve createSessionTrackerValve() {
return new RequestTrackingHostValve(".*\\.(png|gif|jpg|css|js|ico)$", "somesessionid", _service, Statistics.create(),
new AtomicBoolean( true ), new CurrentRequest()) {
@Override
protected String[] getSetCookieHeaders(final Response response) {
return RequestTrackingHostValveTest.this.getSetCookieHeaders(response);
}
};
}
protected abstract String[] getSetCookieHeaders(final Response response);
@AfterMethod
public void tearDown() throws Exception {
reset( _service,
_nextValve,
_request,
_response );
}
@Test
public final void testGetSessionCookieName() throws IOException, ServletException {
final RequestTrackingHostValve cut = new RequestTrackingHostValve(null, "foo", _service, Statistics.create(),
new AtomicBoolean( true ), new CurrentRequest()) {
@Override
protected String[] getSetCookieHeaders(final Response response) {
final Collection<String> result = response.getHeaders("Set-Cookie");
return result.toArray(new String[result.size()]);
}
};
assertEquals(cut.getSessionCookieName(), "foo");
}
@Test
public final void testProcessRequestNotePresent() throws IOException, ServletException {
_sessionTrackerValve.invoke( _request, _response );
verify( _service, never() ).backupSession( anyString(), anyBoolean(), anyString() );
verify(_request).setNote(eq(RequestTrackingHostValve.REQUEST_PROCESS), eq(Boolean.TRUE));
}
@Test
public final void testBackupSessionNotInvokedWhenNoSessionIdPresent() throws IOException, ServletException {
when( _request.getRequestedSessionId() ).thenReturn( null );
when( _response.getHeader( eq( "Set-Cookie" ) ) ).thenReturn( null );
_sessionTrackerValve.invoke( _request, _response );
verify( _service, never() ).backupSession( anyString(), anyBoolean(), anyString() );
}
@Test
public final void testBackupSessionInvokedWhenResponseCookiePresent() throws IOException, ServletException {
when( _request.getRequestedSessionId() ).thenReturn( null );
final Cookie cookie = new Cookie( _sessionTrackerValve.getSessionCookieName(), "foo" );
setupGetResponseSetCookieHeadersExpectations(_response, new String[]{generateCookieString( cookie )});
_sessionTrackerValve.invoke( _request, _response );
verify( _service ).backupSession( eq( "foo" ), eq( false), anyString() );
}
@Test
public final void testChangeSessionIdForRelocatedSession() throws IOException, ServletException {
final String sessionId = "bar";
final String newSessionId = "newId";
when(_request.getNote(eq(RequestTrackingHostValve.SESSION_ID_CHANGED))).thenReturn(Boolean.TRUE);
when( _request.getRequestedSessionId() ).thenReturn( sessionId );
final Cookie cookie = new Cookie( _sessionTrackerValve.getSessionCookieName(), newSessionId );
setupGetResponseSetCookieHeadersExpectations(_response, new String[]{generateCookieString( cookie )});
_sessionTrackerValve.invoke( _request, _response );
verify( _service ).backupSession( eq( newSessionId ), eq( true ), anyString() );
}
@Test
public final void testRequestFinishedShouldBeInvokedForIgnoredResources() throws IOException, ServletException {
when( _request.getRequestedSessionId() ).thenReturn( "foo" );
when(_request.getRequestURI()).thenReturn("/pixel.gif");
_sessionTrackerValve.invoke( _request, _response );
verify( _service ).requestFinished( eq( "foo" ), anyString() );
}
protected abstract void setupGetResponseSetCookieHeadersExpectations(Response response, String[] result);
@Nonnull
protected String generateCookieString(final Cookie cookie) {
final StringBuffer sb = new StringBuffer();
ServerCookie.appendCookieValue
(sb, cookie.getVersion(), cookie.getName(), cookie.getValue(),
cookie.getPath(), cookie.getDomain(), cookie.getComment(),
cookie.getMaxAge(), cookie.getSecure(), true);
final String setSessionCookieHeader = sb.toString();
return setSessionCookieHeader;
}
}<|fim▁end|> | when(_request.getMethod()).thenReturn("GET");
when(_request.getQueryString()).thenReturn(null);
when(_request.getContext()).thenReturn(_contextContainer); |
<|file_name|>TType2.java<|end_file_name|><|fim▁begin|>package rholang.parsing.delimc.Absyn; // Java Package generated by the BNF Converter.
public class TType2 extends TType {
public final Type type_1, type_2;
public TType2(Type p1, Type p2) { type_1 = p1; type_2 = p2; }
public <R,A> R accept(rholang.parsing.delimc.Absyn.TType.Visitor<R,A> v, A arg) { return v.visit(this, arg); }
public boolean equals(Object o) {
if (this == o) return true;
if (o instanceof rholang.parsing.delimc.Absyn.TType2) {
rholang.parsing.delimc.Absyn.TType2 x = (rholang.parsing.delimc.Absyn.TType2)o;
return this.type_1.equals(x.type_1) && this.type_2.equals(x.type_2);
}
return false;
}
public int hashCode() {<|fim▁hole|>
}<|fim▁end|> | return 37*(this.type_1.hashCode())+this.type_2.hashCode();
} |
<|file_name|>templateresponse.py<|end_file_name|><|fim▁begin|>import logging
from xml.dom.minidom import *
from jinja2 import Environment, Template<|fim▁hole|>
from edge.dateutility import DateUtility
from edge.opensearch.response import Response
class TemplateResponse(Response):
def __init__(self):
super(TemplateResponse, self).__init__()
self.env = Environment()
self.env.trim_blocks = True
self.env.autoescape = True
self.variables = {}
self.env.filters['convertISOTime'] = DateUtility.convertISOTime
def setTemplate(self, template):
self.template = self.env.from_string(template)
def generate(self, pretty=False):
logging.debug('TemplateResponse.generate is called.')
if pretty:
try :
xmlStr = self.template.render(self.variables).encode('utf-8').replace('\n', '')
except Exception as e:
logging.debug("Problem generating template " + str(e))
xmlStr = self.template.render({}).encode('utf-8').replace('\n', '')
document = xml.dom.minidom.parseString(xmlStr)
return document.toprettyxml()
else:
return self.template.render(self.variables).replace('\n', '')<|fim▁end|> | |
<|file_name|>flatbuttonrenderer.js<|end_file_name|><|fim▁begin|>// Copyright 2008 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**<|fim▁hole|> * but uses a <div> element instead of a <button> or <input> element.
*
*/
goog.provide('goog.ui.FlatButtonRenderer');
goog.require('goog.a11y.aria');
goog.require('goog.a11y.aria.Role');
goog.require('goog.dom.classes');
goog.require('goog.ui.Button');
goog.require('goog.ui.ButtonRenderer');
goog.require('goog.ui.INLINE_BLOCK_CLASSNAME');
goog.require('goog.ui.registry');
/**
* Flat renderer for {@link goog.ui.Button}s. Flat buttons can contain
* almost arbitrary HTML content, will flow like inline elements, but can be
* styled like block-level elements.
* @constructor
* @extends {goog.ui.ButtonRenderer}
*/
goog.ui.FlatButtonRenderer = function() {
goog.ui.ButtonRenderer.call(this);
};
goog.inherits(goog.ui.FlatButtonRenderer, goog.ui.ButtonRenderer);
goog.addSingletonGetter(goog.ui.FlatButtonRenderer);
/**
* Default CSS class to be applied to the root element of components rendered
* by this renderer.
* @type {string}
*/
goog.ui.FlatButtonRenderer.CSS_CLASS = goog.getCssName('goog-flat-button');
/**
* Returns the control's contents wrapped in a div element, with
* the renderer's own CSS class and additional state-specific classes applied
* to it, and the button's disabled attribute set or cleared as needed.
* Overrides {@link goog.ui.ButtonRenderer#createDom}.
* @param {goog.ui.Control} button Button to render.
* @return {Element} Root element for the button.
* @override
*/
goog.ui.FlatButtonRenderer.prototype.createDom = function(button) {
var classNames = this.getClassNames(button);
var attributes = {
'class': goog.ui.INLINE_BLOCK_CLASSNAME + ' ' + classNames.join(' '),
'title': button.getTooltip() || ''
};
return button.getDomHelper().createDom(
'div', attributes, button.getContent());
};
/**
* Returns the ARIA role to be applied to flat buttons.
* @return {goog.a11y.aria.Role|undefined} ARIA role.
* @override
*/
goog.ui.FlatButtonRenderer.prototype.getAriaRole = function() {
return goog.a11y.aria.Role.BUTTON;
};
/**
* Returns true if this renderer can decorate the element. Overrides
* {@link goog.ui.ButtonRenderer#canDecorate} by returning true if the
* element is a DIV, false otherwise.
* @param {Element} element Element to decorate.
* @return {boolean} Whether the renderer can decorate the element.
* @override
*/
goog.ui.FlatButtonRenderer.prototype.canDecorate = function(element) {
return element.tagName == 'DIV';
};
/**
* Takes an existing element and decorates it with the flat button control.
* Initializes the control's ID, content, tooltip, value, and state based
* on the ID of the element, its child nodes, and its CSS classes, respectively.
* Returns the element. Overrides {@link goog.ui.ButtonRenderer#decorate}.
* @param {goog.ui.Control} button Button instance to decorate the element.
* @param {Element} element Element to decorate.
* @return {Element} Decorated element.
* @override
*/
goog.ui.FlatButtonRenderer.prototype.decorate = function(button, element) {
goog.dom.classes.add(element, goog.ui.INLINE_BLOCK_CLASSNAME);
return goog.ui.FlatButtonRenderer.superClass_.decorate.call(this, button,
element);
};
/**
* Flat buttons can't use the value attribute since they are div elements.
* Overrides {@link goog.ui.ButtonRenderer#getValue} to prevent trying to
* access the element's value.
* @param {Element} element The button control's root element.
* @return {string} Value not valid for flat buttons.
* @override
*/
goog.ui.FlatButtonRenderer.prototype.getValue = function(element) {
// Flat buttons don't store their value in the DOM.
return '';
};
/**
* Returns the CSS class to be applied to the root element of components
* rendered using this renderer.
* @return {string} Renderer-specific CSS class.
* @override
*/
goog.ui.FlatButtonRenderer.prototype.getCssClass = function() {
return goog.ui.FlatButtonRenderer.CSS_CLASS;
};
// Register a decorator factory function for Flat Buttons.
goog.ui.registry.setDecoratorByClassName(goog.ui.FlatButtonRenderer.CSS_CLASS,
function() {
// Uses goog.ui.Button, but with FlatButtonRenderer.
return new goog.ui.Button(null, goog.ui.FlatButtonRenderer.getInstance());
});<|fim▁end|> | * @fileoverview Similiar functionality of {@link goog.ui.ButtonRenderer}, |
<|file_name|>app-config.js<|end_file_name|><|fim▁begin|>(function (angular) {
'use strict';
var config = {
githubApiUrl: 'https://api.github.com/',
};<|fim▁hole|>
angular.module('myGithubApp').constant('config', config);
})(angular);<|fim▁end|> | |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export * from './selectInputs.component'; |
<|file_name|>resources.py<|end_file_name|><|fim▁begin|># coding: utf-8
import os
from UserDict import DictMixin
from fnmatch import fnmatch
from datetime import datetime
from datetime import date
import pytz
from pyramid.threadlocal import get_current_registry
from pyramid.traversal import resource_path
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime, Date
from sqlalchemy import ForeignKey
from sqlalchemy import Integer, Float
from sqlalchemy import LargeBinary
from sqlalchemy import String
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
from sqlalchemy import UniqueConstraint
from sqlalchemy import Table, select
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.orm import backref
from sqlalchemy.orm import deferred
from sqlalchemy.orm import object_mapper
from sqlalchemy.orm import relation, relationship
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.sql import and_
from sqlalchemy.sql import select
from sqlalchemy.util import classproperty
from transaction import commit
from zope.deprecation.deprecation import deprecated
from zope.interface import implements
import kotti
from kotti import Base
from kotti import DBSession
from kotti import get_settings
from kotti import metadata
from kotti.interfaces import INode
from kotti.interfaces import IContent
from kotti.interfaces import IDocument
from kotti.interfaces import IFile
from kotti.interfaces import IImage
from kotti.interfaces import IDefaultWorkflow
from kotti.migrate import stamp_heads
from kotti.security import PersistentACLMixin
from kotti.security import has_permission
from kotti.security import view_permitted, SITE_ACL
from kotti.security import Principals, get_principals
from kotti.sqla import ACLType
from kotti.sqla import JsonType
from kotti.sqla import MutationList
from kotti.sqla import NestedMutationDict
from kotti.util import ViewLink
#from kotti.util import _
from kotti.util import camel_case_to_name
from kotti.util import get_paste_items
from kotti.util import camel_case_to_name
from kotti.resources import Document
from mba import _
TZ_HK = pytz.timezone('Asia/Hong_Kong')
friend = Table(
'friends', Base.metadata,
Column('user_a_id', Integer, ForeignKey('mba_users.id'), primary_key=True),
Column('user_b_id', Integer, ForeignKey('mba_users.id'), primary_key=True),
Column('status', Integer, default=0) # 0: No friend yet, 1: friend already
)
# Meetup Invitation
class MeetupInvitation(Base):
id = Column('id', Integer, nullable=False, primary_key=True, autoincrement=True)
inviter_id = Column('inviter_id',Integer, ForeignKey('mba_users.id')) #邀请者
inviter = relationship("MbaUser", foreign_keys="[MeetupInvitation.inviter_id]")
invitee_id = Column('invitee_id', Integer, ForeignKey('mba_users.id') ) #被邀请者
invitee = relationship("MbaUser", foreign_keys="[MeetupInvitation.invitee_id]")
meetup_id = Column(Integer, ForeignKey('acts.id'))
meetup = relationship('Act')
status = Column(Integer, default=0) # 0 : unread, 1: ignore 2:accept, 3: reject 4: deleted
class UserInterest(Base):
interest_id = Column(Integer, ForeignKey('interests.id'), primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
# interest = relationship('Interest', backref='interest_items')
# name = association_proxy('interest', 'name')
user = relationship("MbaUser",
backref=backref("user_interests",
cascade="all, delete-orphan")
)
interest = relationship("Interest")
interest_name = association_proxy("interest", "name")
@classmethod
def _interest_find_or_create(cls, name):
with DBSession.no_autoflush:
interest = DBSession.query(Interest).filter_by(name=name).first()
if interest is None:
interest = Interest(name=name)
return cls(interest=interest)
class UserSkill(Base):
interest_id = Column(Integer, ForeignKey('interests.id'), primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
user = relationship("MbaUser",
backref=backref("user_skills",
cascade="all, delete-orphan")
)
skill = relationship("Interest")
skill_name = association_proxy("skill", "name")
@classmethod
def _interest_find_or_create(cls, name):
with DBSession.no_autoflush:
interest = DBSession.query(Interest).filter_by(name=name).first()
if interest is None:
interest = Interest(name=name)
return cls(skill=interest)
class Interest(Base):
__table_args__ = (
UniqueConstraint('name'),
)
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String(250), nullable=False)
description = Column(UnicodeText())
def __init__(self, name, **kw):
self.name = name
Base.__init__(self,**kw)
# def __repr__(self):
# return (self.name)
@property
def users(self):
return [rel.user for rel in self.interest_items]
#TODO for deleting
class PositionCollect(Base):
position_id = Column(Integer, ForeignKey('positions.id', ondelete='cascade'), primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id', ondelete='cascade'), primary_key=True)
create_date = Column(DateTime(), default=datetime.now(tz=None))
position = relationship('Position', backref='position_items')
@classmethod
def _create(cls, p):
if p is None:
raise Exception('position can not be None')
return cls(position=p)
class Visit(Base):
user_id1 = Column('user_id1', Integer, ForeignKey('mba_users.id'), primary_key=True)
user_id2 = Column('user_id2', Integer, ForeignKey('mba_users.id'), primary_key=True)
visit_date = Column(DateTime(), default=datetime.now(tz=None))
# 1 <--> 1
user = relationship("MbaUser", foreign_keys="[Visit.user_id2]")
class City(Base):
__tablename__ = 'city'
__table_args__ = (
UniqueConstraint('name'),
)
id = Column(Integer, primary_key=True)
name = Column(Unicode(50), nullable=False)
acts = relationship("Act", backref='city', order_by='desc(Act.creation_date)')
usercity = relationship("MbaUser", backref='city', order_by='desc(MbaUser.creation_date)')
@classmethod
def _find_or_create(cls, name):
with DBSession.no_autoflush:
obj = DBSession.query(City).filter_by(name=name).first()
if obj is None:
obj = City(name=name)
# print 'cannt find city create one'
#return cls(city=obj)
return obj
class UserBetween(Base):
city_id = Column(Integer, ForeignKey('city.id'), primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
user = relationship("MbaUser",
backref=backref("user_between",
cascade="all, delete-orphan")
)
city = relationship("City")
city_name = association_proxy("city", "name")
@classmethod
def _city_find_or_create(cls, name):
city = City._find_or_create(name=name)
return cls(city=city)
class Message(Base):
id = Column(Integer, primary_key=True, autoincrement=True)
sender_id = Column(Integer, ForeignKey('mba_users.id'))
sender = relationship("MbaUser", foreign_keys="[Message.sender_id]")
reciever_id = Column(Integer, ForeignKey('mba_users.id'))
reciever = relationship("MbaUser", foreign_keys="[Message.reciever_id]")
# message type,
# 0: system message
# 1: admin message
# 2: friend private message
# 10: somebody ask to be friend
# 11: friends invite me some person
# 12: friends invite me some meetup
type = Column(Integer)
content = Column(String(500))
status = Column(Integer,default=0) # 0: unread, 1:read, 2:deleted
from mba.utils import assign_default_avatar
#This is a base class for all users
class MbaUser(Base):
__mapper_args__ = dict(
order_by='mba_users.id',
# polymorphic_on='type',
# polymorphic_identity='mba_users',
#with_polymorphic='*',
)
id = Column(Integer, primary_key=True)
name = Column(Unicode(100), unique=True)
password = Column(Unicode(100))
real_name = Column(Unicode(50))
_avatar = Column(String(100))
@property
def avatar(self):
if not self._avatar:
assign_default_avatar(self)
return self._avatar
@avatar.setter
def avatar(self, value):
self._avatar = value
[INACTIVE, ACTIVE, TO_FULLFIL_DATA, BANNED] = [0, 1, 2, 9999] #未激活、激活、待完善资料、禁封
status = Column(Integer, default=INACTIVE)
confirm_token = Column(Unicode(100))
phone = Column(String(20))
phone_privacy_level = Column(Integer, default=5) ## 1: 对所有会员公开 5: 成功交换名片可看, 9: 完全保密
title = Column(Unicode(100), nullable=True)
title_privacy_level = Column(Integer, default=5) # 1: 对所有会员公开 5: 成功交换名片可看, 9: 完全保密
email = Column(Unicode(100), unique=True)
email_privacy_level = Column(Integer, default=5) # 1: 对所有会员公开 5: 成功交换名片可看, 9: 完全保密
groups = Column(JsonType(), nullable=True)
creation_date = Column(DateTime(), nullable=True)
last_login_date = Column(DateTime())
[MALE, FEMALE] = range(2)
sex = Column(Integer())
# type = Column(String(50), nullable=True) # change type string to integer by sunset 2015.1.27
[USER_TYPE_MBA, USER_TYPE_EMBA, USER_TYPE_MANAGER, USER_TYPE_EXPERT] = range(4)
type = Column(Integer, default=USER_TYPE_MBA)
# _interests = relationship("UserInterest", backref='user')
interests = association_proxy(
'user_interests',
'interest_name',
creator=UserInterest._interest_find_or_create,
)
special_skills = association_proxy(
'user_skills',
'skill_name',
creator=UserSkill._interest_find_or_create,
)
between = association_proxy(
'user_between',
'city_name',
creator=UserBetween._city_find_or_create,
)
company = Column(String(255), default=u"")
company_privacy_level = Column(Integer, default=1) # 1: 对所有会员公开 5: 成功交换名片可看, 9: 完全保密
industry = Column(String(255), default=u"")
# special_skill = Column(String(255), default=u"")
interest = Column(String(255), default=u"") # job interest
# between = Column(String(255), default=u"")
introduction = Column(String(255), default=u"")
_positions = relationship("PositionCollect", backref='user')
positions = association_proxy("_positions","position", creator=PositionCollect._create)
#http://stackoverflow.com/questions/17252816/create-many-to-many-on-one-table
#http://docs.sqlalchemy.org/en/rel_0_8/orm/relationships.html#adjacency-list-relationships
#visit = relationship("Visit", foreign_keys="[Visit.user_id2]", backref='users', order_by='desc(Visit.visit_date)')
# 1 <--> 1
visit = relationship("Visit", primaryjoin="and_(MbaUser.id==Visit.user_id1)"
, order_by='desc(Visit.visit_date)')
# 1 <--> n
visitors = association_proxy("visit", "user")
#
friendship = relationship("MbaUser", secondary=friend,
primaryjoin=id==friend.c.user_a_id,
secondaryjoin=id==friend.c.user_b_id)
invited_meetups = relationship("MeetupInvitation",
foreign_keys="[MeetupInvitation.invitee_id]" )
messages = relationship('Message',foreign_keys="[Message.reciever_id]")
# newmessages = Message.query.filter(status=10).count()
newmessages = relationship('Message',
# foreign_keys="[Message.reciever_id]",
primaryjoin="and_(MbaUser.id==Message.reciever_id, Message.status==0)")
available_invitation_codes = relationship('InvitationCode',
primaryjoin="and_(MbaUser.id==InvitationCode.sender_id,"\
"InvitationCode.status==0)")
city_id = Column(Integer, ForeignKey('city.id') ) # backref is defined in class City
city_name = association_proxy('city'
, 'name'
, creator=City._find_or_create)
is_lunar_canlender = Column(Boolean)
lunar_birthday = Column(String(255), default="")
wechat = Column(String(255),default="")
def __init__(self, name, password=None, confirm_token=None,
title=u"", email=None, groups=(), city_name='',
real_name='', birth_date=None, school=u"", school_year=0,
company=u"", industry=u"", special_skill=u"", interest=u"",
between=u"", introduction=u"", **kwargs):
self.name = name
if password is not None:
password = get_principals().hash_password(password)
self.password = password
self.confirm_token = confirm_token
self.title = title
self.email = email
self.groups = groups
self.creation_date = datetime.now(tz=None)
self.last_login_date = None
if city_name:
self.city_name = city_name
else:
# default city_name
self.city_name = u'深圳'
self.real_name = real_name
self.birth_date = birth_date
self.school = school
self.school_year = school_year
self.company = company
self.industry = industry
self.special_skill = special_skill
self.between = between
self.introduction = introduction
super(MbaUser, self).__init__(**kwargs)
@property
def position_items(self):
return [(rel, rel.position) for rel in self._positions]
def __repr__(self): # pragma: no cover
return '<MbaUser %r>' % self.name
@property
def sex_info(self):
if 0 == self.sex:
return u"男"
return u"女"
def add_visit(self, u):
v = None
new_v = False
try:
v = DBSession.query(Visit).filter(Visit.user_id1==self.id, Visit.user_id2==u.id).one()
except:
new_v = True
if not v:
v = Visit(user_id1=self.id, user_id2=u.id)
v.visit_date = datetime.now(tz=None)
if new_v:
DBSession.add(v)
# @classproperty
# def __mapper_args__(cls):
# return dict(
# order_by='mba_users.name',
# polymorphic_identity=camel_case_to_name(cls.__name__)
# )
# id = Column('id', Integer, ForeignKey('mba_users.id'), primary_key=True)
school = Column(String(100))
school_year = Column(Integer())
# real_name = Column(String(20)), real_name is put in superclass ,for global site, real name is needed
birth_date = Column(Date())
identify_type = Column(Integer())
identify = Column(String(30))
home_number = Column(String(20))
# location = Column(String(20)) # location is duplicated with city_name in MbaUser
salary = Column(Integer())
work_years = Column(Integer())
company_phone = Column(String(30))
keyword = Column(String(100))
job_status = Column(String(100))
[AUTH_STATUS_UNAUTH, AUTH_STATUS_AUTHED, AUTH_STATUS_FAIL, AUTH_STATUS_REQ_FOR_AUTH ] = range(4)
auth_info = Column(Integer,default=AUTH_STATUS_UNAUTH) # 0, unauthed, 1 authed, 2 authfail, ( 3 request for auth?)
auth_meetup = Column(Integer,default=AUTH_STATUS_UNAUTH)
auth_friend = Column(Integer,default=AUTH_STATUS_UNAUTH) #
auth_expert = Column(Integer,default=AUTH_STATUS_UNAUTH) #
auth_expert_req = relationship('ExpertAuthReq', uselist=False)
auth_expert_reason = association_proxy('auth_expert_req', 'reason')
@property
def auth_honesty(self):
return [self.auth_info, self.auth_meetup, self.auth_friend].count(self.AUTH_STATUS_AUTHED) >= 2
resume = relationship('Resume', backref='user', uselist=False)
#resumes = relationship('Resume', backref='user')
def __repr__(self): # pragma: no cover
return '<Student %r>' % self.name
@property
def work_info(self):
arrs = [u"小于一年", u"一到三年", u"三到五年", u"五年以上"]
if self.work_years >= 0 and self.work_years < len(arrs):
return arrs[self.work_years]
return arrs[0]
@property
def birth_old(self):
return abs(date.today().year - self.birth_date.year)+1
Student = MbaUser
friend_union = select([
friend.c.user_a_id,
friend.c.user_b_id
]).where(friend.c.status==1).union(
select([
friend.c.user_b_id,
friend.c.user_a_id,
]).where(friend.c.status==1)
).alias()
MbaUser.all_friends = relationship('MbaUser',
secondary=friend_union,
primaryjoin=MbaUser.id==friend_union.c.user_a_id,
secondaryjoin=MbaUser.id==friend_union.c.user_b_id,
viewonly=True
)
my_requests = select([
friend.c.user_a_id,
friend.c.user_b_id
]).where(friend.c.status==0).alias()
MbaUser.my_requests = relationship('MbaUser',
secondary=my_requests,
primaryjoin=MbaUser.id==my_requests.c.user_a_id,
secondaryjoin=MbaUser.id==my_requests.c.user_b_id,
viewonly=True)
others_requests = select([
friend.c.user_a_id,
friend.c.user_b_id,
]).where(friend.c.status==0).alias()
MbaUser.others_requests = relationship('MbaUser',
secondary=others_requests,
primaryjoin=MbaUser.id==others_requests.c.user_b_id,
secondaryjoin=MbaUser.id==others_requests.c.user_a_id,
viewonly=True)
class Participate(Base):
__tablename__ = 'participate'
user_id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
act_id = Column(Integer, ForeignKey('acts.id'), primary_key=True)
creation_date = Column(DateTime(), nullable=False, default=datetime.now)
#用户参加活动之后可进行评分
rating = Column(Integer())
user = relationship("MbaUser", backref=backref("partin",
cascade="all, delete-orphan") )
meetup = relationship("Act")
class TeacherTag(Base):
__tablename__ = 'teacher_tags'
id = Column(Integer, primary_key=True)
title = Column(Unicode(100), unique=True, nullable=False)
def __repr__(self):
return "<TeacherTag ('%s')>" % self.title
@property
def items(self):
return [rel.item for rel in self.content_tags]
class TeacherTagToActs(Base):
__tablename__ = 'teacher_tag_to_acts'
#
tag_id = Column(Integer, ForeignKey('teacher_tags.id'), primary_key=True)
content_id = Column(Integer, ForeignKey('acts.id'), primary_key=True)
teacher_tag = relation(TeacherTag, backref=backref('teacher_tags', cascade='all'))
position = Column(Integer, nullable=False)
title = association_proxy('teacher_tag', 'title')
@classmethod
def _tag_find_or_create(cls, title):
with DBSession.no_autoflush:
tag = DBSession.query(TeacherTag).filter_by(title=title).first()
if tag is None:
tag = TeacherTag(title=title)
return cls(teacher_tag=tag)
# class ActStatus:
# PUBLIC, DRAFT, PRIVATE, CANCEL, DELETED = 0, 1, 2, 3, 4
# # public : seen by anyone
# # priveate: seen by admins
# # draft: seen by self.
# # cancel: meetup is canceled . 由于某些原因 管理员人为的取消活动
# # deleted: meetup is deleted . 如果活动已经有人报名,将不能删除
# # 是否是活动首页推荐、全站首页推荐,全站首页推荐待考虑
# class HeadLine:
# NOT_TOP, MEETUPS_TOP, SITE_TOP = 0, 1, 2
# 活动的类别
class MeetupType(Base):
id = Column(Integer, primary_key=True)
title = Column(String(100), nullable=True)
acts = relationship("Act", backref='meetup_types')
from kotti.views.edit.content import Image
#Image.acts = relationship("Act", backref('images'))
#人数限制、钱钱、地点、嘉宾
# Act means activity
class Act(Document):
id = Column('id', Integer, ForeignKey('documents.id'), primary_key=True)
__acl__ = SITE_ACL
[STATUS_PUBLIC, STATUS_DRAFT, STATUS_PRIVATE, STATUS_CANCEL, STATUS_DELETED] = range(5)
status = Column(Integer(), nullable=False, default=STATUS_PUBLIC)
[PUTONBANNER_NO, PUTONBANNER_MEETUP, PUTONBANNER_HOME] = range(3)
headline = Column(Integer, nullable=False, default=PUTONBANNER_NO)
meetup_type = Column(Integer, ForeignKey('meetup_types.id'))
meetup_type_title = association_proxy('meetup_types', 'title' )
#海报ID
# poster_id = Column(Integer, ForeignKey('images.id'))
# poster = relationship('Image')
# @property
# def poster_img(self):
# # return "/images/%s/image/" % (self.poster.name)
# return self.poster_img_url
poster_img = Column(String(200)) # change 50 to 200 , 2014.10.29 by sunset
# TODO Ignore the city ?
city_id = Column(Integer, ForeignKey('city.id'))
city_name = association_proxy('city'
, 'name'
, creator=City._find_or_create)
# Meetup start time
meetup_start_time = Column(DateTime(timezone=TZ_HK))
# Meetup finish time
meetup_finish_time = Column(DateTime(timezone=TZ_HK))
enroll_finish_time = Column(DateTime(timezone=TZ_HK))
enroll_start_time = Column(DateTime(timezone=TZ_HK))
location = Column(UnicodeText())
#经度
latitude = Column(Float())
longitude = Column(Float())
zoomlevel = Column(Integer())
_teacher_tags = relation(
TeacherTagToActs,
backref=backref('item'),
order_by=[TeacherTagToActs.position],
collection_class=ordering_list("position"),
cascade='all, delete-orphan',
)
teachers = association_proxy(
'_teacher_tags',
'title',
creator=TeacherTagToActs._tag_find_or_create,
)
limit_num = Column(Integer(), default=500)
pay_count = Column(Integer(), default=0)
#TODO for teacher selected
type_info = Document.type_info.copy(
name=u'Act',
title=_(u'Act'),
add_view=u'add_act',
addable_to=[u'Act'],
)
_parts = relationship('Participate', backref='act')
@property
def parts(self):
return [rel.user for rel in self._parts]
_comments = relationship('Comment', backref='act')
reviews = relationship('Review', backref='act')
# @property
# def comments(self):
# return [i. for rel in self._comments]
class Review(Document):
id = Column('id', Integer, ForeignKey('documents.id'), primary_key=True)
review_to_meetup_id = Column('review_to_meetup_id', Integer)
type_info = Document.type_info.copy(
name=u'Review',
title=_(u'Review'),
add_view=u'add_review',
addable_to=[u'Review'],
)
comments = relationship('Comment', backref='reivew')
class Infomation(Document):
'''This Class stores the infomatino recommended by admins '''
id = Column('id', Integer, ForeignKey('documents.id'), primary_key=True)
[STATUS_PUBLIC, STATUS_DRAFT, STATUS_PRIVATE, STATUS_CANCEL, STATUS_DELETED] = range(5)
status = Column(Integer(), nullable=False, default=STATUS_PUBLIC)
type_info = Document.type_info.copy(
name=u'Infomation',
title=_(u'推荐信息'),
add_view=u'add_info',
addable_to=[u'Infomation'],
)
comments = relationship('Comment', backref='infomation')
class Comment(Base):
__tablename__ = 'comments'
id = Column(Integer, primary_key=True)
TYPE_MEETUP = 0
TYPE_MEETUP_REVIEW = 1
TYPE_INFOMATION = 2
# 评论类型,0=活动评论,1=活动回顾评论, 2=推荐信息评论
type = Column(Integer, default=TYPE_MEETUP)
# 评论关联的活动、活动回顾的ID
document_id = Column(Integer, ForeignKey('documents.id'))
user_id = Column(Integer, ForeignKey('mba_users.id'))
content = Column(String(500), nullable=True)
user = relationship("MbaUser", backref='comment')
post_date = Column(DateTime(), nullable=False, default=datetime.now)
# Tables about resume
# Education n -- 1 Resume
class Education(Base):
id = Column(Integer, primary_key=True)
resume_id = Column(Integer, ForeignKey('resumes.id'))
school_name = Column(String(100), nullable=False)
location = Column(String(100))
start_date = Column(Date())
finish_date = Column(Date())
major = Column(String(30))
degree = Column(Integer())
abroad = Column(Boolean)
summary = Column(UnicodeText())
# Job n -- 1 Resume
class Job(Base):
id = Column(Integer, primary_key=True)
resume_id = Column(Integer, ForeignKey('resumes.id'))
location = Column(String(200))
industy = Column(String(100))
industy_type = Column(Integer())
industy_scale = Column(Integer())
duty = Column(String(200))
start_date = Column(Date())
finish_date = Column(Date())
description = Column(UnicodeText())
is_current = Column(Boolean, default=False)
class Train(Base):
id = Column(Integer, primary_key=True)
resume_id = Column(Integer, ForeignKey('resumes.id'))
start_date = Column(DateTime())
finish_date = Column(DateTime())
location = Column(String(200))
course = Column(String(100))
certificate = Column(String(50))
summary = Column(UnicodeText())
class ProjectInfo(Base):
id = Column(Integer, primary_key=True)
resume_id = Column(Integer, ForeignKey('resumes.id'))
start_date = Column(DateTime())
finish_date = Column(DateTime())
name = Column(String(200))
tool = Column(String(200))
hardware = Column(String(200))
software = Column(String(200))
description = Column(UnicodeText())
duty = Column(UnicodeText)
class Language(Base):
id = Column(Integer, primary_key=True)
resume_id = Column(Integer, ForeignKey('resumes.id'))
lang_type = Column(String(20))
grasp = Column(String(20))
read_cap = Column(String(20))
write_cap = Column(String(20))
# resume many to many skill
class ResumeSkill(Base):
resume_id = Column(Integer, ForeignKey('resumes.id'), primary_key=True)
skill_id = Column(Integer, ForeignKey('skills.id'), primary_key=True)
skill = relationship('Skill', backref='resume_items')
name = association_proxy('skill', 'name')
@classmethod
def _skill_find_or_create(cls, name):
with DBSession.no_autoflush:
skill = DBSession.query(Skill).filter_by(name=name).first()
if skill is None:
skill = Skill(name=name)
return cls(skill=skill)
class Skill(Base):
__table_args__ = (
UniqueConstraint('name'),
)
id = Column(Integer, primary_key=True)
name = Column(String(250))
@property
def resumes(self):
return [rel.resume for rel in self.resume_items]
class Resume(Base):
#id = Column(Integer, primary_key=True)
#user_id = Column(Integer, ForeignKey('mba_users.id'))
id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
title = Column(String(250))
create_date = Column(DateTime(), default=datetime.utcnow)
modify_date = Column(DateTime(), default=datetime.utcnow, onupdate=datetime.utcnow)
_skills = relationship('ResumeSkill', backref='resume')
skills = association_proxy(
'_skills',
'name',
creator=ResumeSkill._skill_find_or_create,
)
# String like jobid1,jobid2,jobid3 5,6,3,1
job_order = Column(String(100), nullable=True)
jobs = relationship('Job', cascade="save-update, merge, delete")
projects = relationship('ProjectInfo', cascade="save-update, merge, delete")
educations = relationship('Education', cascade="save-update, merge, delete")
trains = relationship('Train', cascade="save-update, merge, delete")
langs = relationship('Language', cascade="save-update, merge, delete")
publicity = Column(Boolean, default=True)
def order_jobs(self):
jobs = self.jobs
ids = dict([(obj.id,obj) for obj in jobs])
rlts = []
for s in self.job_order.split(','):
id = int(s)
if id in ids:
rlts.append(ids[id])
return (rlts+list(set(jobs).difference(set(rlts))))
def get_act_root(request=None):
return DBSession.query(Document).filter_by(name="meetup").one()
def get_review_root(request=None):
return DBSession.query(Document).filter_by(name="review").one()
def get_image_root(request=None):
return DBSession.query(Document).filter_by(name="images").one()
def get_info_root(request=None):
return DBSession.query(Document).filter_by(name="infomation").one()
class CompanyInfo(Base):
id = Column('id', Integer, primary_key=True)
name = Column(String(100))
scope = Column(String(200))
industry = Column(String(200))
type_info = Column(String(200))
location = Column(String(300))
description = Column(UnicodeText())
#用户投给职位的简历
class PositionResume(Base):
position_id = Column(Integer, ForeignKey('positions.id'), primary_key=True)
resume_id = Column(Integer, ForeignKey('resumes.id'), primary_key=True)
create_date = Column(DateTime(), default=datetime.utcnow())
#反馈状态
status = Column(Integer())
resume = relationship('Resume', backref='postition_items')
user = association_proxy('resume', 'user')
#工作职位表 views/active.py
class Position(Document):
id = Column('id', Integer, ForeignKey('documents.id'), primary_key=True)
company_id = Column(Integer, ForeignKey('company_infos.id'))
city_name = Column(String(100))
degree = Column(String(100))
experience = Column(String(100))
salary = Column(Integer(), default=0)
public_date = Column(Date(), default=datetime.now(tz=None).date())
end_date = Column(Date(), default=datetime.now(tz=None).date())
location = Column(UnicodeText())
#猎头/公司
hunting_type = Column(Integer(), default=0)
[STATUS_PUBLIC, STATUS_DRAFT] = range(2)
status = Column(Integer(), nullable=False, default=STATUS_DRAFT)
resumes = relationship('PositionResume', backref='position')
users = association_proxy('resumes', 'user')
company = relationship('CompanyInfo', backref='postitions')
company_name = association_proxy('company', 'name')
industry = association_proxy('company', 'industry')
create_date = Column(DateTime(), default=datetime.now(tz=None))
type_info = Document.type_info.copy(
name=u'Position',
title=_(u'Position'),
add_view=u'add_position',
addable_to=[u'Position'],
)
row2dict = lambda r: {c.name: getattr(r, c.name) for c in r.__table__.columns}<|fim▁hole|>
banner_position = Column(Integer, default=0) # 0:home banner, 1:meetup 2: Job, Currently, home banner is the only selection
[TYPE_HOME, TYPE_MEETUP, TYPE_JOB ] = [0, 1 , 2]
type = Column(Integer, default=TYPE_HOME) # 0: home Banner, 1:Meetup Banner, 2: Job Banner
title = Column(String(100))
img_url = Column(String(100))
link_url = Column(String(200))
htmlcontent = Column(String(500), default=0)
last_edit_date = Column(Date(), default=datetime.now(tz=None).date())
[VALID, INVALID ] = [1, 0]
status = Column(Integer, default=VALID) # 1: 生效, 0:失效
class ValidationSms(Base):
'''注册、重置密码时发送的验证短信的表'''
__tablename__ = "register_sms"
[TYPE_REGISTER, TYPE_RESET_PASSWORD] = [0,1]
id = Column(Integer, primary_key=True)
phonenum = Column(String(20))
# type = Column(Integer, default=TYPE_REGISTER) #类型,TYPE_REGISTER注册时,TYPE_RESET_PASSWORD:重置密码时
validate_code = Column(String(20)) # 注册时发送的验证码
send_datetime = Column(DateTime(), default=datetime.now(tz=None) )
ip = Column(String(50))
class InvitationCode(Base):
'''注册邀请码表'''
[AVAILABLE, USED, EXPIRED ] = [0, 1, -1] # # 0, unused, 1: used. -1: unvailable
id = Column(Integer, primary_key=True)
code = Column(String(10))
sender_id = Column('sender_id', Integer, ForeignKey('mba_users.id'))
sender = relationship("MbaUser", foreign_keys="[InvitationCode.sender_id]")
receiver_id = Column('receiver_id', Integer, ForeignKey('mba_users.id'))
receiver = relationship("MbaUser", foreign_keys="[InvitationCode.receiver_id]",
backref=backref("invitation_code",
cascade="all, delete-orphan"))
expiration = Column(DateTime() )
status = Column(Integer, default=AVAILABLE)
class GlobalSiteSetting(Base):
[TRUE, FALSE] = [1, 0]
id = Column(Integer, primary_key=True)
need_invitationcode = Column(Integer, default=True)
class Univs(Base):
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
pinyin = Column(String(250), nullable=False)
pprev = Column(String(250), nullable=False)
class ExpertAuthReq(Base):
'''专家申请'''
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id', ondelete='cascade') )
reason = Column(String(400))
[VALID, INVALID] = range(2) #申请状态在MbaUser里
status = Column(Integer, default=VALID)<|fim▁end|> |
class Banner(Base):
id = Column(Integer, primary_key=True) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import
<|fim▁hole|># Django starts so that shared_task will use this app.
from .celery import app as celery_app<|fim▁end|> | from .local import Local # noqa
from .production import Production # noqa
# This will make sure the app is always imported when |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! The module contains implementations of HTTP/2 clients that could be
//! directly used to access HTTP/2 servers, i.e. send requests and read
//! responses.
pub use self::simple::SimpleClient;
pub use self::async::Client;
<|fim▁hole|>#[cfg(test)]mod tests;<|fim▁end|> | mod simple;
mod async; |
<|file_name|>partial_gpus.py<|end_file_name|><|fim▁begin|># File has been renamed.<|fim▁hole|>raise DeprecationWarning("This file has been renamed to `fractional_gpus.py` "
"in the same folder!")<|fim▁end|> | |
<|file_name|>pyds9_backend.py<|end_file_name|><|fim▁begin|>#_PYTHON_INSERT_SAO_COPYRIGHT_HERE_(2007)_
#_PYTHON_INSERT_GPL_LICENSE_HERE_
from itertools import izip
import numpy
import time
import ds9
from sherpa.utils import get_keyword_defaults, SherpaFloat
from sherpa.utils.err import DS9Err
_target = 'sherpa'
def _get_win():
return ds9.ds9(_target)
def doOpen():
_get_win()
def isOpen():
targets = ds9.ds9_targets()
if targets is None:
return False
if type(targets) in (list,):
for target in targets:<|fim▁hole|>
return False
def close():
if isOpen():
imager = _get_win()
imager.set("quit")
def delete_frames():
if not isOpen():
raise DS9Err('open')
imager = _get_win()
try:
imager.set("frame delete all")
return imager.set("frame new")
except:
raise DS9Err('delframe')
def get_region(coord):
if not isOpen():
raise DS9Err('open')
imager = _get_win()
try:
regionstr = "regions -format saoimage -strip yes"
if (coord != ''):
if (coord != 'image'):
regionstr = "regions -format ciao -strip yes -system " + str(coord)
else:
regionstr = "regions -format saoimage -strip yes -system image"
reg = imager.get(regionstr)
reg = reg.replace(';','')
return reg
except:
raise DS9Err('retreg')
def image(arr, newframe=False, tile=False):
if not isOpen():
doOpen()
imager = _get_win()
if newframe is True:
try:
imager.set("frame new")
imager.set("frame last")
except:
raise DS9Err('newframe')
try:
if tile is True:
imager.set("tile yes")
else:
imager.set("tile no")
except:
raise DS9Err('settile')
time.sleep(1)
try:
# pyds9 expects shape[::-1] compared to DS9.py
# therefore transpose the image before sending
arr = numpy.asarray(arr, dtype=SherpaFloat)
imager.set_np2arr(arr.T)
except:
raise # DS9Err('noimage')
def _set_wcs(keys):
eqpos, sky, name = keys
phys = ''
wcs = "OBJECT = '%s'\n" % name
if eqpos is not None:
wcrpix = eqpos.crpix
wcrval = eqpos.crval
wcdelt = eqpos.cdelt
if sky is not None:
pcrpix = sky.crpix
pcrval = sky.crval
pcdelt = sky.cdelt
# join together all strings with a '\n' between each
phys = '\n'.join(["WCSNAMEP = 'PHYSICAL'",
"CTYPE1P = 'x '",
'CRVAL1P = %.14E' % pcrval[0],
'CRPIX1P = %.14E' % pcrpix[0],
'CDELT1P = %.14E' % pcdelt[0],
"CTYPE2P = 'y '",
'CRVAL2P = %.14E' % pcrval[1],
'CRPIX2P = %.14E' % pcrpix[1],
'CDELT2P = %.14E' % pcdelt[1]])
if eqpos is not None:
wcdelt = wcdelt * pcdelt
wcrpix = ((wcrpix - pcrval) /
pcdelt + pcrpix )
if eqpos is not None:
# join together all strings with a '\n' between each
wcs = wcs + '\n'.join(["RADECSYS = 'ICRS '",
"CTYPE1 = 'RA---TAN'",
'CRVAL1 = %.14E' % wcrval[0],
'CRPIX1 = %.14E' % wcrpix[0],
'CDELT1 = %.14E' % wcdelt[0],
"CTYPE2 = 'DEC--TAN'",
'CRVAL2 = %.14E' % wcrval[1],
'CRPIX2 = %.14E' % wcrpix[1],
'CDELT2 = %.14E' % wcdelt[1]])
# join the wcs and physical with '\n' between them and at the end
return ('\n'.join([wcs,phys]) + '\n')
def wcs(keys):
if not isOpen():
raise DS9Err('open')
imager = _get_win()
info = _set_wcs( keys )
try:
# use stdin to pass the WCS info
imager.set('wcs replace', info)
except:
raise DS9Err('setwcs')
def open():
doOpen()
def set_region(reg, coord):
if not isOpen():
raise DS9Err('open')
imager = _get_win()
try:
if (access(reg, R_OK) is True):
imager.set("regions load " + "'" + reg + "'")
else:
# Assume region string has to be in CIAO format
regions = reg.split(";")
for region in regions:
if (region != ''):
if (coord != ''):
imager.set("regions", str(coord) + ";" + region)
else:
imager.set("regions", region)
except:
raise DS9Err('badreg', str(reg))
def xpaget(arg):
if not isOpen():
raise DS9Err('open')
imager = _get_win()
return imager.get(arg)
def xpaset(arg, data=None):
if not isOpen():
raise DS9Err('open')
imager = _get_win()
return imager.set(arg, data)<|fim▁end|> | if _target in target:
return True |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>struct Inches {
pub inches: f64,
}
struct Feet {
pub ft: f64,
}
struct Yards {
pub yd: f64,
}
impl Into <Feet> for Inches {
fn into (self) -> Feet {
Feet { ft: self.inches / 12.0 }
}
}
impl Into <Yards> for Inches {
fn into (self) -> Yards {
Yards { yd: self.inches / 12.0 / 3.0 }
}
}
fn main () {
let height = Inches {inches: 68.0};
println! ("Inches: {}", height.inches);
println! ("Feet: {}", height.into ().ft);
println! ("Yards: {}", height.into::<Yards> ().yd);<|fim▁hole|><|fim▁end|> | } |
<|file_name|>dpdk.hh<|end_file_name|><|fim▁begin|>/*
* This file is open source software, licensed to you under the terms
* of the Apache License, Version 2.0 (the "License"). See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. You may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright (C) 2014 Cloudius Systems, Ltd.
*/
#pragma once
#include <memory>
#include <seastar/net/config.hh>
#include <seastar/net/net.hh>
#include <seastar/core/sstring.hh>
#include <seastar/util/program-options.hh>
namespace seastar {
namespace net {
/// DPDK configuration.
struct dpdk_options : public program_options::option_group {
/// DPDK Port Index.
///
/// Default: 0.
program_options::value<unsigned> dpdk_port_index;
/// \brief Enable HW Flow Control (on / off).
///
/// Default: \p on.
program_options::value<std::string> hw_fc;
/// \cond internal
dpdk_options(program_options::option_group* parent_group);
/// \endcond
};
}
/// \cond internal
#ifdef SEASTAR_HAVE_DPDK
std::unique_ptr<net::device> create_dpdk_net_device(
uint16_t port_idx = 0,
uint16_t num_queues = 1,
bool use_lro = true,
bool enable_fc = true);
std::unique_ptr<net::device> create_dpdk_net_device(
const net::hw_config& hw_cfg);
namespace dpdk {<|fim▁hole|> * @return Number of bytes needed for mempool objects of each QP.
*/
uint32_t qp_mempool_obj_size(bool hugetlbfs_membackend);
}
/// \endcond
#endif // SEASTAR_HAVE_DPDK
}<|fim▁end|> | /** |
<|file_name|>test_gvar.py<|end_file_name|><|fim▁begin|>"""
test-gvar.py
"""
# Copyright (c) 2012-20 G. Peter Lepage.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version (see <http://www.gnu.org/licenses/>).
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
import os
import unittest
import collections
import math
import pickle
import numpy as np
import random
import gvar as gv
from gvar import *
try:
import vegas
have_vegas = True
except:
have_vegas = False
FAST = False
class ArrayTests(object):
def __init__(self):
pass
def assert_gvclose(self,x,y,rtol=1e-5,atol=1e-8,prt=False):
""" asserts that the means and sdevs of all x and y are close """
if hasattr(x,'keys') and hasattr(y,'keys'):
if sorted(x.keys())==sorted(y.keys()):
for k in x:
self.assert_gvclose(x[k],y[k],rtol=rtol,atol=atol)
return
else:
raise ValueError("x and y have mismatched keys")
self.assertSequenceEqual(np.shape(x),np.shape(y))
x = np.asarray(x).flat
y = np.asarray(y).flat
if prt:
print(np.array(x))
print(np.array(y))
for xi,yi in zip(x,y):
self.assertGreater(atol+rtol*abs(yi.mean),abs(xi.mean-yi.mean))
self.assertGreater(10*(atol+rtol*abs(yi.sdev)),abs(xi.sdev-yi.sdev))
def assert_arraysclose(self,x,y,rtol=1e-5,prt=False):
self.assertSequenceEqual(np.shape(x),np.shape(y))
x = np.array(x).flatten()
y = np.array(y).flatten()
max_val = max(np.abs(list(x)+list(y)))
max_rdiff = max(np.abs(x-y))/max_val
if prt:
print(x)
print(y)
print(max_val,max_rdiff,rtol)
self.assertAlmostEqual(max_rdiff,0.0,delta=rtol)
def assert_arraysequal(self,x,y):
self.assertSequenceEqual(np.shape(x),np.shape(y))
x = [float(xi) for xi in np.array(x).flatten()]
y = [float(yi) for yi in np.array(y).flatten()]
self.assertSequenceEqual(x,y)
class test_svec(unittest.TestCase,ArrayTests):
def test_v(self):
""" svec svec.assign svec.toarray """
v = svec(3) # [1,2,0,0,3]
v.assign([1.,3.,2.],[0,4,1])
self.assert_arraysequal(v.toarray(),[1.,2.,0.,0.,3.])
def test_null_v(self):
""" svec(0) """
v = svec(0)
self.assertEqual(len(v.toarray()),0)
self.assertEqual(len(v.clone().toarray()),0)
self.assertEqual(len(v.mul(10.).toarray()),0)
u = svec(1)
u.assign([1],[0])
self.assertEqual(v.dot(u),0.0)
self.assertEqual(u.dot(v),0.0)
self.assert_arraysequal(u.add(v).toarray(),v.add(u).toarray())
def test_v_clone(self):
""" svec.clone """
v1 = svec(3) # [1,2,0,0,3]
v1.assign([1.,3.,2.],[0,4,1])
v2 = v1.clone() # [0,10,0,0,20]
self.assert_arraysequal(v1.toarray(),v2.toarray())
v2.assign([10.,20.,30.],[0,1,2])
self.assert_arraysequal(v2.toarray(),[10.,20.,30.])
def test_v_dot(self):
""" svec.dot """
v1 = svec(3) # [1,2,0,0,3]
v1.assign([1.,3.,2.],[0,4,1])
v2 = svec(2)
v2.assign([10.,20.],[1,4])
self.assertEqual(v1.dot(v2),v2.dot(v1))
self.assertEqual(v1.dot(v2),80.)
v1 = svec(3)
v1.assign([1,2,3],[0,1,2])
v2 = svec(2)
v2.assign([4,5],[3,4])
self.assertEqual(v1.dot(v2),v2.dot(v1))
self.assertEqual(v1.dot(v2),0.0)
def test_v_add(self):
""" svec.add """
v1 = svec(3) # [1,2,0,0,3]
v1.assign([1.,3.,2.],[0,4,1])
v2 = svec(2) # [0,10,0,0,20]
v2.assign([10.,20.],[1,4])
self.assert_arraysequal(v1.add(v2).toarray(),v2.add(v1).toarray())
self.assert_arraysequal(v1.add(v2).toarray(),[1,12,0,0,23])
self.assert_arraysequal(v1.add(v2,10,100).toarray(),[10.,1020.,0,0,2030.])
self.assert_arraysequal(v2.add(v1,100,10).toarray(),[10.,1020.,0,0,2030.])
v1 = svec(2) # overlapping
v1.assign([1,2],[0,1])
v2.assign([3,4],[1,2])
self.assert_arraysequal(v1.add(v2,5,7).toarray(),[5.,31.,28.])
self.assert_arraysequal(v2.add(v1,7,5).toarray(),[5.,31.,28.])
v1 = svec(3)
v2 = svec(3)
v1.assign([1,2,3],[0,1,2])
v2.assign([10,20,30],[1,2,3])
self.assert_arraysequal(v1.add(v2,5,7).toarray(),[5.,80.,155.,210.])
self.assert_arraysequal(v2.add(v1,7,5).toarray(),[5.,80.,155.,210.])
v1 = svec(2)
v2 = svec(2)
v1.assign([1,2],[0,1]) # non-overlapping
v2.assign([3,4],[2,3])
self.assert_arraysequal(v1.add(v2,5,7).toarray(),[5.,10.,21.,28.])
self.assert_arraysequal(v2.add(v1,7,5).toarray(),[5.,10.,21.,28.])
v1 = svec(4) # one encompasses the other
v1.assign([1,2,3,4],[0,1,2,3])
v2.assign([10,20],[1,2])
self.assert_arraysequal(v1.add(v2,5,7).toarray(),[5.,80.,155.,20.])
self.assert_arraysequal(v2.add(v1,7,5).toarray(),[5.,80.,155.,20.])
def test_v_mul(self):
""" svec.mul """
v1 = svec(3) # [1,2,0,0,3]
v1.assign([1.,3.,2.],[0,4,1])
self.assert_arraysequal(v1.mul(10).toarray(),[10,20,0,0,30])
def test_pickle(self):
v = svec(4)
v.assign([1.,2.,5.,22], [3,5,1,0])
with open('outputfile.p', 'wb') as ofile:
pickle.dump(v, ofile)
with open('outputfile.p', 'rb') as ifile:
newv = pickle.load(ifile)
self.assertEqual(type(v), type(newv))
self.assertTrue(np.all(v.toarray() == newv.toarray()))
os.remove('outputfile.p')
class test_smat(unittest.TestCase,ArrayTests):
def setUp(self):
""" make mats for tests """
global smat_m,np_m
smat_m = smat()
smat_m.append_diag(np.array([0.,10.,200.]))
smat_m.append_diag_m(np.array([[1.,2.],[2.,1.]]))
smat_m.append_diag(np.array([4.,5.]))
smat_m.append_diag_m(np.array([[3.]]))
np_m = np.array([[ 0., 0., 0., 0., 0., 0., 0., 0.],
[ 0., 10., 0., 0., 0., 0., 0., 0.],
[ 0., 0., 200., 0., 0., 0., 0., 0.],
[ 0., 0., 0., 1., 2., 0., 0., 0.],
[ 0., 0., 0., 2., 1., 0., 0., 0.],
[ 0., 0., 0., 0., 0., 4., 0., 0.],
[ 0., 0., 0., 0., 0., 0., 5., 0.],
[ 0., 0., 0., 0., 0., 0., 0., 3.]])
def tearDown(self):
global smat_m,np_m
smat_m = None
np_m = None
def test_m_append(self):
""" smat.append_diag smat.append_diag_m smat.append_row smat.toarray"""
self.assert_arraysequal(smat_m.toarray(),np_m)
def test_m_dot(self):
""" smat.dot """
global smat_m,np_m
v = svec(2)
v.assign([10,100],[1,4])
np_v = v.toarray()
nv = len(np_v)
self.assert_arraysequal(smat_m.dot(v).toarray(),np.dot(np_m[:nv,:nv],np_v))
self.assert_arraysequal(smat_m.dot(v).toarray(),[0.,100.,0.,200.,100.])
self.assertEqual(smat_m.dot(v).dot(v),np.dot(np.dot(np_m[:nv,:nv],np_v),np_v))
self.assertEqual(smat_m.dot(v).size,3)
def test_m_expval(self):
""" smat.expval """
global smat_m,np_m
v = svec(2)
v.assign([10.,100.],[1,4])
np_v = v.toarray()
nv = len(np_v)
self.assertEqual(smat_m.expval(v),np.dot(np.dot(np_m[:nv,:nv],np_v),np_v))
def test_pickle(self):
""" pickle.dump(smat, outfile) """
global smat_m
with open('outputfile.p', 'wb') as ofile:
pickle.dump(smat_m, ofile)
with open('outputfile.p', 'rb') as ifile:
m = pickle.load(ifile)
self.assertEqual(type(smat_m), type(m))
self.assertTrue(np.all(smat_m.toarray() == m.toarray()))
os.remove('outputfile.p')
class test_smask(unittest.TestCase):
def test_smask(self):
def _test(imask):
mask = smask(imask)
np.testing.assert_array_equal(sum(imask[mask.starti:mask.stopi]), mask.len)
np.testing.assert_array_equal(imask, np.asarray(mask.mask))
np.testing.assert_array_equal(np.asarray(mask.map)[imask != 0], np.arange(mask.len))
np.testing.assert_array_equal(np.cumsum(imask[imask != 0]) - 1, np.asarray(mask.map)[imask != 0])
g = gvar([1, 2, 3], [4, 5, 6])
gvar(1,0)
imask = np.array(g[0].der + g[2].der, dtype=np.int8)
_test(imask)
def test_masked_ved(self):
def _test(imask, g):
mask = smask(imask)
vec = g.internaldata[1].masked_vec(mask)
np.testing.assert_array_equal(vec, g.der[imask!=0])
g = gvar([1, 2, 3], [4, 5, 6])
gvar(1,0)
imask = np.array(g[0].der + g[1].der, dtype=np.int8)
g[1:] += g[:-1]
g2 = g**2
_test(imask, g2[0])
_test(imask, g2[1])
_test(imask, g2[2])
def test_masked_mat(self):
a = np.random.rand(3,3)
g = gvar([1, 2, 3], a.dot(a.T))
imask = np.array((g[0].der + g[2].der) != 0, dtype=np.int8)
cov = evalcov([g[0], g[2]])
gvar(1,0)
mask = smask(imask)
np.testing.assert_allclose(cov, g[1].cov.masked_mat(mask))
class test_gvar1(unittest.TestCase,ArrayTests):
""" gvar1 - part 1 """
def setUp(self):
""" setup for tests """
global x,xmean,xsdev,gvar
# NB - powers of two important
xmean = 0.5
xsdev = 0.25
gvar = switch_gvar()
x = gvar(xmean,xsdev)
# ranseed((1968,1972,1972,1978,1980))
# random.seed(1952)
def tearDown(self):
""" cleanup after tests """
global x,gvar
gvar = restore_gvar()
x = None
def test_str(self):
""" str(x) """
global x,xmean,xsdev,gvar
self.assertEqual(str(x), x.fmt())
def test_call(self):
""" x() """
global x,xmean,xsdev,gvar
n = 10000
fac = 5. # 4 sigma
xlist = [x() for i in range(n)]
avg = np.average(xlist)
std = np.std(xlist)
self.assertAlmostEqual(avg,x.mean,delta=fac*x.sdev/n**0.5)
self.assertAlmostEqual(std,(1-1./n)**0.5*xsdev,delta=fac*x.sdev/(2*n)**0.5)
def test_cmp(self):
""" x==y x!=y x>y x<y"""
global x,xmean,xsdev,gvar
x = gvar(1, 10)
y = gvar(2, 20)
self.assertTrue(y!=x and 2*x==y and x==1 and y!=1 and 1==x and 1!=y)
self.assertTrue(
not y==x and not 2*x!=y and not x!=1 and not y==1
and not 1!=x and not 1==y
)
self.assertTrue(y>x and x<y and y>=x and x<=y and y>=2*x and 2*x<=y)
self.assertTrue(not y<x and not x>y and not y<=x and not x>=y)
self.assertTrue(y>1 and x<2 and y>=1 and x<=2 and y>=2 and 2*x<=2)
self.assertTrue(not y<1 and not x>2 and not y<=1 and not x>=2)
self.assertTrue(1<y and 2>x and 1<=y and 2>=x and 2<=y and 2>=2*x)
self.assertTrue(not 1>y and not 2<x and not 1>=y and not 2<=x)
def test_neg(self):
""" -x """
global x,xmean,xsdev,gvar
z = -x
self.assertEqual(x.mean,-z.mean)
self.assertEqual(x.var,z.var)
def test_pos(self):
""" +x """
global x,xmean,xsdev,gvar
z = +x
self.assertEqual(x.mean, z.mean)
self.assertEqual(x.var, x.var)
class test_gvar2(unittest.TestCase,ArrayTests):
""" gvar - part 2 """
def setUp(self):
global x,y,gvar
# NB x.mean < 1 and x.var < 1 and y.var > 1 (assumed below)
# and powers of 2 important
gvar = switch_gvar()
x,y = gvar([0.125,4.],[[0.25,0.0625],[0.0625,1.]])
# ranseed((1968,1972,1972,1978,1980))
# random.seed(1952)
self.label = None
def tearDown(self):
""" cleanup after tests """
global x,y,gvar
x = None
y = None
gvar = restore_gvar()
# if self.label is not None:
# print self.label
def test_add(self):
""" x+y """
global x,y,gvar
z = x+y
cov = evalcov([x,y])
self.assertEqual(z.mean,x.mean+y.mean)
self.assertEqual(z.var,cov[0,0]+cov[1,1]+2*cov[0,1])
z = x + y.mean
self.assertEqual(z.mean,x.mean+y.mean)
self.assertEqual(z.var,x.var)
z = y.mean + x
self.assertEqual(z.mean,x.mean+y.mean)
self.assertEqual(z.var,x.var)
def test_sub(self):
""" x-y """
global x,y,gvar
z = x-y
cov = evalcov([x,y])
self.assertEqual(z.mean,x.mean-y.mean)
self.assertEqual(z.var,cov[0,0]+cov[1,1]-2*cov[0,1])
z = x - y.mean
self.assertEqual(z.mean,x.mean-y.mean)
self.assertEqual(z.var,x.var)
z = y.mean - x
self.assertEqual(z.mean,y.mean-x.mean)
self.assertEqual(z.var,x.var)
def test_mul(self):
""" x*y """
z = x*y
dz = [y.mean,x.mean]
cov = evalcov([x,y])
self.assertEqual(z.mean,x.mean*y.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
z = x * y.mean
dz = [y.mean,0.]
self.assertEqual(z.mean,x.mean*y.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
z = y.mean * x
self.assertEqual(z.mean,x.mean*y.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
def test_div(self):
""" x/y """
z = x/y
dz = [1./y.mean,-x.mean/y.mean**2]
cov = evalcov([x,y])
self.assertEqual(z.mean,x.mean/y.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
z = x / y.mean
dz = [1./y.mean,0.]
self.assertEqual(z.mean,x.mean/y.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
z = y.mean / x
dz = [-y.mean/x.mean**2,0.]
self.assertEqual(z.mean,y.mean/x.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
    def test_pow(self):
        """ x**y """
        # GVar ** GVar: dz = (y*x**(y-1), x**y * log(x))
        z = x**y
        dz = [y.mean*x.mean**(y.mean-1),x.mean**y.mean*log(x.mean)]
        cov = evalcov([x,y])
        self.assertEqual(z.mean,x.mean**y.mean)
        self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
        # GVar ** scalar
        z = x ** y.mean
        dz = [y.mean*x.mean**(y.mean-1),0.]
        self.assertEqual(z.mean,x.mean**y.mean)
        self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
        # scalar ** GVar: d(c**x)/dx = c**x * log(c)
        z = y.mean ** x
        dz = [y.mean**x.mean*log(y.mean),0.]
        self.assertEqual(z.mean,y.mean**x.mean)
        self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
    def t_fcn(self,f,df):
        """ tester for test_fcn

        f and df are strings eval-ed in (a copy of) the module globals:
        f is an expression in x, df its derivative df/dx.  Compares the
        propagated mean/variance of f(x) with the first-order result
        computed from floats.
        """
        gdict = dict(globals())
        gdict['x'] = x # with GVar
        fx = eval(f,gdict)
        gdict['x'] = x.mean # with float
        fxm = eval(f,gdict)
        dfxm = eval(df,gdict)
        self.assertAlmostEqual(fx.mean,fxm)
        self.assertAlmostEqual(fx.var,x.var*dfxm**2)
    def test_fcn(self):
        """ f(x) """
        # (expression, derivative) pairs fed to t_fcn
        flist = [
            ("sin(x)","cos(x)"), ("cos(x)","-sin(x)"), ("tan(x)","1 + tan(x)**2"),
            ("arcsin(x)","(1 - x**2)**(-1./2.)"), ("arccos(x)","-1/(1 - x**2)**(1./2.)"),
            ("arctan(x)","1/(1 + x**2)"),
            ("sinh(x)","cosh(x)"), ("cosh(x)","sinh(x)"), ("tanh(x)","1 - tanh(x)**2"),
            ("arcsinh(x)","1./sqrt(x**2+1.)"),("arccosh(1+x)","1./sqrt(x**2+2*x)"),
            ("arctanh(x)","1./(1-x**2)"),
            ("exp(x)","exp(x)"), ("log(x)","1/x"), ("sqrt(x)","1./(2*x**(1./2.))")
            ]
        for f,df in flist:
            self.label = f
            self.t_fcn(f,df)
        # arctan2 tests -- NB: local x,y shadow the module globals below
        x = gvar('0.5(0.5)')
        y = gvar('2(2)')
        f = arctan2(y, x)
        fc = arctan(y / x)
        self.assertAlmostEqual(f.mean, fc.mean)
        self.assertAlmostEqual(f.sdev, fc.sdev)
        # check all four quadrants and the zero-denominator edge cases
        self.assertAlmostEqual(arctan2(y, x).mean, numpy.arctan2(y.mean, x.mean))
        self.assertAlmostEqual(arctan2(y, -x).mean, numpy.arctan2(y.mean, -x.mean))
        self.assertAlmostEqual(arctan2(-y, -x).mean, numpy.arctan2(-y.mean, -x.mean))
        self.assertAlmostEqual(arctan2(-y, x).mean, numpy.arctan2(-y.mean, x.mean))
        self.assertAlmostEqual(arctan2(y, x*0).mean, numpy.arctan2(y.mean, 0))
        self.assertAlmostEqual(arctan2(-y, x*0).mean, numpy.arctan2(-y.mean, 0))
    def test_gvar_function(self):
        """ gvar_function(x, f, dfdx) """
        # scalar x: gvar_function should reproduce fcn(x) exactly
        x = sqrt(gvar(0.1, 0.1) + gvar(0.2, 0.5))
        def fcn(x):
            return sin(x + x**2)
        def dfcn_dx(x):
            return cos(x + x**2) * (1 + 2*x)
        f = fcn(x).mean
        dfdx = dfcn_dx(x).mean
        diff = gvar_function(x, f, dfdx) - fcn(x)
        self.assertAlmostEqual(diff.mean, 0.0)
        self.assertAlmostEqual(diff.sdev, 0.0)
        # list x with a zero derivative for the extra entry
        diff = gvar_function([x, x + gvar(2,2)], f, [dfdx, 0]) - fcn(x)
        self.assertAlmostEqual(diff.mean, 0.0)
        self.assertAlmostEqual(diff.sdev, 0.0)
        # dictionary-valued x, derivatives extracted via GVar.deriv
        x = gvar(dict(a='1(1)', b=['2(2)', '3(3)']))
        z = gvar(1,1)
        def fcn(x):
            return sin(x['a'] * x['b'][0]) * x['b'][1]
        f = fcn(x)
        dfdx = dict(a=f.deriv(x['a']), b=[f.deriv(x['b'][0]), f.deriv(x['b'][1])])
        f = f.mean
        diff = gvar_function(x, f, dfdx) - fcn(x)
        self.assertAlmostEqual(diff.mean, 0.0)
        self.assertAlmostEqual(diff.sdev, 0.0)
        # array-valued x
        x = gvar(['1(1)', '2(2)', '3(3)'])
        def fcn(x):
            return sin(x[0] + x[1]) * x[2]
        f = fcn(x)
        dfdx = np.array([f.deriv(x[0]), f.deriv(x[1]), f.deriv(x[2])])
        f = f.mean
        diff = gvar_function(x, f, dfdx) - fcn(x)
        self.assertAlmostEqual(diff.mean, 0.0)
        self.assertAlmostEqual(diff.sdev, 0.0)
def test_wsum_der(self):
""" wsum_der """
gv = GVarFactory()
x = gv([1,2],[[3,4],[4,5]])
self.assert_arraysequal(wsum_der(np.array([10.,100]),x),[10.,100.])
def test_wsum_gvar(self):
""" wsum_gvar """
gv = GVarFactory()
x = gv([1,2],[[3,4],[4,5]])
v = np.array([10.,100.])
ws = wsum_gvar(v,x)
self.assertAlmostEqual(ws.val,np.dot(v,mean(x)))
self.assert_arraysclose(ws.der,wsum_der(v,x))
def test_dotder(self):
""" GVar.dotder """
gv = GVarFactory()
x = gv([1,2],[[3,4],[4,5]])*2
v = np.array([10.,100.])
self.assertAlmostEqual(x[0].dotder(v),20.)
self.assertAlmostEqual(x[1].dotder(v),200.)
    def test_fmt(self):
        """ x.fmt

        Checks the default 2-digit error format, explicit digit counts,
        string-parsing round trips, exponent handling, and rounding
        boundary cases.
        """
        # default is 2 digits on the error
        self.assertEqual(x.fmt(None), x.fmt(2))
        self.assertEqual(x.fmt(3),"%.3f(%d)"%(x.mean,round(x.sdev*1000)))
        self.assertEqual(y.fmt(3),"%.3f(%.3f)"%(y.mean,round(y.sdev,3)))
        # parsing round trips (leading/trailing whitespace, signs)
        self.assertEqual(gvar(".1234(341)").fmt(), "0.123(34)")
        self.assertEqual(gvar(" .1234(341)").fmt(), "0.123(34)")
        self.assertEqual(gvar(".1234(341) ").fmt(), "0.123(34)")
        self.assertEqual(gvar(".1234(341)").fmt(1), "0.1(0)")
        self.assertEqual(gvar(".1234(341)").fmt(5), "0.12340(3410)")
        self.assertEqual(gvar(".1234(0)").fmt(), "0.1234(0)")
        self.assertEqual(gvar("-.1234(341)").fmt(), "-0.123(34)")
        self.assertEqual(gvar("+.1234(341)").fmt(), "0.123(34)")
        self.assertEqual(gvar("-0.1234(341)").fmt(), "-0.123(34)")
        self.assertEqual(gvar("10(1.3)").fmt(), "10.0(1.3)")
        self.assertEqual(gvar("10.2(1.3)").fmt(), "10.2(1.3)")
        self.assertEqual(gvar("-10.2(1.3)").fmt(), "-10.2(1.3)")
        self.assertEqual(gvar("10(1.3)").fmt(0),"10(1)")
        # exponent notation and +- notation
        self.assertEqual(gvar("1e-9 +- 1.23e-12").fmt(), "1.0000(12)e-09")
        self.assertEqual(gvar("1e-9 +- 1.23e-6").fmt(), '1(1230)e-09')
        self.assertEqual(gvar("1e+9 +- 1.23e+6").fmt(), "1.0000(12)e+09")
        self.assertEqual(gvar("1e-9 +- 0").fmt(), "1(0)e-09")
        self.assertEqual(gvar("0(0)").fmt(), "0(0)")
        self.assertEqual(gvar("1.234e-9 +- 0.129").fmt(), '1e-09 +- 0.13')
        self.assertEqual(gvar("1.23(4)e-9").fmt(), "1.230(40)e-09")
        self.assertEqual(gvar("1.23 +- 1.23e-12").fmt(), "1.2300000000000(12)")
        self.assertEqual(gvar("1.23 +- 1.23e-6").fmt(), "1.2300000(12)")
        self.assertEqual(gvar("1.23456 +- inf").fmt(3), "1.235 +- inf")
        self.assertEqual(gvar("1.23456 +- inf").fmt(), str(1.23456) + " +- inf")
        self.assertEqual(gvar("10.23 +- 1e-10").fmt(), "10.23000000000(10)")
        self.assertEqual(gvar("10.23(5.1)").fmt(), "10.2(5.1)")
        # negative ndecimal falls back to the +- format
        self.assertEqual(gvar("10.23(5.1)").fmt(-1),"10.23 +- 5.1")
        self.assertEqual(gvar(0.021, 0.18).fmt(), '0.02(18)')
        self.assertEqual(gvar(0.18, 0.021).fmt(), '0.180(21)')
        # boundary cases
        self.assertEqual(gvar(0.096, 9).fmt(), '0.1(9.0)')
        self.assertEqual(gvar(0.094, 9).fmt(), '0.09(9.00)')
        self.assertEqual(gvar(0.96, 9).fmt(), '1.0(9.0)')
        self.assertEqual(gvar(0.94, 9).fmt(), '0.9(9.0)')
        self.assertEqual(gvar(-0.96, 9).fmt(), '-1.0(9.0)')
        self.assertEqual(gvar(-0.94, 9).fmt(), '-0.9(9.0)')
        self.assertEqual(gvar(9.6, 90).fmt(), '10(90)')
        self.assertEqual(gvar(9.4, 90).fmt(), '9(90)')
        self.assertEqual(gvar(99.6, 91).fmt(), '100(91)')
        self.assertEqual(gvar(99.4, 91).fmt(), '99(91)')
        self.assertEqual(gvar(0.1, 0.0996).fmt(), '0.10(10)')
        self.assertEqual(gvar(0.1, 0.0994).fmt(), '0.100(99)')
        self.assertEqual(gvar(0.1, 0.994).fmt(), '0.10(99)')
        self.assertEqual(gvar(0.1, 0.996).fmt(), '0.1(1.0)')
        self.assertEqual(gvar(12.3, 9.96).fmt(), '12(10)')
        self.assertEqual(gvar(12.3, 9.94).fmt(), '12.3(9.9)')
        # 0 +- stuff
        self.assertEqual(gvar(0, 0).fmt(), '0(0)')
        self.assertEqual(gvar(0, 99.6).fmt(), '0(100)')
        self.assertEqual(gvar(0, 99.4).fmt(), '0(99)')
        self.assertEqual(gvar(0, 9.96).fmt(), '0(10)')
        self.assertEqual(gvar(0, 9.94).fmt(), '0.0(9.9)')
        self.assertEqual(gvar(0, 0.996).fmt(), '0.0(1.0)')
        self.assertEqual(gvar(0, 0.994).fmt(), '0.00(99)')
        self.assertEqual(gvar(0, 1e5).fmt(), '0.0(1.0)e+05')
        self.assertEqual(gvar(0, 1e4).fmt(), '0(10000)')
        self.assertEqual(gvar(0, 1e-5).fmt(), '0.0(1.0)e-05')
        self.assertEqual(gvar(0, 1e-4).fmt(), '0.00000(10)')
def test_fmt2(self):
""" fmt(x) """
g1 = gvar(1.5,0.5)
self.assertEqual(fmt(g1),g1.fmt())
g2 = [g1,2*g1]
fmtg2 = fmt(g2)
self.assertEqual(fmtg2[0],g2[0].fmt())
self.assertEqual(fmtg2[1],g2[1].fmt())
g3 = dict(g1=g1,g2=g2)
fmtg3 = fmt(g3)
self.assertEqual(fmtg3['g1'],g1.fmt())
self.assertEqual(fmtg3['g2'][0],g2[0].fmt())
self.assertEqual(fmtg3['g2'][1],g2[1].fmt())
    def test_tabulate(self):
        """ tabulate(g) """
        # mixed scalar/vector/tensor BufferDict, formatted into 2 columns
        g = BufferDict()
        g['scalar'] = gv.gvar('10.3(1.2)')
        g['vector'] = gv.gvar(['0.52(3)', '0.09(10)', '1.2(1)'])
        g['tensor'] = gv.gvar([
            ['0.01(50)', '0.001(20)', '0.033(15)'],
            ['0.001(20)', '2.00(5)', '0.12(52)'],
            ['0.007(45)', '0.237(4)', '10.23(75)'],
            ])
        table = gv.tabulate(g, ncol=2)
        # expected output must match byte for byte (including spacing)
        correct = '\n'. join([
            '   key/index          value     key/index          value',
            '---------------------------  ---------------------------',
            '      scalar     10.3 (1.2)           1,0     0.001 (20)',
            '    vector 0     0.520 (30)           1,1     2.000 (50)',
            '           1      0.09 (10)           1,2      0.12 (52)',
            '           2      1.20 (10)           2,0     0.007 (45)',
            '  tensor 0,0      0.01 (50)           2,1     0.2370 (40)',
            '         0,1     0.001 (20)           2,2     10.23 (75)',
            '         0,2     0.033 (15)',
            ])
        self.assertEqual(table, correct, 'tabulate wrong')
    def test_partialvar(self):
        """ x.partialvar x.partialsdev fmt_errorbudget

        Checks partial variances for scalar/array/dict inputs and the
        exact text emitted by fmt_errorbudget / fmt_values.
        """
        gvar = gvar_factory()
        ## test basic functionality ##
        x = gvar(1,2)
        y = gvar(3,4)
        a,b = gvar([1,2],[[4,5],[5,16]])
        z = x+y+2*a+3*b
        self.assertEqual(z.var,x.var+y.var
                         +np.dot([2.,3.],np.dot(evalcov([a,b]),[2.,3.])))
        self.assertEqual(z.partialvar(x,y),x.var+y.var)
        # a and b are correlated, so a's partial var pulls in all of cov(a,b)
        self.assertEqual(z.partialvar(x,a),
                         x.var+np.dot([2.,3.],np.dot(evalcov([a,b]),[2.,3.])))
        self.assertEqual(z.partialvar(a),z.partialvar(a))
        ##
        ## test different arg types, fmt_errorbudget, fmt_values ##
        s = gvar(1,2)
        a = np.array([[gvar(3,4),gvar(5,6)]])
        d = BufferDict(s=gvar(7,8),v=[gvar(9,10),gvar(10,11)])
        z = s + sum(a.flat) + sum(d.flat)
        self.assertEqual(z.partialvar(s,a,d),z.var)
        self.assertEqual(z.partialvar(s),s.var)
        self.assertEqual(z.partialvar(a),sum(var(a).flat))
        self.assertEqual(z.partialvar(d),sum(var(d).flat))
        self.assertAlmostEqual(z.partialsdev(s,a,d),z.sdev)
        # error-budget table must match byte for byte
        tmp = fmt_errorbudget(
            outputs=dict(z=z),
            inputs=collections.OrderedDict([
                ('a', a), ('s', s), ('d', d),
                ('ad', [a,d]), ('sa', [s,a]), ('sd', [s,d]), ('sad', [s,a,d])
                ]),
            ndecimal=1
            )
        out = "\n".join([
            "Partial % Errors:",
            "                   z",
            "--------------------",
            "        a:      20.6",
            "        s:       5.7",
            "        d:      48.2",
            "       ad:      52.5",
            "       sa:      21.4",
            "       sd:      48.6",
            "      sad:      52.8",
            "--------------------",
            "    total:      52.8",
            ""
            ])
        self.assertEqual(tmp,out,"fmt_errorbudget output wrong")
        # same budget but with a wider column
        tmp = fmt_errorbudget(
            outputs=dict(z=z),
            inputs=collections.OrderedDict([
                ('a', a), ('s', s), ('d', d),
                ('ad', [a,d]), ('sa', [s,a]), ('sd', [s,d]), ('sad', [s,a,d])
                ]),
            ndecimal=1, colwidth=25
            )
        out = "\n".join([
            "Partial % Errors:",
            "                                                 z",
            "--------------------------------------------------",
            "                       a:                     20.6",
            "                       s:                      5.7",
            "                       d:                     48.2",
            "                      ad:                     52.5",
            "                      sa:                     21.4",
            "                      sd:                     48.6",
            "                     sad:                     52.8",
            "--------------------------------------------------",
            "                   total:                     52.8",
            ""
            ])
        self.assertEqual(tmp,out,"fmt_errorbudget output wrong (with colwidth)")
        tmp = fmt_values(outputs=collections.OrderedDict([('s',s),('z',z)]),ndecimal=1)
        out = "\n".join([
            "Values:",
            "                  s: 1.0(2.0)            ",
            "                  z: 35.0(18.5)          ",
            ""
            ])
        self.assertEqual(tmp,out,"fmt_value output wrong")
    def test_errorbudget_warnings(self):
        """ fmt_errorbudget(...verify=True) """
        a, b, c = gvar(3 * ['1.0(1)'])
        # make b and c correlated with each other
        b , c = (b+c) / 2., (b-c) /2.
        outputs = dict(sum=a+b+c)
        # NOTE: turns warnings into exceptions globally for this test
        warnings.simplefilter('error')
        # independent, complete inputs -> no warning
        fmt_errorbudget(outputs=outputs, inputs=dict(a=a, b=b), verify=True)
        # correlated inputs -> warning (raised as UserWarning)
        with self.assertRaises(UserWarning):
            fmt_errorbudget(outputs=outputs, inputs=dict(a=a, b=b, c=c), verify=True)
        # incomplete inputs -> warning
        with self.assertRaises(UserWarning):
            fmt_errorbudget(outputs=outputs, inputs=dict(a=a), verify=True)
def test_der(self):
""" x.der """
global x,y
self.assert_arraysequal(x.der,[1.,0.])
self.assert_arraysequal(y.der,[0.,1.])
z = x*y**2
self.assert_arraysequal(z.der,[y.mean**2,2*x.mean*y.mean])
    def test_construct_gvar(self):
        """ construct_gvar

        Builds GVars directly from (mean, derivative-vector, smat) and
        from (mean, (der-values, indices), smat), then checks mean,
        derivatives, variance, and sdev.
        """
        v = 2.0
        dv = np.array([0.,1.])
        cov = smat()
        cov.append_diag_m(np.array([[2.,4.],[4.,16.]]))
        y = gvar(v,np.array([1,0.]),cov)
        z = gvar(v,dv,cov)
        cov = evalcov([y,z])
        self.assertEqual(z.mean,v)
        self.assert_arraysequal(z.der,dv)
        self.assertEqual(z.var,np.dot(dv,np.dot(cov,dv)))
        self.assertEqual(z.sdev,sqrt(z.var))
        # same thing using the sparse (values, indices) derivative form
        cov = smat()
        cov.append_diag_m(np.array([[2.,4.],[4.,16.]]))
        y = gvar(v,([1.], [0]), cov)
        z = gvar(v, ([1.], [1]), cov)
        cov = evalcov([y,z])
        self.assertEqual(z.mean,v)
        self.assert_arraysequal(z.der,dv)
        self.assertEqual(z.var,np.dot(dv,np.dot(cov,dv)))
        self.assertEqual(z.sdev,sqrt(z.var))
        # zero covariance
        x = gvar([1.], [[0.]])
        self.assertEqual(str(x), '[1(0)]')
        x = gvar(1, 0.)
        self.assertEqual(str(x), '1(0)')
    def t_gvar(self,args,xm,dx,xcov,xder):
        """ worker for test_gvar

        Calls gvar(*args) (with a fresh gvar factory) and checks the
        resulting mean, sdev, covariance, and derivative vector.
        """
        gvar = gvar_factory()
        x = gvar(*args)
        self.assertEqual(x.mean,xm)
        self.assertEqual(x.sdev,dx)
        self.assert_arraysequal(evalcov([x]),xcov)
        self.assert_arraysequal(x.der,xder)
    def test_gvar(self):
        """ gvar

        Exercises the many call signatures of gvar(): strings, pairs,
        existing GVars, nested sequences, and dictionaries.
        """
        ## tests for arguments corresponding to a single gvar ##
        cov = smat()
        cov.append_diag_m(np.array([[1.,2.],[2.,16.]]))
        x = gvar(2.,np.array([0.,1.]),cov)
        # each entry: (args, mean, sdev, cov, der, label)
        arglist = [(["4.125(250)"],4.125,0.25,[[.25**2]],[1.0],'"x(dx)"'), #]
                    (["-4.125(2.0)"],-4.125,2.0,[[2.**2]],[1.0],'"x(dx)"'),
                    (["4.125 +- 0.5"],4.125,0.5,[[0.5**2]],[1.0],'"x +- dx"'),
                    ([x],x.mean,x.sdev,evalcov([x]),x.der,"x"),
                    ([2.0],2.0,0.0,[[0.0]],[1.0],"x"),
                    ([(2.0,4.0)],2.,4.,[[4.**2]],[1.0],"(x,dx)"),
                    ([2.0,4.0],2.,4.,[[4.**2]],[1.0],"x,dx"),
                    ([x.mean,x.der,x.cov],x.mean,x.sdev,evalcov([x]),x.der,"x,der,cov")
                    ]
        for a in arglist:
            self.label = "gvar(%s)" % a[0]
            self.t_gvar(a[0],a[1],a[2],a[3],a[4])
        # tests involving a single argument that is sequence
        x = gvar([[(0,1),(1,2)],[(3,4),(5,6)],[(7,8),(9,10)]])
        y = np.array([[gvar(0,1),gvar(1,2)],[gvar(3,4),gvar(5,6)],
                        [gvar(7,8),gvar(9,10)]])
        self.assert_gvclose(x,y)
        x = gvar([[["0(1)"],["2(3)"]]])
        y = np.array([[[gvar(0,1)],[gvar(2,3)]]])
        self.assert_gvclose(x,y)
        x = gvar([[1.,2.],[3.,4.]])
        y = np.array([[gvar(1.,0),gvar(2.,0)],[gvar(3.,0),gvar(4.,0)]])
        self.assert_gvclose(x,y)
        x = gvar([gvar(0,1),gvar(2,3)])
        y = np.array([gvar(0,1),gvar(2,3)])
        self.assert_gvclose(x,y)
        # tests involving dictionary arguments
        x = gvar(dict(a=1,b=[2,3]), dict(a=10, b=[20,30]))
        y = dict(a=gvar(1,10), b=[gvar(2,20), gvar(3,30)])
        self.assert_gvclose(x,y)
        # correlated GVars round-trip through (mean, evalcov)
        a, b = gvar([1,2],[10,20])
        a, b = a+b, a-b
        x = gvar([a, a+b, b, b-a])
        y = BufferDict()
        y['a'] = [a, a+b]
        y['b'] = [b, b-a]
        self.assert_gvclose(y.flat, x)
        z = gvar(mean(y), evalcov(y))
        self.assert_gvclose(z.flat, y.flat)
        self.assert_arraysclose(evalcov(z.flat), evalcov(x))
    def _tst_compare_evalcovs(self):
        " evalcov evalcov_blocks evalcov_blocks_dense agree "
        def reconstruct(x, blocks, compress):
            """ rebuild the full cov matrix from evalcov_blocks output """
            ans = np.zeros((len(x), len(x)), float)
            if compress:
                # first entry holds (indices, sdevs) for uncorrelated GVars
                idx, sdev = blocks[0]
                ans[idx, idx] = sdev ** 2
                n = (len(idx), len(blocks))
                blocks = blocks[1:]
            else:
                n = len(blocks)
            for idx, bcov in blocks:
                ans[idx[:,None], idx[:]] = bcov
            return ans, n
        # each setup string builds x with a different correlation structure
        for setup, compress in [
            ("N=10; a=np.random.rand(N,N); x=gv.gvar(N*[1.],a.dot(a.T)); x=a.dot(x);", True),
            ("N=10; a=np.random.rand(N,N); x=gv.gvar(N*[1.],a.dot(a.T)); x=a.dot(x);", False),
            ("N=10; x=gv.gvar(N*[1.],N*[1.]);", True),
            ("N=10; x=gv.gvar(N*[1.],N*[1.]);", False),
            ("N=10; x=gv.gvar(N*[1.],N*[1.]); x[1:] += x[:-1];", True),
            ("N=10; x=gv.gvar(N*[1.],N*[1.]); x[1:] += x[:-1];", False),
            ("N=10; a=np.random.rand(N,N); x=gv.gvar(N*[1.],a.dot(a.T));", True),
            ("N=10; a=np.random.rand(N,N); x=gv.gvar(N*[1.],a.dot(a.T));", False),
            ]:
            # exec into a dict so x can be retrieved afterwards
            tmp = locals()
            exec(setup, globals(), tmp)
            x = tmp['x']
            ec = gv.evalcov(x)
            ecb, necb = reconstruct(x, gv.evalcov_blocks(x, compress=compress), compress)
            ecbd, necbd = reconstruct(x, gv.evalcov_blocks_dense(x, compress=compress), compress)
            np.testing.assert_allclose(ec, ecbd)
            np.testing.assert_allclose(ec, ecb)
            self.assertEqual(necb, necbd)
            # print(necb)
def test_compare_evalcovs(self):
" evalcov evalcov_blocks evalcov_blocks_dense agree "
self._tst_compare_evalcovs()
tmp, gv._CONFIG['evalcov_blocks'] = gv._CONFIG['evalcov_blocks'], 1
self._tst_compare_evalcovs()
gv._CONFIG['evalcov_blocks'] = tmp
    def test_gvar_blocks(self):
        " block structure created by gvar.gvar "
        def blockid(g):
            """ covariance-block id of GVar g's primary index """
            return g.cov.blockid(g.internaldata[1].indices()[0])
        # diagonal sdevs -> every GVar in its own block
        x = gvar([1., 2., 3.], [1., 1., 1.])
        id = [blockid(xi) for xi in x]
        self.assertNotEqual(id[0], id[1])
        self.assertNotEqual(id[0], id[2])
        self.assertNotEqual(id[1], id[2])
        idlast = max(id)
        # diagonal matrix (fast=False) -> still separate blocks
        x = gvar([1., 2., 3.], [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]], fast=False)
        id = [blockid(xi) for xi in x]
        self.assertEqual(min(id), idlast + 1)
        self.assertNotEqual(id[0], id[1])
        self.assertNotEqual(id[0], id[2])
        self.assertNotEqual(id[1], id[2])
        idlast = max(id)
        # coupling 0-1 merges those two into one block
        x = gvar([1., 2., 3.], [[1., 0.1, 0.], [0.1, 1., 0.], [0., 0., 1.]], fast=False)
        id = [blockid(xi) for xi in x]
        self.assertEqual(min(id), idlast + 1)
        self.assertEqual(id[0], id[1])
        self.assertNotEqual(id[0], id[2])
        idlast = max(id)
        # coupling 0-2
        x = gvar([1., 2., 3.], [[1., 0., 0.1], [0.0, 1., 0.0], [0.1, 0., 1.]], fast=False)
        id = [blockid(xi) for xi in x]
        self.assertEqual(min(id), idlast + 1)
        self.assertEqual(id[0], id[2])
        self.assertNotEqual(id[0], id[1])
        idlast = max(id)
        # coupling 1-2
        x = gvar([1., 2., 3.], [[1., 0., 0.0], [0.0, 1., 0.1], [0.0, 0.1, 1.]], fast=False)
        id = [blockid(xi) for xi in x]
        self.assertEqual(min(id), idlast + 1)
        self.assertEqual(id[1], id[2])
        self.assertNotEqual(id[0], id[1])
        idlast = max(id)
        # chain 0-1-2 -> all in one block
        x = gvar([1., 2., 3.], [[1., 0.1, 0.0], [0.1, 1., 0.1], [0.0, 0.1, 1.]], fast=False)
        id = [blockid(xi) for xi in x]
        self.assertEqual(min(id), idlast + 1)
        self.assertEqual(id[1], id[2])
        self.assertEqual(id[0], id[1])
        idlast = max(id)
        # fully coupled -> one block
        x = gvar([1., 2., 3.], [[1., 0.1, 0.1], [0.1, 1., 0.1], [0.1, 0.1, 1.]], fast=False)
        id = [blockid(xi) for xi in x]
        self.assertEqual(min(id), idlast + 1)
        self.assertEqual(id[1], id[2])
        self.assertEqual(id[0], id[1])
    def test_gvar_verify(self):
        " gvar(x, xx, verify=True) "
        # consistent covariance matrix -> no error
        gvar([1., 2.], [[1., 2./10], [2./10., 1.]])
        # asymmetric covariance matrix raises even without verify=True
        with self.assertRaises(ValueError):
            gvar([1., 2.], [[1., .5], [.6, 1.]])
        # cases that generate errors only with verify=True:
        # negative sdevs and (presumably) a non-positive-definite
        # covariance -- TODO confirm which check fires for the last case
        for a,b in [
            (1., -1.), ([1., 2.], [2., -2.]),
            ([1., 2.], [[1., 2.], [2., 1.]]),
            ]:
            with self.assertRaises(ValueError):
                gvar(a, b, verify=True)
def test_asgvar(self):
""" gvar functions as asgvar """
z = gvar(x)
self.assertTrue(z is x)
z = gvar("2.00(25)")
self.assertEqual(z.mean,2.0)
self.assertEqual(z.sdev,0.25)
    def test_basis5(self):
        """ gvar(x,dx) """
        # 2-d array of means and (uncorrelated) sdevs
        xa = np.array([[2.,4.]])
        dxa = np.array([[16.,64.]])
        x = gvar(xa,dxa)
        xcov = evalcov(x)
        # evalcov of an array has shape x.shape + x.shape
        self.assertEqual(xcov.shape,2*x.shape)
        for xai,dxai,xi in zip(xa.flat,dxa.flat,x.flat):
            self.assertEqual(xai,xi.mean)
            self.assertEqual(dxai,xi.sdev)
        self.assertEqual(np.shape(xa),np.shape(x))
        xcov = xcov.reshape((2,2))
        self.assert_arraysequal(xcov.diagonal(),[dxa[0,0]**2,dxa[0,1]**2])
def test_basis6(self):
""" gvar(x,cov) """
xa = np.array([2.,4.])
cov = np.array([[16.,64.],[64.,4.]])
x = gvar(xa,cov)
xcov = evalcov(x)
for xai,dxai2,xi in zip(xa.flat,cov.diagonal().flat,x.flat):
self.assertEqual(xai,xi.mean)
self.assertEqual(dxai2,xi.sdev**2)
self.assertEqual(np.shape(xa),np.shape(x))
self.assert_arraysequal(xcov,cov.reshape((2,2)))
    def test_mean_sdev_var(self):
        """ mean(g) sdev(g) var(g) """
        def compare(x,y):
            """ assert two dicts of scalars/arrays are equal """
            self.assertEqual(set(x.keys()),set(y.keys()))
            for k in x:
                self.assertEqual(np.shape(x[k]),np.shape(y[k]))
                if np.shape(x[k])==():
                    self.assertEqual(x[k],y[k])
                else:
                    self.assertTrue(all(x[k]==y[k]))
        # dictionaries of GVars
        a = dict(x=gvar(1,2),y=np.array([gvar(3,4),gvar(5,6)]))
        a_mean = dict(x=1.,y=np.array([3.,5.]))
        a_sdev = dict(x=2.,y=np.array([4.,6.]))
        a_var = dict(x=2.**2,y=np.array([4.**2,6.**2]))
        compare(a_mean,mean(a))
        compare(a_sdev,sdev(a))
        compare(a_var,var(a))
        # arrays of GVars
        b = np.array([gvar(1,2),gvar(3,4),gvar(5,6)])
        b_mean = np.array([1.,3.,5.])
        b_sdev = np.array([2.,4.,6.])
        self.assertTrue(all(b_mean==mean(b)))
        self.assertTrue(all(b_sdev==sdev(b)))
        self.assertTrue(all(b_sdev**2==var(b)))
        # single GVar
        self.assertEqual(mean(gvar(1,2)),1.)
        self.assertEqual(sdev(gvar(1,2)),2.)
        self.assertEqual(var(gvar(1,2)),4.)
        # single non-GVar: treated as exact (zero error)
        self.assertEqual(mean(1.25), 1.25)
        self.assertEqual(sdev(1.25), 0.0)
        self.assertEqual(var(1.25), 0.0)
        # mixed array of GVars and plain numbers
        b = np.array([gvar(1,2), 3.0, gvar(5,6)])
        self.assertTrue(all(mean(b)==[1., 3., 5.]))
        self.assertTrue(all(sdev(b)==[2., 0., 6.]))
        self.assertTrue(all(var(b)==[4., 0., 36.]))
    def test_sdev_var(self):
        " sdev var from covariance matrices "
        # random (positive-definite) covariance matrix
        a = np.random.rand(10, 10)
        cov = a.dot(a.T)
        x = gvar(cov.shape[0] * [1], cov)        # correlated
        xd = gvar(cov.shape[0] * [1], cov.diagonal() ** 0.5)  # uncorrelated
        xt = a.dot(x)                            # derived (non-primary) GVars
        covt = a.dot(cov.dot(a.T))
        # exercise both code paths selected by the gv._CONFIG['var'] threshold
        for nthreshold in [1, 1000]:
            tmp, gv._CONFIG['var'] = gv._CONFIG['var'], nthreshold
            numpy.testing.assert_allclose(var(x), cov.diagonal())
            numpy.testing.assert_allclose(sdev(x), cov.diagonal() ** 0.5)
            numpy.testing.assert_allclose(var(xd), cov.diagonal())
            numpy.testing.assert_allclose(sdev(xd), cov.diagonal() ** 0.5)
            numpy.testing.assert_allclose(var(xt), covt.diagonal())
            numpy.testing.assert_allclose(sdev(xt), covt.diagonal() ** 0.5)
            gv._CONFIG['var'] = tmp
    def test_uncorrelated(self):
        """ uncorrelated(g1, g2) """
        a = dict(x=gvar(1,2),y=np.array([gvar(3,4),gvar(5,6)]))
        b = dict(x=gvar(1,2),y=np.array([gvar(3,4),gvar(5,6)]))
        c = np.array([gvar(1,2),gvar(3,4),gvar(5,6)])
        d = np.array([gvar(1,2),gvar(3,4),gvar(5,6)])
        # independently created GVars are uncorrelated ...
        self.assertTrue(uncorrelated(a,b))
        # ... but anything is correlated with itself
        self.assertTrue(not uncorrelated(a,a))
        self.assertTrue(uncorrelated(a['x'],a['y']))
        self.assertTrue(not uncorrelated(a['x'],a))
        self.assertTrue(uncorrelated(a,c))
        self.assertTrue(uncorrelated(c,a))
        self.assertTrue(uncorrelated(c,d))
        self.assertTrue(not uncorrelated(c,c))
        # mixing elements introduces correlations
        a['x'] += b['x']
        self.assertTrue(not uncorrelated(a,b))
        d += c[0]
        self.assertTrue(not uncorrelated(c,d))
        self.assertTrue(not uncorrelated(a,b['x']))
        # GVars sharing a covariance block are correlated
        a, b = gvar([1,2],[[1,.1],[.1,4]])
        c = 2*a
        self.assertTrue(not uncorrelated(a,c))
        self.assertTrue(not uncorrelated(b,c))
        self.assertTrue(not uncorrelated(a,b))
    def test_deriv(self):
        """ deriv(f, x) and f.deriv(x) for scalars, arrays, dicts """
        global x, y, gvar
        f = 2 * x ** 2. + 3 * y
        self.assertEqual(deriv(f, x), 4. * x.mean)
        self.assertEqual(deriv(f, y), 3.)
        # derivatives only defined w.r.t. primary GVars
        with self.assertRaises(ValueError):
            deriv(f, x+y)
        self.assertEqual(f.deriv(x), 4. * x.mean)
        self.assertEqual(f.deriv(y), 3.)
        with self.assertRaises(ValueError):
            f.deriv(x+y)
        # array-valued x argument
        self.assertEqual(deriv(f, [x, y]).tolist(), [4. * x.mean, 3.])
        self.assertEqual(deriv(f, [[x], [y]]).tolist(), [[4. * x.mean], [3.]])
        self.assertEqual(deriv([f], [x, y]).tolist(), [[4. * x.mean, 3.]])
        # array-valued f
        f = [2 * x + 3 * y, 4 * x]
        self.assertEqual(deriv(f, x).tolist(), [2., 4.])
        self.assertEqual(deriv(f, y).tolist(), [3., 0.])
        with self.assertRaises(ValueError):
            deriv(f, x+y)
        df = deriv(f, [[x, y]])
        self.assertEqual(df.tolist(), [[[2., 3.]], [[4., 0.]]])
        # dictionary-valued f
        f = BufferDict([('a', 2 * x + 3 * y), ('b', 4 * x)])
        self.assertEqual(deriv(f, x), BufferDict([('a',2.), ('b',4.)]))
        self.assertEqual(deriv(f, y), BufferDict([('a',3.), ('b',0.)]))
        df = deriv(f, [x, y])
        self.assertEqual(df['a'].tolist(), [2., 3.])
        self.assertEqual(df['b'].tolist(), [4., 0.])
        with self.assertRaises(ValueError):
            deriv(f, x+y)
    def test_correlate(self):
        " correlate(g, corr) "
        # correlate(gvar(mean, sdev), corr) should rebuild the original cov
        x = gvar([1., 2.], [[64., 4.], [4., 16.]])
        xmean = mean(x)
        xsdev = sdev(x)
        xx = correlate(gvar(xmean, xsdev), evalcorr(x))
        self.assert_arraysequal(xmean, mean(xx))
        self.assert_arraysequal(evalcov(x), evalcov(xx))
        # with upper, verify: lower triangle mangled, upper used
        corr = evalcorr(x)
        corr[1, 0] = 0.
        corr[1, 1] = 10.
        with self.assertRaises(ValueError):
            xx = correlate(gvar(xmean, xsdev), corr, upper=False, verify=True)
        xx = correlate(gvar(xmean, xsdev), corr, upper=True, verify=True)
        self.assert_arraysequal(xmean, mean(xx))
        self.assert_arraysequal(evalcov(x), evalcov(xx))
        # with lower, verify: upper triangle mangled, lower used
        corr = evalcorr(x)
        corr[0, 1] = 0.
        corr[0, 0] = 0.
        with self.assertRaises(ValueError):
            xx = correlate(gvar(xmean, xsdev), corr, lower=False, verify=True)
        xx = correlate(gvar(xmean, xsdev), corr, lower=True, verify=True)
        self.assert_arraysequal(xmean, mean(xx))
        self.assert_arraysequal(evalcov(x), evalcov(xx))
        # matrix
        x.shape = (2, 1)
        xmean = mean(x)
        xsdev = sdev(x)
        xx = correlate(gvar(xmean, xsdev), evalcorr(x))
        self.assert_arraysequal(xmean, mean(xx))
        self.assert_arraysequal(evalcov(x), evalcov(xx))
        # dict
        y = BufferDict()
        y['a'] = x[0, 0]
        y['b'] = x
        ymean = mean(y)
        ysdev = sdev(y)
        yy = correlate(gvar(ymean, ysdev), evalcorr(y))
        for k in y:
            self.assert_arraysequal(mean(y[k]), mean(yy[k]))
        ycov = evalcov(y)
        yycov = evalcov(yy)
        for k in ycov:
            self.assert_arraysequal(ycov[k], yycov[k])
    def test_evalcorr(self):
        " evalcorr(array) "
        # cov = [[64,4],[4,16]] -> corr = 4/(8*4) = 1/8 off-diagonal
        x = gvar([1., 2.], [[64., 4.], [4., 16.]])
        a, b = x
        c = evalcorr([a, b])
        self.assertEqual(corr(a,b), 1/8.)
        self.assert_arraysequal(c, [[1., 1/8.], [1/8., 1.]])
        # result shape is x.shape + x.shape
        c = evalcorr(x.reshape(2, 1))
        self.assertEqual(c.shape, 2 * (2, 1))
        self.assert_arraysequal(c.reshape(2,2), [[1., 1/8.], [1/8., 1.]])
        # dictionary argument: keyed by key pairs
        y = dict(a=x[0], b=x)
        c = evalcorr(y)
        self.assertEqual(c['a', 'a'], [[1]])
        self.assert_arraysequal(c['a', 'b'], [[1., 1/8.]])
        self.assert_arraysequal(c['b', 'a'], [[1.], [1./8.]])
        self.assert_arraysequal(c['b', 'b'], [[1., 1/8.], [1/8., 1.]])
    def _tst_evalcov1(self):
        """ evalcov(array) """
        # scaling a GVar scales its covariances accordingly
        a,b = gvar([1.,2.],[[64.,4.],[4.,36.]])
        c = evalcov([a,b/2])
        self.assert_arraysequal(c,[[ 64.,2.],[ 2.,9.]])
        self.assertEqual(cov(a, b/2), 2.)
        c = evalcov([a/2,b])
        self.assert_arraysequal(c,[[ 16.,2.],[ 2.,36.]])
        # an independent GVar z has zero covariance with x and y
        z = gvar(8.,32.)
        c = evalcov([x,y,z])
        self.assert_arraysequal(c[:2,:2],evalcov([x,y]))
        self.assertEqual(c[2,2],z.var)
        self.assert_arraysequal(c[:2,2],np.zeros(np.shape(c[:2,2])))
        self.assert_arraysequal(c[2,:2],np.zeros(np.shape(c[2,:2])))
        # linear combinations transform the cov matrix as R cov R^T
        rc = evalcov([x+y/2,2*x-y])
        rotn = np.array([[1.,1/2.],[2.,-1.]])
        self.assert_arraysequal(rc,np.dot(rotn,np.dot(c[:2,:2],rotn.transpose())))
def test_evalcov1(self):
""" evalcov(array) """
self._tst_evalcov1()
tmp, gv._CONFIG['evalcov'] = gv._CONFIG['evalcov'], 1
self._tst_evalcov1()
gv._CONFIG['evalcov'] = tmp
    def _tst_evalcov2(self):
        """ evalcov(dict) """
        # dict result is keyed by key pairs; entries are 2-d blocks
        c = evalcov({0:x + y / 2, 1:2 * x - y})
        rotn = np.array([[1., 1/2.], [2., -1.]])
        cz = np.dot(rotn, np.dot(evalcov([x, y]), rotn.transpose()))
        c = [[c[0,0][0,0], c[0,1][0,0]], [c[1,0][0,0], c[1,1][0,0]]]
        self.assert_arraysequal(c, cz)
        # mixed scalar/array values
        c = evalcov(dict(x=x, y=[x, y]))
        self.assert_arraysequal(c['y','y'], evalcov([x, y]))
        self.assertEqual(c['x','x'], [[x.var]])
        self.assert_arraysequal(c['x','y'], [[x.var, evalcov([x,y])[0,1]]])
        self.assert_arraysequal(c['y','x'], c['x','y'].T)
def test_evalcov2(self):
""" evalcov(dict) """
self._tst_evalcov2()
tmp, gv._CONFIG['evalcov'] = gv._CONFIG['evalcov'], 1
self._tst_evalcov2()
gv._CONFIG['evalcov'] = tmp
    def test_sample(self):
        " sample(g) "
        # scalar, correlated array, and dict test cases
        glist = [
            gvar('1(2)'), gv.gvar(['10(2)', '20(2)']) * gv.gvar('1(1)'),
            gv.gvar(dict(a='100(2)', b=['200(2)', '300(2)'])),
            ]
        for g in glist:
            # with identical seeds, sample(g) must reproduce the first
            # draw from raniter(g) -- for both svdcut and eps regulators
            ranseed(12)
            svdcut = 0.9
            s1 = sample(g, svdcut=svdcut)
            ranseed(12)
            s2 = next(raniter(g, svdcut=svdcut))
            self.assertEqual(str(s1), str(s2))
            ranseed(12)
            eps = 0.9
            s1 = sample(g, eps=eps)
            ranseed(12)
            s2 = next(raniter(g, eps=eps))
            self.assertEqual(str(s1), str(s2))
    @unittest.skipIf(FAST,"skipping test_raniter for speed")
    def test_raniter(self):
        """ raniter """
        global x,y,gvar
        # statistical test: sample means/cov should agree with the
        # GVars' means/cov within 5 sigma of the sampling error
        n = 1000
        rtol = 5./n**0.5
        x = gvar(x.mean, x.sdev)
        y = gvar(y.mean, y.sdev)
        f = raniter([x,y],n)
        ans = [fi for fi in f]
        # print(x, y, evalcov([x,y]))
        # print (ans)
        ans = np.array(ans).transpose()
        self.assertAlmostEqual(ans[0].mean(),x.mean,delta=x.sdev*rtol)
        self.assertAlmostEqual(ans[1].mean(),y.mean,delta=y.sdev*rtol)
        self.assert_arraysclose(np.cov(ans[0],ans[1]),evalcov([x,y]),rtol=rtol)
    @unittest.skipIf(FAST,"skipping test_raniter2 for speed")
    def test_raniter2(self):
        """ raniter & svd """
        for svdcut in [1e-20,1e-2]:
            # two almost-degenerate variables: p[1] - p[0] has tiny sdev
            pr = BufferDict()
            pr[0] = gvar(1,1)
            pr[1] = pr[0]+gvar(0.1,1e-4)
            a0 = []
            da = []
            n = 10000
            rtol = 5./n**0.5   # 5 sigma
            for p in raniter(pr,n,svdcut=svdcut):
                a0.append(p[0])
                da.append(p[1]-p[0])
            a0 = np.array(a0)
            da = np.array(da)
            dda = max(2*svdcut**0.5,1e-4) # largest eig is 2 -- hence 2*sqrt(svdcut)
            self.assertAlmostEqual(da.std(),dda,delta=rtol*dda)
            self.assertAlmostEqual(a0.mean(),1.,delta=rtol)
            self.assertAlmostEqual(da.mean(),0.1,delta=rtol*da.std())
def test_bootstrap_iter(self):
""" bootstrap_iter """
p = BufferDict()
p = gvar(1,1)*np.array([1,1])+gvar(0.1,1e-4)*np.array([1,-1])
p_sw = np.array([p[0]+p[1],p[0]-p[1]])/2.
p_cov = evalcov(p_sw.flat)
p_mean = mean(p_sw.flat)
p_sdev = mean(p_sw.flat)
for pb in bootstrap_iter(p,3,svdcut=1e-20):
pb_sw = np.array([pb[0]+pb[1],pb[0]-pb[1]])/2.
self.assert_arraysclose(p_cov,evalcov(pb_sw.flat))
dp = np.abs(mean(pb_sw.flat)-p_mean)
self.assertGreater(p_sdev[0]*5,dp[0])
self.assertGreater(p_sdev[1]*5,dp[1])
for pb in bootstrap_iter(p,3,svdcut=1e-2):
pb_sw = np.array([pb[0]+pb[1],pb[0]-pb[1]])/2.
pb_mean = mean(pb_sw.flat)
pb_sdev = sdev(pb_sw.flat)
self.assertAlmostEqual(pb_sdev[0],p_sdev[0])
self.assertAlmostEqual(pb_sdev[1],p_sdev[0]/10.)
dp = abs(pb_mean-p_mean)
self.assertGreater(p_sdev[0]*5,dp[0])
self.assertGreater(p_sdev[0]*5./10.,dp[1])
    def test_raniter3(self):
        """ raniter & BufferDict """
        # mixed scalar/vector/tensor dictionary
        pr = BufferDict()
        pr['s'] = gvar(2.,4.)
        pr['v'] = [gvar(4.,8.),gvar(8.,16.)]
        pr['t'] = [[gvar(16.,32.),gvar(32.,64.)],[gvar(64.,128.),gvar(128.,256.)]]
        pr['ps'] = gvar(256.,512.)
        nran = 49
        delta = 5./nran**0.5  # 5 sigma
        prmean = mean(pr)
        prsdev = sdev(pr)
        # accumulate samples key by key ...
        ans = dict((k,[]) for k in pr)
        for p in raniter(pr,nran):
            for k in p:
                ans[k].append(p[k])
        # ... and check sample means/sdevs against the GVars (5 sigma)
        for k in p:
            ansmean = np.mean(ans[k],axis=0)
            anssdev = np.std(ans[k],axis=0)
            pkmean = prmean[k]
            pksdev = prsdev[k]
            self.assertAlmostEqual(np.max(np.abs((pkmean-ansmean)/pksdev)),0.0,delta=delta)
            self.assertAlmostEqual(np.max(np.abs((pksdev-anssdev)/pksdev)),0.0,delta=delta)
    def test_SVD(self):
        """ SVD

        Exercises the SVD decomposition of covariance matrices:
        eigenvalues/vectors, kappa (eigenvalue ratio), svd cuts,
        delta corrections, and the decomp() factorization.
        """
        # error system
        with self.assertRaises(ValueError):
            SVD([1,2])
        # non-singular
        x,y = gvar([1,1],[1,4])
        cov = evalcov([(x+y)/2**0.5,(x-y)/2**0.5])
        s = SVD(cov)
        e = s.val
        v = s.vec
        k = s.kappa
        self.assert_arraysclose(e,[1.,16.],rtol=1e-6)
        self.assert_arraysclose(e[0]/e[1],1./16.,rtol=1e-6)
        # eigenvector equations and orthonormality
        self.assert_arraysclose(np.dot(cov,v[0]),e[0]*v[0],rtol=1e-6)
        self.assert_arraysclose(np.dot(cov,v[1]),e[1]*v[1],rtol=1e-6)
        self.assertTrue(np.allclose([np.dot(v[0],v[0]),np.dot(v[1],v[1]),np.dot(v[0],v[1])],
                            [1.,1.,0],rtol=1e-6))
        # spectral reconstruction of cov
        self.assert_arraysclose(sum(np.outer(vi,vi)*ei for ei,vi in zip(e,v)),
                                cov,rtol=1e-6)
        self.assertAlmostEqual(s.kappa,1/16.)
        # on-axis 0
        cov = np.array([[4.,0.0], [0.0, 0.0]])
        s = SVD(cov, rescale=False, svdcut=None)
        self.assertTrue(np.all(s.val == [0., 4.]))
        # singular case
        cov = evalcov([(x+y)/2**0.5,(x-y)/2**0.5,x,y])
        s = SVD(cov)
        e,v,k = s.val,s.vec,s.kappa
        self.assert_arraysclose(e,[0,0,2.,32.],rtol=1e-6)
        self.assert_arraysclose(sum(np.outer(vi,vi)*ei for ei,vi in zip(e,v)),
                                cov,rtol=1e-6)
        # svdcut raises the small eigenvalues to svdcut * max(e);
        # delta carries the corresponding correction
        s = SVD(cov,svdcut=1e-4,compute_delta=True)
        e,v,k,d = s.val,s.vec,s.kappa,s.delta
        self.assert_arraysclose(e,[32*1e-4,32*1e-4,2.,32.],rtol=1e-6)
        ncov = sum(np.outer(vi,vi)*ei for ei,vi in zip(e,v))-evalcov(d)
        self.assert_arraysclose(cov,ncov,rtol=1e-6)
        # svdnum limits the number of retained eigenmodes
        s = SVD(cov,svdnum=2,compute_delta=True)
        e,v,k,d = s.val,s.vec,s.kappa,s.delta
        self.assert_arraysclose(e,[2.,32.],rtol=1e-6)
        self.assertTrue(d is None)
        s = SVD(cov,svdnum=3,svdcut=1e-4,compute_delta=True)
        e,v,k,d = s.val,s.vec,s.kappa,s.delta
        self.assert_arraysclose(e,[32*1e-4,2.,32.],rtol=1e-6)
        # s.delta s.decomp
        for rescale in [False,True]:
            mat = [[1.,.25],[.25,2.]]
            s = SVD(mat,rescale=rescale)
            if rescale==False:
                self.assertTrue(s.D is None)
            else:
                # rescaling makes the diagonal of D.mat.D equal 1
                diag = np.diag(s.D)
                self.assert_arraysclose(np.diag(np.dot(diag,np.dot(mat,diag))),
                                        [1.,1.])
            # decomp(n) factorizes mat**n as sum of outer products
            self.assert_arraysclose(mat, sum(np.outer(wj,wj) for wj in s.decomp(1)))
            s = SVD(mat,svdcut=0.9,compute_delta=True,rescale=rescale)
            mout = sum(np.outer(wj,wj) for wj in s.decomp(1))
            self.assert_arraysclose(mat+evalcov(s.delta),mout)
            self.assertTrue(not np.allclose(mat,mout))
            s = SVD(mat,rescale=rescale)
            minv = sum(np.outer(wj,wj) for wj in s.decomp(-1))
            self.assert_arraysclose([[1.,0.],[0.,1.]],np.dot(mat,minv))
            if rescale==False:
                m2 = sum(np.outer(wj,wj) for wj in s.decomp(2))
                self.assert_arraysclose(mat,np.dot(m2,minv))
    def test_diagonal_blocks(self):
        """ find_diagonal_blocks """
        def make_blocks(*m_list):
            """ build GVars whose cov is block-diagonal with the given blocks """
            m_list = [np.asarray(m, float) for m in m_list]
            n = sum([m.shape[0] for m in m_list])
            ans = np.zeros((n,n), float)
            i = 0
            for m in m_list:
                j = i + m.shape[0]
                ans[i:j, i:j] = m
                i = j
            # mean is irrelevant
            return gvar(ans[0], ans)
        def compare_blocks(b1, b2):
            """ compare two block-index lists, ignoring order """
            s1 = set([tuple(list(b1i)) for b1i in b1])
            s2 = set([tuple(list(b2i)) for b2i in b2])
            self.assertEqual(s1, s2)
        # zeros inside a block split it into sub-blocks
        m = make_blocks(
            [[1]],
            [[1, 1], [1, 1]],
            [[1]]
            )
        idx = [idx.tolist() for idx,bcov in evalcov_blocks(m)]
        compare_blocks(idx, [[0], [3], [1, 2]])
        m = make_blocks(
            [[1, 0, 1], [0, 1, 0], [1, 0, 1]],
            [[1, 1], [1, 1]],
            [[1]],
            [[1]]
            )
        idx = [idx.tolist() for idx,bcov in evalcov_blocks(m)]
        compare_blocks(idx, [[1], [5], [6], [0, 2], [3, 4]])
        m = make_blocks(
            [[1, 0, 1, 1],
             [0, 1, 0, 1],
             [1, 0, 1, 1],
             [1, 1, 1, 1]],
            [[1, 1], [1, 1]],
            [[1]],
            [[1]]
            )
        idx = [idx.tolist() for idx,bcov in evalcov_blocks(m)]
        compare_blocks(idx, [[6], [7], [0, 1, 2, 3] , [4, 5]])
    def test_evalcov_blocks(self):
        """ evalcov_blocks(g) reassembles into evalcov(g) """
        def test_cov(g):
            """ rebuild cov from blocks and compare with evalcov """
            if hasattr(g, 'keys'):
                g = BufferDict(g)
            g = g.flat[:]
            cov = np.zeros((len(g), len(g)), dtype=float)
            for idx, bcov in evalcov_blocks(g):
                cov[idx[:,None], idx] = bcov
            self.assertEqual(str(evalcov(g)), str(cov))
        # uncorrelated, then with various correlations introduced
        g = gv.gvar(5 * ['1(1)'])
        test_cov(g)
        g[-1] = g[0] + g[1]
        test_cov(g)
        test_cov(g * gv.gvar('2(1)'))
        g = gv.gvar(5 * ['1(1)'])
        g[0] = g[-1] + g[-2]
        test_cov(g)
def test_evalcov_blocks_compress(self):<|fim▁hole|> blocks = evalcov_blocks(g, compress=True)
g = g.flat[:]
cov = np.zeros((len(g), len(g)), dtype=float)
idx, bsdev = blocks[0]
if len(idx) > 0:
cov[idx, idx] = bsdev ** 2
for idx, bcov in blocks[1:]:
cov[idx[:,None], idx] = bcov
self.assertEqual(str(evalcov(g)), str(cov))
g = gv.gvar(5 * ['1(1)'])
test_cov(g)
test_cov(dict(g=g))
g[-1] = g[0] + g[1]
test_cov(g)
test_cov(dict(g=g))
test_cov(g * gv.gvar('2(1)'))
g = gv.gvar(5 * ['1(1)'])
g[0] = g[-1] + g[-2]
test_cov(g)
test_cov(dict(g=g))
g[1:] += g[:-1]
test_cov(g)
test_cov(dict(g=g))
def test_svd(self):
""" svd """
def make_mat(wlist, n):
ans = np.zeros((n,n), float)
i, wgts = wlist[0]
if len(i) > 0:
ans[i, i] = np.array(wgts) ** 2
for i, wgts in wlist[1:]:
for w in wgts:
ans[i, i[:, None]] += np.outer(w, w)
return ans
def test_gvar(a, b):
self.assertEqual(a.fmt(4), b.fmt(4))
def test_cov(wgts, cov, atol=1e-7):
invcov = make_mat(wgts, cov.shape[0])
np.testing.assert_allclose(
invcov.dot(cov), np.eye(*cov.shape), atol=atol
)
np.testing.assert_allclose(svd.logdet, np.log(np.linalg.det(cov)), atol=atol)
# diagonal
f = gvar(['1(2)', '3(4)'])
g, wgts = svd(f, svdcut=0.9, wgts=-1)
test_gvar(g[0], f[0])
test_gvar(g[1], f[1])
test_cov(wgts, evalcov(g))
self.assertEqual(svd.nmod, 0)
self.assertEqual(svd.eigen_range, 1.)
# degenerate
g, wgts = svd(3 * [gvar('1(1)')], svdcut=1e-10, wgts=-1)
test_cov(wgts, evalcov(g), atol=1e-4)
self.assertEqual(svd.nmod, 2)
self.assertAlmostEqual(svd.eigen_range, 0.0)
# blocks
x = gvar(10 * ['1(1)'])
x[:5] += gvar('1(1)') # half are correlated
g = svd(x, svdcut=0.5)
self.assertEqual(svd.nmod, 4)
p = np.random.permutation(10)
gp = svd(x[p], svdcut=0.5)
self.assertEqual(svd.nmod, 4)
invp = np.argsort(p)
np.testing.assert_allclose(evalcov(g), evalcov(gp[invp]), atol=1e-7)
np.testing.assert_allclose(mean(g), mean(gp[invp]), atol=1e-7)
# cov[i,i] independent of i, cov[i,j] != 0
x, dx = gvar(['1(1)', '0.01(1)'])
g, wgts = svd([(x+dx)/2, (x-dx)/2.], svdcut=0.2 ** 2, wgts=-1)
y = g[0] + g[1]
dy = g[0] - g[1]
test_gvar(y, x)
test_gvar(dy, gvar('0.01(20)'))
test_cov(wgts, evalcov(g))
self.assertEqual(svd.nmod, 1)
self.assertAlmostEqual(svd.eigen_range, 0.01**2)
# negative svdcut
x, dx = gvar(['1(1)', '0.01(1)'])
g, wgts = svd([(x+dx)/2, (x-dx)/20.], svdcut=-0.2 ** 2, wgts=-1)
y = g[0] + g[1] * 10
dy = g[0] - g[1] * 10
np.testing.assert_allclose(evalcov([y, dy]), [[1, 0], [0, 0]], atol=1e-7)
test_gvar(y, x)
test_gvar(dy, gvar('0(0)'))
self.assertEqual(svd.dof, 1)
self.assertAlmostEqual(svd.eigen_range, 0.01**2)
# cov[i,i] independent of i, cov[i,j] != 0 --- cut too small
x, dx = gvar(['1(1)', '0.01(1)'])
g, wgts = svd([(x+dx)/2, (x-dx)/2.], svdcut=0.0099999** 2, wgts=-1)
y = g[0] + g[1]
dy = g[0] - g[1]
test_gvar(y, x)
test_gvar(dy, dx)
test_cov(wgts, evalcov(g))
self.assertEqual(svd.nmod, 0)
self.assertAlmostEqual(svd.eigen_range, 0.01**2)
# cov[i,i] independent of i after rescaling, cov[i,j] != 0
# rescaling turns this into the previous case
g, wgts = svd([(x+dx)/2., (x-dx)/20.], svdcut=0.2 ** 2, wgts=-1)
y = g[0] + g[1] * 10.
dy = g[0] - g[1] * 10.
test_gvar(y, x)
test_gvar(dy, gvar('0.01(20)'))
test_cov(wgts, evalcov(g))
self.assertEqual(svd.nmod, 1)
self.assertAlmostEqual(svd.eigen_range, 0.01**2)
# dispersed correlations
g2, g4 = gvar(['2(2)', '4(4)'])
orig_g = np.array([g2, (x+dx)/2., g4, (x-dx)/20.])
g, wgts = svd(orig_g, svdcut=0.2 ** 2, wgts=-1)
y = g[1] + g[3] * 10.
dy = g[1] - g[3] * 10.
test_gvar(y, x)
test_gvar(dy, gvar('0.01(20)'))
test_gvar(g[0], g2)
test_gvar(g[2], g4)
test_cov(wgts, evalcov(g))
self.assertEqual(svd.nmod, 1)
self.assertAlmostEqual(svd.eigen_range, 0.01**2)
self.assertEqual(svd.nblocks[1], 2)
self.assertEqual(svd.nblocks[2], 1)
# remove svd correction
g -= g.correction
y = g[1] + g[3] * 10.
dy = g[1] - g[3] * 10.
test_gvar(y, x)
test_gvar(dy, dx)
test_gvar(g[0], g2)
test_gvar(g[2], g4)
np.testing.assert_allclose(evalcov(g.flat), evalcov(orig_g), atol=1e-7)
# noise=True
x, dx = gvar(['1(1)', '0.01(1)'])
g, wgts = svd([(x+dx)/2, (x-dx)/2.], svdcut=0.2 ** 2, wgts=-1, noise=True)
y = g[0] + g[1]
dy = g[0] - g[1]
offsets = mean(g.correction)
self.assertEqual(g.nmod, 1)
self.assertAlmostEqual(offsets[0], -offsets[1])
self.assertGreater(chi2(g.correction[0]).Q, 0.01)
self.assertLess(chi2(g.correction[0]).Q, 0.99)
with self.assertRaises(AssertionError):
test_gvar(y, x)
test_gvar(dy, gvar('0.01(20)'))
self.assertTrue(equivalent(
g - g.correction, [(x+dx)/2, (x-dx)/2.]
))
self.assertTrue(not equivalent(
g, [(x+dx)/2, (x-dx)/2.]
))
# bufferdict
g = {}
g[0] = (x+dx)/2.
g[1] = (x-dx)/20.
g, wgts = svd({0:(x+dx)/2., 1:(x-dx)/20.}, svdcut=0.2 ** 2, wgts=-1)
assert isinstance(g, BufferDict)
y = g[0] + g[1] * 10.
dy = g[0] - g[1] * 10.
test_gvar(y, x)
test_gvar(dy, gvar('0.01(20)'))
test_cov(wgts, evalcov(g.flat))
self.assertEqual(svd.nmod, 1)
self.assertAlmostEqual(svd.eigen_range, 0.01**2)
self.assertTrue(equivalent(
g - g.correction, {0:(x+dx)/2, 1:(x-dx)/20.}
))
self.assertTrue(not equivalent(
g, {0:(x+dx)/2, 1:(x-dx)/20.}
))
def test_valder(self):
""" valder_var """
alist = [[1.,2.,3.]]
a = valder([[1.,2.,3.]])
alist = np.array(alist)
self.assertEqual(np.shape(a),np.shape(alist))
na = len(alist.flat)
for i,(ai,ali) in enumerate(zip(a.flat,alist.flat)):
der = np.zeros(na,float)
der[i] = 1.
self.assert_arraysequal(ai.der,der)
self.assertEqual(ai.val,ali)
def test_ranseed(self):
""" ranseed """
f = raniter([x,y])
ranseed((1,2))
f1 = next(f)
x1 = x()
y1 = y()
ranseed((1,2))
self.assert_arraysequal(f1,next(f))
self.assertEqual(x1,x())
self.assertEqual(y1,y())
# default initialization
ranseed()
f1 = next(f)
ranseed(ranseed.seed)
self.assert_arraysequal(f1, next(f))
def test_rebuild(self):
""" rebuild """
gvar = gvar_factory()
a = gvar([1.,2.],[[4.,2.],[2.,16.]])
b = a*gvar(1.,10.)
c = rebuild(b)
self.assert_arraysequal(c[0].der[-2:],b[0].der[:-1])
self.assert_arraysclose(evalcov(c),evalcov(b))
gvar = gvar_factory()
c = rebuild({0:b[0],1:b[1]},gvar=gvar)
c = np.array([c[0],c[1]])
self.assert_arraysequal(c[0].der,b[0].der[:-1])
self.assert_arraysclose(evalcov(c),evalcov(b) )
def test_chi2(self):
""" chi2(g1, g2) """
# uncorrelated
g = gvar([1., 2.], [1., 2.])
x = [2., 4.]
ans = chi2(x, g)
self.assertAlmostEqual(ans, 2., places=5)
self.assertEqual(ans.dof, 2)
self.assertAlmostEqual(ans.Q, 0.36787944, places=2)
# correlated
g = np.array([g[0]+g[1], g[0]-g[1]])
x = np.array([x[0]+x[1], x[0]-x[1]])
ans = chi2(x, g)
self.assertAlmostEqual(ans, 2., places=5)
self.assertEqual(ans.dof, 2)
self.assertAlmostEqual(ans.Q, 0.36787944, places=2)
# correlated with 0 mode and svdcut < 0
g = np.array([g[0], g[1], g[0]+g[1]])
x = np.array([x[0], x[1], x[0]+x[1]])
ans = chi2(x, g, svdcut=-1e-10)
self.assertAlmostEqual(ans, 2., places=5)
self.assertEqual(ans.dof, 2)
self.assertAlmostEqual(ans.Q, 0.36787944, places=2)
# dictionaries with different keys
g = dict(a=gvar(1,1), b=[[gvar(2,2)], [gvar(3,3)], [gvar(4,4)]], c=gvar(5,5))
x = dict(a=2., b=[[4.], [6.]])
ans = chi2(x,g)
self.assertAlmostEqual(ans, 3.)
self.assertEqual(ans.dof, 3)
self.assertAlmostEqual(ans.Q, 0.3916252, places=2)
ans = chi2(g,x)
self.assertAlmostEqual(ans, 3.)
self.assertEqual(ans.dof, 3)
self.assertAlmostEqual(ans.Q, 0.3916252, places=2)
ans = chi2(2., gvar(1,1))
self.assertAlmostEqual(ans, 1.)
self.assertEqual(ans.dof, 1)
self.assertAlmostEqual(ans.Q, 0.31731051, places=2)
# two dictionaries
g1 = dict(a=gvar(1, 1), b=[gvar(2, 2)])
g2 = dict(a=gvar(2, 2), b=[gvar(4, 4)])
ans = chi2(g1, g2)
self.assertAlmostEqual(ans, 0.2 + 0.2)
self.assertEqual(ans.dof, 2)
self.assertAlmostEqual(ans.Q, 0.81873075, places=2)
def test_corr(self):
""" rebuild (corr!=0) """
a = gvar([1., 2.], [3., 4.])
corr = 1.
b = rebuild(a, corr=corr)
self.assert_arraysclose(evalcov(a).diagonal(),evalcov(b).diagonal())
bcov = evalcov(b)
self.assert_arraysclose(bcov[0,1],corr*(bcov[0,0]*bcov[1,1])**0.5)
self.assert_arraysclose(bcov[1,0],bcov[0,1])
self.assert_arraysclose((b[1]-b[0]).sdev,1.0)
self.assert_arraysclose((a[1]-a[0]).sdev,5.0)
def test_filter(self):
g = collections.OrderedDict([('a', 2.3), ('b', [gv.gvar('12(2)'), 3.]), ('c', 'string')])
gm = collections.OrderedDict([('a', 2.3), ('b', [2., 3.]), ('c', 'string')])
self.assertEqual(str(gv.filter(g, gv.sdev)), str(gm))
def test_pickle(self):
""" pickle strategies """
for g in [
'1(5)',
[['2(1)'], ['3(2)']],
dict(a='4(2)', b=[['5(5)', '6(9)']]),
]:
g1 = gvar(g)
gtuple = (mean(g1), evalcov(g1))
gpickle = pickle.dumps(gtuple)
g2 = gvar(pickle.loads(gpickle))
self.assertEqual(str(g1), str(g2))
self.assertEqual(str(evalcov(g1)), str(evalcov(g2)))
def test_dump_load(self):
dict = collections.OrderedDict
gs = gv.gvar('1(2)') * gv.gvar('3(2)')
ga = gv.gvar([2, 3], [[5., 1.], [1., 10.]])
gd = gv.gvar(dict(s='1(2)', v=['2(2)', '3(3)'], g='4(4)'))
gd['v'] += gv.gvar('0(1)')
gd[(1,3)] = gv.gvar('13(13)')
gd['v'] = 1 / gd['v']
def _test(g, outputfile=None, test_cov=True):
s = dump(g, outputfile=outputfile)
d = load(s if outputfile is None else outputfile)
self.assertEqual( str(g), str(d))
if test_cov:
self.assertEqual( str(gv.evalcov(g)), str(gv.evalcov(d)))
# cleanup
if isinstance(outputfile, str):
os.remove(outputfile)
return d
for g in [gs, ga, gd]:
_test(g)
_test(g, outputfile='xxx.pickle')
_test(g, outputfile='xxx')
gd['x'] = 5.0
_test(gd, test_cov=False)
_test(gd, outputfile='xxx', test_cov=False)
for g in [gs, ga, gd]:
g = gv.mean(g)
_test(g, test_cov=False)
# misc types
g = dict(
s=set([1,2,12.2]),
a=1,
b=[1,[gv.gvar('3(1)') * gv.gvar('2(1)'), 4]],
c=dict(a=gv.gvar(5 * ['1(2)']), b=np.array([[4]])),
d=collections.deque([1., 2, gv.gvar('4(1)')]),
e='a string',
g=(3, 'hi', gv.gvar('-1(2)')),
)
g['f'] = ['str', g['b'][1][0] * gv.gvar('5(2)')]
d = _test(g, outputfile='xxx', test_cov=False)
# dumping classes, without and with special methods
g['C'] = C(gv.gvar(2 * ['3(4)']) * gv.gvar('10(1)'), 'str', (1,2,gv.gvar('2(1)')))
d = _test(g, test_cov=False)
self.assertEqual(str(gv.evalcov(d['C'].x)), str(gv.evalcov(g['C'].x)))
g['C'] = CC(gv.gvar(2 * ['3(4)']) * gv.gvar('10(1)'), 'str', 12.)
d = gv.loads(gv.dumps(g))
self.assertEqual(d['C'].z, None)
self.assertEqual(g['C'].z, 12.)
self.assertEqual(str(gv.evalcov(d['C'].x)), str(gv.evalcov(g['C'].x)))
def test_dump_load_errbudget(self):
dict = collections.OrderedDict
def _test(d, add_dependencies=False):
d = gv.BufferDict(d)
newd = loads(dumps(d, add_dependencies=add_dependencies))
str1 = str(d) + fmt_errorbudget(
outputs=dict(a=d['a'], b=d['b']),
inputs=dict(x=d['x'], y=d['y'], z=d['z']),
)
d = newd
str2 = str(d) + fmt_errorbudget(
outputs=dict(a=d['a'], b=d['b']),
inputs=dict(x=d['x'], y=d['y'], z=d['z']),
)
self.assertEqual(str1, str2)
# all primaries included
x = gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2
z = gv.gvar('3(4)') ** 0.5
u = gv.gvar([2, 3], [[5., 1.], [1., 10.]])
a = x*y
b = x*y - z
d = dict(a=a, b=b, x=x, y=y, z=z, u=u, uu=u*gv.gvar('1(1)'), xx=x)
_test(d)
del d['xx']
_test(d)
# a,b are primaries
a, b = gvar(mean([d['a'], d['b']]), evalcov([d['a'], d['b']]))
d['a'] = a
d['b'] = b
_test(d)
# no primaries included explicitly
x = gv.gvar('1(2)') + gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2 + gv.gvar('3(1)')
z = gv.gvar('3(4)') ** 0.5 + gv.gvar('4(1)')
a = x*y
b = x*y - z + gv.gvar('10(1)')
d = dict(a=a, b=b, x=x, y=y, z=z, uu=u*gv.gvar('1(1)'), xx=x)
_test(d, add_dependencies=True)
# mixture
x = gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2 + gv.gvar('3(1)')
z = gv.gvar('3(4)') ** 0.5 + gv.gvar('4(1)')
a = x*y
b = x*y - z + gv.gvar('10(1)')
d = dict(a=a, b=b, x=x, y=y, z=z, u=u, uu=u*gv.gvar('1(1)'), xx=x)
_test(d, add_dependencies=True)
def test_more_dump(self):
" check on particular issue "
x = gv.gvar(4 * ['1(2)'])
x[0] -= x[1] * gv.gvar('1(10)')
x[2] += x[1]
str1 = str(x) + str(evalcov(x))
x = loads(dumps(x))
str2 = str(x) + str(evalcov(x))
self.assertEqual(str1, str2)
def test_dumps_loads(self):
dict = collections.OrderedDict
gs = gv.gvar('1(2)')
ga = (gv.gvar(['2(2)', '3(3)']) + gv.gvar('0(1)') )
gd = gv.gvar(dict(s='1(2)', v=['2(2)', '3(3)'], g='4(4)'))
gd['v'] += gv.gvar('0(1)')
gd[(1,3)] = gv.gvar('13(13)')
gd['v'] = 1 / gd['v']
def _test(g):
s = dumps(g)
d = loads(s)
self.assertEqual( str(g), str(d))
self.assertEqual( str(gv.evalcov(g)), str(gv.evalcov(d)))
for g in [gs, ga, gd]:
_test(g)
###############
def test_gdump_gload(self):
gs = gv.gvar('1(2)') * gv.gvar('3(2)')
ga = gv.gvar([2, 3], [[5., 1.], [1., 10.]])
gd = gv.gvar(dict(s='1(2)', v=['2(2)', '3(3)'], g='4(4)'))
gd['v'] += gv.gvar('0(1)')
gd[(1,3)] = gv.gvar('13(13)')
gd['v'] = 1 / gd['v']
def _test(g, outputfile=None, method=None):
s = gdump(g, outputfile=outputfile, method=method)
d = gload(s if outputfile is None else outputfile, method=method)
self.assertEqual( str(g), str(d))
self.assertEqual( str(gv.evalcov(g)), str(gv.evalcov(d)))
# cleanup
if isinstance(outputfile, str):
os.remove(outputfile)
for g in [gs, ga, gd]:
_test(g)
_test(g, outputfile='xxx.json')
_test(g, outputfile='xxx.pickle')
_test(g, outputfile='xxx')
_test(g, outputfile='xxx', method='pickle')
_test(g, method='json')
_test(g, method='pickle')
_test(g, method='dict')
def test_gdump_gload_errbudget(self):
def _test(d, add_dependencies=False):
d = gv.BufferDict(d)
newd = gloads(gdumps(d, add_dependencies=add_dependencies))
str1 = str(d) + fmt_errorbudget(
outputs=dict(a=d['a'], b=d['b']),
inputs=dict(x=d['x'], y=d['y'], z=d['z']),
)
d = newd
str2 = str(d) + fmt_errorbudget(
outputs=dict(a=d['a'], b=d['b']),
inputs=dict(x=d['x'], y=d['y'], z=d['z']),
)
self.assertEqual(str1, str2)
# all primaries included
x = gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2
z = gv.gvar('3(4)') ** 0.5
u = gv.gvar([2, 3], [[5., 1.], [1., 10.]])
a = x*y
b = x*y - z
d = dict(a=a, b=b, x=x, y=y, z=z, u=u, uu=u*gv.gvar('1(1)'), xx=x)
_test(d)
del d['xx']
_test(d)
# a,b are primaries
a, b = gvar(mean([d['a'], d['b']]), evalcov([d['a'], d['b']]))
d['a'] = a
d['b'] = b
_test(d)
# no primaries included explicitly
x = gv.gvar('1(2)') + gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2 + gv.gvar('3(1)')
z = gv.gvar('3(4)') ** 0.5 + gv.gvar('4(1)')
a = x*y
b = x*y - z + gv.gvar('10(1)')
d = dict(a=a, b=b, x=x, y=y, z=z, uu=u*gv.gvar('1(1)'), xx=x)
_test(d, add_dependencies=True)
# mixture
x = gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2 + gv.gvar('3(1)')
z = gv.gvar('3(4)') ** 0.5 + gv.gvar('4(1)')
a = x*y
b = x*y - z + gv.gvar('10(1)')
d = dict(a=a, b=b, x=x, y=y, z=z, u=u, uu=u*gv.gvar('1(1)'), xx=x)
_test(d, add_dependencies=True)
def test_more_gdump(self):
" check on particular issue "
x = gv.gvar(4 * ['1(2)'])
x[0] -= x[1] * gv.gvar('1(10)')
x[2] += x[1]
str1 = str(x) + str(evalcov(x))
x = gloads(gdumps(x))
str2 = str(x) + str(evalcov(x))
self.assertEqual(str1, str2)
def test_gdumps_gloads(self):
gs = gv.gvar('1(2)')
ga = (gv.gvar(['2(2)', '3(3)']) + gv.gvar('0(1)') )
gd = gv.gvar(dict(s='1(2)', v=['2(2)', '3(3)'], g='4(4)'))
gd['v'] += gv.gvar('0(1)')
gd[(1,3)] = gv.gvar('13(13)')
gd['v'] = 1 / gd['v']
# json (implicit)
def _test(g):
s = gdumps(g)
d = gloads(s)
self.assertEqual( str(g), str(d))
self.assertEqual( str(gv.evalcov(g)), str(gv.evalcov(d)))
for g in [gs, ga, gd]:
_test(g)
# json
def _test(g):
s = gdumps(g, method='json')
d = gloads(s)
self.assertEqual( str(g), str(d))
self.assertEqual( str(gv.evalcov(g)), str(gv.evalcov(d)))
for g in [gs, ga, gd]:
_test(g)
# pickle
def _test(g):
s = gdumps(g, method='pickle')
d = gloads(s)
self.assertEqual( str(g), str(d))
self.assertEqual( str(gv.evalcov(g)), str(gv.evalcov(d)))
for g in [gs, ga, gd]:
_test(g)
################
def test_oldload(self):
gd = gv.gvar(dict(s='1(2)', v=['2(2)', '3(3)'], g='4(4)'))
for g in [gd, gd['s'], gd['v']]:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
g = gv.gvar(dict(s='1(2)', v=['2(2)', '3(3)'], g='4(4)'))
olddump(g, 'xxx.p')
d = load('xxx.p')
assert str(g) == str(d)
assert str(gv.evalcov(g)) == str(gv.evalcov(d))
olddump(g, 'xxx.json', method='json')
d = load('xxx.json', method='json')
assert str(g) == str(d)
assert str(gv.evalcov(g)) == str(gv.evalcov(d))
def test_dependencies(self):
def _test(g):
dep = dependencies(g)
new_g = g.mean + sum(dep * g.deriv(dep))
self.assertEqual(str(new_g - g), str(gvar('0(0)')))
self.assertTrue(equivalent(g, new_g))
x = gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2
z = gv.gvar('3(4)') ** 0.5 * y
_test(x * y)
_test(x * y - z)
self.assertEqual(len(dependencies([y, x])), 0)
self.assertEqual(len(dependencies([y, 'string', x])), 0)
self.assertEqual(len(dependencies([y, x, x**2, 2*y])), 0)
self.assertEqual(len(dependencies([x*y, x])), 1)
self.assertEqual(len(dependencies([x*y, x, x, x])), 1)
self.assertEqual(len(dependencies([x*y, x], all=True)), 2)
self.assertEqual(len(dependencies([x*y, x, 'string'], all=True)), 2)
self.assertEqual(len(dependencies([x*y, x, x, x], all=True)), 2)
self.assertTrue(missing_dependencies([x*y, x]))
self.assertTrue(missing_dependencies([x*y, x+y, x, x]))
self.assertTrue(not missing_dependencies([y, x]))
self.assertTrue(not missing_dependencies([x*y, x, y]))
def test_gammaQ(self):
" gammaQ(a, x) "
cases = [
(2.371, 5.243, 0.05371580082389009, 0.9266599665892222),
(20.12, 20.3, 0.4544782602230986, 0.4864172139106905),
(100.1, 105.2, 0.29649013488390663, 0.6818457585776236),
(1004., 1006., 0.4706659307021259, 0.5209695379094582),
]
for a, x, gax, gxa in cases:
np.testing.assert_allclose(gax, gv._utilities.gammaQ(a, x), rtol=0.01)
np.testing.assert_allclose(gxa, gv._utilities.gammaQ(x, a), rtol=0.01)
def test_erf(self):
" erf(x) "
for x in [-1.1, 0.2]:
self.assertAlmostEqual(erf(x), math.erf(x))
x = [[-1.1], [0.2]]
np.testing.assert_allclose(erf(x), [[math.erf(-1.1)], [math.erf(0.2)]])
x = gv.gvar('0(2)')
erfx = erf(x)
self.assertAlmostEqual(erfx.mean, math.erf(0))
self.assertAlmostEqual(
erfx.sdev,
2 * (math.erf(1e-10) - math.erf(-1e-10)) / 2e-10
)
x = gv.gvar('1.5(2)')
self.assertAlmostEqual(erf(x).mean, math.erf(x.mean))
x = gv.gvar(['0(2)', '1.5(2)'])
erfx = erf(x)
self.assertAlmostEqual(erfx[0].mean, math.erf(x[0].mean))
self.assertAlmostEqual(erfx[1].mean, math.erf(x[1].mean))
def test_equivalent(self):
" equivalent(g1, g2) "
x = gvar(['1(1)', '2(2)'])
y = gvar(['1(1)', '2(2)'])
u = 2 ** 0.5 * np.array([[0.5, 0.5],[-0.5, 0.5]])
ux = u.dot(x)
uTy = u.T.dot(y)
ux_y = ux + y
xnew = u.T.dot(ux_y) - uTy
self.assertTrue(equivalent(x, xnew))
self.assertTrue(not equivalent(x, y))
self.assertTrue(equivalent(x[0], xnew[0]))
d = dict(x=x, y0=y[0])
dnew = dict(x=xnew, y0=y[0])
self.assertTrue(equivalent(d, dnew))
dnew = dict(x=y, y0=y[0])
self.assertTrue(not equivalent(d, dnew))
dnew = dict(x=xnew, y0=x[0])
self.assertTrue(not equivalent(d, dnew))
def test_is_primary(self):
" is_primary(g) "
self.assertTrue(gvar('1(1)').is_primary())
self.assertTrue((2 * gvar('1(1)')).is_primary())
self.assertFalse((gvar('2(1)') * gvar('1(1)')).is_primary())
gs = gvar('1(1)')
ga = gvar(2 * [3 * ['1(1)']])
gd = dict(s=gs, a=ga)
self.assertEqual(is_primary(gs), True)
self.assertEqual(is_primary(ga).tolist(), 2 * [3 * [True]])
self.assertEqual(is_primary(gd).buf.tolist(), 7 * [True])
self.assertEqual(is_primary([gs, gs]).tolist(), [True, False])
gs = gs + gvar('1(1)')
ga[0, 0] += gvar('2(1)')
ga[1, 0] *= 5.
gd = BufferDict()
gd['s'] = gs
gd['a'] = ga
self.assertEqual(is_primary(gs), False)
self.assertEqual(is_primary(ga).tolist(), [[False, True, True], [True, True, True]])
self.assertEqual(is_primary(gd).buf.tolist(), [False, False] + 5 * [True])
def test_disassemble(self):
" d = disassemble(g); reassemble(d) "
# gvar
g = gvar('1(2)')
gn = reassemble(disassemble(g), gvar.cov)
d = gn - g
self.assertEqual(d.mean, 0.0)
self.assertEqual(d.sdev, 0.0)
# array
g = gvar([['1(2)', '2(3)'], ['3(4)', '4(5)']])
gn = reassemble(disassemble(g), gvar.cov)
self.assertEqual(g.shape, gn.shape)
d = gn - g
self.assertTrue(np.all(gv.mean(d) == 0.0))
self.assertTrue(np.all(gv.sdev(d) == 0.0))
# dict
g = gvar(
dict(s=gvar('1(2)'), a=gvar([['1(2)', '2(3)'], ['3(4)', '4(5)']]))
)
gn = reassemble(disassemble(g), gvar.cov)
for k in g:
d = gn[k] - g[k]
self.assertTrue(np.all(gv.mean(d) == 0.0))
self.assertTrue(np.all(gv.mean(d) == 0.0))
@unittest.skipIf(FAST,"skipping test_pdfstats for speed")
def test_pdfstats(self):
" PDFStatistics(moments) "
x = gv.gvar('3.0(4)')
avgs = np.zeros((10,4), float)
for i in range(10):
moments = np.zeros(4, float)
for xi in gv.raniter(x, 100):
moments += xi ** np.arange(1, 5)
s = PDFStatistics(moments / 100.)
avgs[i] = [s.mean, s.sdev, s.skew, s.ex_kurt]
mean = np.mean(avgs,axis=0)
sdev = np.std(avgs, axis=0)
diff = gvar(mean, sdev) - [x.mean, x.sdev, 0., 0.]
self.assertTrue(
np.all(np.fabs(gv.mean(diff)) < 5 * gv.sdev(diff))
)
@unittest.skipIf(not have_vegas, "vegas not installed")
@unittest.skipIf(FAST,"skipping test_pdfstatshist for speed")
def test_pdfstatshist(self):
" PDFStatistics(histogram) "
g = gv.gvar('2(1.0)')
hist = PDFHistogram(g + 0.1, nbin=50, binwidth=0.2)
integ = vegas.PDFIntegrator(g)
integ(neval=1000, nitn=5)
def f(p):
return hist.count(p)
results = integ(f, neval=1000, nitn=5,adapt=False)
for stats in [
PDFStatistics(histogram=(hist.bins, results)),
hist.analyze(results).stats
]:
self.assertTrue(
abs(stats.median.mean - g.mean) < 5 * stats.median.sdev
)
self.assertTrue(
abs(stats.plus.mean - g.sdev) < 5 * stats.plus.sdev
)
self.assertTrue(
abs(stats.minus.mean - g.sdev) < 5 * stats.minus.sdev
)
def test_regulate(self):
D = np.array([1., 2., 3.])
corr = np.array([[1., .1, .2], [.1, 1., .3], [.2, .3, 1.]])
cov = D[:, None] * corr * D[None, :]
g1 = gvar(1, 10)
g2 = gvar(3 * [2], cov)
g3 = gvar(3 * [3], 2 * cov)
g = np.concatenate(([g1], g2, g3))
cov = evalcov(g)
eps = 0.25
norm = np.linalg.norm(evalcorr(g), np.inf)
gr = regulate(g, eps=eps / norm, wgts=False)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
self.assertEqual(g.size, gr.dof)
self.assertEqual(g.size - 1, gr.nmod)
self.assertAlmostEqual(gr.eps, eps / norm)
self.assertEqual(gr.svdcut, None)
covr = evalcov(gr)
np.testing.assert_allclose(covr[0, :], cov[0, :])
np.testing.assert_allclose(covr[:, 0], cov[:, 0])
covr[1:, 1:][np.diag_indices_from(covr[1:, 1:])] -= eps * cov[1:, 1:].diagonal()
np.testing.assert_allclose(covr[1:, 1:], cov[1:, 1:])
gr, dummy = regulate(g, eps=eps / norm, wgts=True)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
self.assertEqual(g.size - 1, gr.nmod)
self.assertEqual(g.size, gr.dof)
covr = evalcov(gr)
np.testing.assert_allclose(covr[0, :], cov[0, :])
np.testing.assert_allclose(covr[:, 0], cov[:, 0])
covr[1:, 1:][np.diag_indices_from(covr[1:, 1:])] -= eps * cov[1:, 1:].diagonal()
np.testing.assert_allclose(covr[1:, 1:], cov[1:, 1:])
def test_regulate_svdcut(self):
" regulate -> svd "
D = np.array([1., 2., 3.])
corr = np.array([[1., .1, .2], [.1, 1., .3], [.2, .3, 1.]])
cov = D[:, None] * corr * D[None, :]
g1 = gvar(1, 10)
g2 = gvar(3 * [2], cov)
g3 = gvar(3 * [3], 2 * cov)
g = np.concatenate(([g1], g2, g3))
svdcut = 0.25
# verify that svd is being called in each case
gr = regulate(g, svdcut=svdcut, wgts=False)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
self.assertEqual(gr.svdcut, svdcut)
self.assertEqual(gr.eps, None)
gr = regulate(g, wgts=False)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
self.assertEqual(gr.svdcut, 1e-12) # default
self.assertEqual(gr.eps, None)
gr = regulate(g, svdcut=svdcut, eps=svdcut, wgts=False)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
self.assertEqual(gr.svdcut, svdcut)
self.assertEqual(gr.eps, None)
def test_regulate_singular(self):
D = np.array([1., 2., 3.])
# two zero eigenvalues
corr = np.array([[1., 1., 1.], [1., 1., 1.], [1.,1.,1.]])
cov = D[:, None] * corr * D[None, :]
g1 = gvar(1, 10)
g2 = gvar(3 * [2], cov)
g3 = gvar(3 * [3], 2 * cov)
g = np.concatenate(([g1], g2, g3))
cov = evalcov(g)
corr = evalcorr(g)
eps = 0.1
norm = np.linalg.norm(evalcorr(g), np.inf)
gr = regulate(g, eps=eps / norm, wgts=False)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
covr = evalcov(gr)
np.testing.assert_allclose(covr[0, :], cov[0, :])
np.testing.assert_allclose(covr[:, 0], cov[:, 0])
covr[1:, 1:][np.diag_indices_from(covr[1:, 1:])] -= eps * cov[1:, 1:].diagonal()
np.testing.assert_allclose(covr[1:, 1:], cov[1:, 1:])
gr, dummy = regulate(g, eps=eps / norm, wgts=True)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
covr = evalcov(gr)
np.testing.assert_allclose(covr[0, :], cov[0, :])
np.testing.assert_allclose(covr[:, 0], cov[:, 0])
covr[1:, 1:][np.diag_indices_from(covr[1:, 1:])] -= eps * cov[1:, 1:].diagonal()
np.testing.assert_allclose(covr[1:, 1:], cov[1:, 1:])
with self.assertRaises(np.linalg.LinAlgError):
# det(corr)=0, so this should trigger an error
gr, dummy = regulate(g, eps=0, wgts=True)
def test_regulate_dict(self):
D = np.array([1., 2., 3.])
corr = np.array([[1., .1, .2], [.1, 1., .3], [.2, .3, 1.]])
cov = D[:, None] * corr * D[None, :]
g = BufferDict()
g[1] = gvar(1, 10)
g[2] = gvar(3 * [2], cov)
g[3] = gvar(3 * [3], 2 * cov)
cov = evalcov(g.flat)
eps = 0.1
norm = np.linalg.norm(evalcorr(g.flat), np.inf)
gr = regulate(g, eps=eps / norm, wgts=False)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
covr = evalcov(gr.flat)
np.testing.assert_allclose(covr[0, :], cov[0, :])
np.testing.assert_allclose(covr[:, 0], cov[:, 0])
covr[1:, 1:][np.diag_indices_from(covr[1:, 1:])] -= eps * cov[1:, 1:].diagonal()
np.testing.assert_allclose(covr[1:, 1:], cov[1:, 1:])
gr, dummy = regulate(g, eps=eps / norm, wgts=True)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
covr = evalcov(gr.flat)
np.testing.assert_allclose(covr[0, :], cov[0, :])
np.testing.assert_allclose(covr[:, 0], cov[:, 0])
covr[1:, 1:][np.diag_indices_from(covr[1:, 1:])] -= eps * cov[1:, 1:].diagonal()
np.testing.assert_allclose(covr[1:, 1:], cov[1:, 1:])
def test_regulate_wgts(self):
D = np.array([1., 2., 3.])
corr = np.array([[1., .1, .2], [.1, 1., .3], [.2, .3, 1.]])
cov = D[:, None] * corr * D[None, :]
g1 = gvar(1, 10)
g2 = gvar(3 * [2], cov)
g3 = gvar(3 * [3], 2 * cov)
g = np.concatenate(([g1], g2, g3))
gr, i_wgts = regulate(g, eps=1e-15, wgts=1)
covr = np.zeros((g.size, g.size), dtype=float)
i, wgts = i_wgts[0]
if len(i) > 0:
covr[i, i] = np.array(wgts) ** 2
for i, wgts in i_wgts[1:]:
covr[i[:, None], i] = (wgts.T).dot(wgts) # wgts.T @ wgts
np.testing.assert_allclose(numpy.log(numpy.linalg.det(covr)), gr.logdet)
self.assertEqual(gr.nmod, 6)
np.testing.assert_allclose(covr[0,0], 100.)
np.testing.assert_allclose(covr[1:4,1:4], cov)
np.testing.assert_allclose(covr[4:7,4:7], 2 * cov)
gr, i_wgts = regulate(g, eps=1e-15, wgts=-1)
invcovr = np.zeros((g.size, g.size), dtype=float)
i, wgts = i_wgts[0]
if len(i) > 0:
invcovr[i, i] = np.array(wgts) ** 2
for i, wgts in i_wgts[1:]:
invcovr[i[:, None], i] += (wgts.T).dot(wgts) # wgts.T @ wgts
np.testing.assert_allclose(invcovr[0,0], 1/100.)
np.testing.assert_allclose(invcovr[1:4,1:4], np.linalg.inv(cov))
np.testing.assert_allclose(invcovr[4:7,4:7], 0.5 * np.linalg.inv(cov))
class C:
def __init__(self, x, y, z):
self.x = x
self.y = y
self.z = z
def __str__(self):
return str(self.__dict__)
def __repr__(self):
return str(self.__dict__)
class CC:
def __init__(self, x, y, z):
self.x = x
self.y = y
self.z = z
def __str__(self):
return str(self.__dict__)
def __repr__(self):
return str(self.__dict__)
def _remove_gvars(self, gvlist):
c = copy.copy(self)
c.z = None
c._dict__ = gv.remove_gvars(c.__dict__, gvlist)
return c
def _distribute_gvars(self, gvlist):
self.__dict__ = gv.distribute_gvars(self.__dict__, gvlist)
return self
if __name__ == '__main__':
unittest.main()<|fim▁end|> | def test_cov(g):
if hasattr(g, 'keys'):
g = BufferDict(g) |
<|file_name|>argumentsparser.py<|end_file_name|><|fim▁begin|>import argparse
import select
<|fim▁hole|>
def parse_args(args, input):
parser = argparse.ArgumentParser()
parser.add_argument('--url', help="URL of the target data-set",
required=True)
parser.add_argument('--token', help="Bearer token for the target data-set",
required=True)
parser.add_argument('--timeout', help="Request timeout. Default: 5 seconds",
required=False, default=5, type=float)
parser.add_argument('--attempts', help="Number of times to attempt sending data. Default: 3",
required=False, default=3, type=int)
parser.add_argument('--failfast', help="Don't retry sending data",
required=False, default=False, action='store_true')
parser.add_argument('--sleep', help=argparse.SUPPRESS,
required=False, default=3, type=int)
parser.add_argument('file', help="File containing JSON to send", nargs='?',
type=argparse.FileType('r'),
default=input)
arguments = parser.parse_args(args)
if arguments.failfast:
arguments.attempts = 1
if no_piped_input(arguments):
parser.error("No input provided")
return arguments<|fim▁end|> | def no_piped_input(arguments):
inputs_ready, _, _ = select.select([arguments.file], [], [], 0)
return not bool(inputs_ready) |
<|file_name|>txindex.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test txindex generation and fetching
#
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
import binascii
class TxIndexTest(BitcoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 4)
def setup_network(self):
self.nodes = []
# Nodes 0/1 are "wallet" nodes
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"]))
self.nodes.append(start_node(1, self.options.tmpdir, ["-debug", "-txindex"]))
# Nodes 2/3 are used for testing
self.nodes.append(start_node(2, self.options.tmpdir, ["-debug", "-txindex"]))
self.nodes.append(start_node(3, self.options.tmpdir, ["-debug", "-txindex"]))
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[0], 2)
connect_nodes(self.nodes[0], 3)
self.is_network_split = False
self.sync_all()
def run_test(self):
print("Mining blocks...")
self.nodes[0].generate(105)
self.sync_all()
chain_height = self.nodes[1].getblockcount()
assert_equal(chain_height, 105)
print("Testing transaction index...")
privkey = "cU4zhap7nPJAWeMFu4j6jLrfPmqakDAzy8zn8Fhb3oEevdm4e5Lc"
address = "yeMpGzMj3rhtnz48XsfpB8itPHhHtgxLc3"
addressHash = binascii.unhexlify("C5E4FB9171C22409809A3E8047A29C83886E325D")
scriptPubKey = CScript([OP_DUP, OP_HASH160, addressHash, OP_EQUALVERIFY, OP_CHECKSIG])
unspent = self.nodes[0].listunspent()
tx = CTransaction()
amount = unspent[0]["amount"] * 100000000
tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
tx.vout = [CTxOut(amount, scriptPubKey)]
tx.rehash()<|fim▁hole|> txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True)
self.nodes[0].generate(1)
self.sync_all()
# Check verbose raw transaction results
verbose = self.nodes[3].getrawtransaction(unspent[0]["txid"], 1)
assert_equal(verbose["vout"][0]["valueSat"], 5000000000);
assert_equal(verbose["vout"][0]["value"], 50);
print("Passed\n")
if __name__ == '__main__':
TxIndexTest().main()<|fim▁end|> |
signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8")) |
<|file_name|>antenna_grab.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# script to measure save a bunch of VNA phase measurements while stepping beam numbers
# useful for characterizing the RF path (transmitter antenna port to receiver input) and looking for time delay differences
# requires ssh key for QNX box and VNA
# jon klein, jtklein@alaska.edu, mit license
# jef spaleta
from pylab import *
from vna_control import *
from csv_utils import *
import argparse, os, time, sys
SWEEP_CENTER = 15e6
SWEEP_SPAN = 20e6
SWEEP_POINTS = 1201
TX_STARTUP_DELAY = 2 # 20
BEAMS = 24
if __name__ == '__main__':
# setup arguement parser and parse arguements
parser = argparse.ArgumentParser()
parser.add_argument("--cal", action="count", help="run through calibration on VNA before taking measurements", default=0)
parser.add_argument("--vnaip", help="specify VNA ip address", default=VNAHOST)
parser.add_argument("--ddir", help="specify a directory to save the data in", default='adw_cable_short')
parser.add_argument("--avg", type=int, help="specify count to average", default=1)
parser.add_argument("--paths", type=int, help="specify number of paths to calibrate", default=20)
args = parser.parse_args()
# sanity check arguements
if args.avg < 1:
sys.exit("error: average count is less than 1")
if not os.path.exists(args.ddir):
sys.exit("error: data directory does not exist: %s" % (directory))
if args.paths < 1:
sys.exit("error: path count is less than 1")
# open connection with VNA
vna = lan_init(args.vnaip)
# preset VNA if calibrating
if args.cal:
vna_preset(vna)
# init VNA measurements
vna_init(vna, param='S22')
# configure VNA measurements (add smoothing to time delay channel, enable averaging)
vna_setspan(vna, SWEEP_SPAN, SWEEP_CENTER, SWEEP_POINTS)
vna_setave(vna,args.avg)
vna_enableave(vna,True)
vna_smoothapeture(vna,2,5.0)
vna_enablesmoothing(vna,2,True)
# calibrate VNA if run with --cal
if args.cal:
print 'calibrating VNA'
vna_through_cal(vna)
vna_trigger(vna, args.avg)
# setup csv data structure
csvdat = csv_data()
csvdat.sweep_count = SWEEP_POINTS
csvdat.ave_count = args.avg
csvdat.ave_enable = (args.avg > 1)
csvdat.smoothing_percent = 5
csvdat.smoothing_enable = True
csvdat.freqs = vna_readspan(vna)
csvdat.freq_start = min(csvdat.freqs)
csvdat.freq_end = max(csvdat.freqs)
# step through each path and measure phase, time delay, and magnitude at each beam setting<|fim▁hole|> csvdat.card = p
csvdat.beam = 0
vna_clearave(vna)
vna_trigger(vna, args.avg)
csvdat.tdelay = vna_readtimedelay(vna)
csvdat.ephase = vna_readextendedphase(vna)
csvdat.phase = vna_readphase(vna)
csvdat.mlog = vna_readmlog(vna)
write_csv(args.ddir, csvdat)
lan_close(vna)<|fim▁end|> | for p in range(args.paths):
p = int(raw_input('connect and enter a path number and then press enter to continue... '))
time.sleep(TX_STARTUP_DELAY) # wait for transmitter to warm up |
<|file_name|>stickyVoteButtons.user.js<|end_file_name|><|fim▁begin|>// ==UserScript==
// @name Sticky vote buttons
// @namespace http://stackexchange.com/users/4337810/
// @version 1.0
// @description Makes the vote buttons next to posts sticky whilst scrolling on that post
// @author ᔕᖺᘎᕊ (http://stackexchange.com/users/4337810/)
// @match *://*.stackexchange.com/*
// @match *://*.stackoverflow.com/*
// @match *://*.superuser.com/*
// @match *://*.serverfault.com/*
// @match *://*.askubuntu.com/*
// @match *://*.stackapps.com/*
// @match *://*.mathoverflow.net/*
// @require https://cdn.rawgit.com/EnzoMartin/Sticky-Element/master/jquery.stickyelement.js
// @grant none
// ==/UserScript==
$(document).ready(function() {
$(window).scroll(function(){
$(".votecell").each(function(){
var offset = 0;
if($(".topbar").css("position") == "fixed"){
offset = 34;
}
var vote = $(this).find(".vote");
if($(this).offset().top - $(window).scrollTop() + offset <= 0){
if($(this).offset().top + $(this).height() + offset - $(window).scrollTop() - vote.height() > 0){<|fim▁hole|> }
}else{
vote.css({position:"relative", left:0, top:0});
}
});
});
});<|fim▁end|> | vote.css({position:"fixed", left:$(this).offset().left, top:0 + offset});
}else{
vote.css({position:"relative", left:0, top:$(this).height()-vote.height()}); |
<|file_name|>readcsv.rs<|end_file_name|><|fim▁begin|>///
/// This example parses, sorts and groups the iris dataset
/// and does some simple manipulations.
///
/// Iterators and itertools functionality are used throughout.
///
///
use ndarray::Array;
use dataframe::DataFrame;
use util::traits::UtahNum;
use util::error::*;
use util::traits::Constructor;
use rustc_serialize::Decodable;
use csv;
pub trait ReadCSV<T>
where T: UtahNum + Decodable
{
fn read_csv(file: &'static str) -> Result<DataFrame<T>>;
}
impl<T> ReadCSV<T> for DataFrame<T>
where T: UtahNum + Decodable
{
fn read_csv(file: &'static str) -> Result<DataFrame<T>> {
let mut rdr = csv::Reader::from_file(file).unwrap();
let columns = rdr.headers().unwrap();
let (mut nrow, ncol) = (0, columns.len());
let mut v: Vec<T> = Vec::new();
for record in rdr.decode() {
nrow += 1;
let e: Vec<T> = record.unwrap();
v.extend(e.into_iter())
}<|fim▁hole|>}<|fim▁end|> |
let matrix = Array::from_shape_vec((nrow, ncol), v).unwrap();
DataFrame::new(matrix).columns(&columns[..])
} |
<|file_name|>test_geometric.py<|end_file_name|><|fim▁begin|>import numpy as np
from numpy.testing import (assert_equal, assert_array_almost_equal,
assert_raises)
from skimage.transform._geometric import _stackcopy
from skimage.transform._geometric import GeometricTransform
from skimage.transform import (estimate_transform, matrix_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform,
PiecewiseAffineTransform)
SRC = np.array([
[-12.3705, -10.5075],
[-10.7865, 15.4305],
[8.6985, 10.8675],
[11.4975, -9.5715],
[7.8435, 7.4835],
[-5.3325, 6.5025],
[6.7905, -6.3765],
[-6.1695, -0.8235],
])
DST = np.array([
[0, 0],
[0, 5800],<|fim▁hole|> [4900, 5800],
[4900, 0],
[4479, 4580],
[1176, 3660],
[3754, 790],
[1024, 1931],
])
def test_stackcopy():
layers = 4
x = np.empty((3, 3, layers))
y = np.eye(3, 3)
_stackcopy(x, y)
for i in range(layers):
assert_array_almost_equal(x[..., i], y)
def test_estimate_transform():
for tform in ('similarity', 'affine', 'projective', 'polynomial'):
estimate_transform(tform, SRC[:2, :], DST[:2, :])
assert_raises(ValueError, estimate_transform, 'foobar',
SRC[:2, :], DST[:2, :])
def test_matrix_transform():
tform = AffineTransform(scale=(0.1, 0.5), rotation=2)
assert_equal(tform(SRC), matrix_transform(SRC, tform._matrix))
def test_similarity_estimation():
# exact solution
tform = estimate_transform('similarity', SRC[:2, :], DST[:2, :])
assert_array_almost_equal(tform(SRC[:2, :]), DST[:2, :])
assert_equal(tform._matrix[0, 0], tform._matrix[1, 1])
assert_equal(tform._matrix[0, 1], - tform._matrix[1, 0])
# over-determined
tform2 = estimate_transform('similarity', SRC, DST)
assert_array_almost_equal(tform2.inverse(tform2(SRC)), SRC)
assert_equal(tform2._matrix[0, 0], tform2._matrix[1, 1])
assert_equal(tform2._matrix[0, 1], - tform2._matrix[1, 0])
# via estimate method
tform3 = SimilarityTransform()
tform3.estimate(SRC, DST)
assert_array_almost_equal(tform3._matrix, tform2._matrix)
def test_similarity_init():
# init with implicit parameters
scale = 0.1
rotation = 1
translation = (1, 1)
tform = SimilarityTransform(scale=scale, rotation=rotation,
translation=translation)
assert_array_almost_equal(tform.scale, scale)
assert_array_almost_equal(tform.rotation, rotation)
assert_array_almost_equal(tform.translation, translation)
# init with transformation matrix
tform2 = SimilarityTransform(tform._matrix)
assert_array_almost_equal(tform2.scale, scale)
assert_array_almost_equal(tform2.rotation, rotation)
assert_array_almost_equal(tform2.translation, translation)
# test special case for scale if rotation=0
scale = 0.1
rotation = 0
translation = (1, 1)
tform = SimilarityTransform(scale=scale, rotation=rotation,
translation=translation)
assert_array_almost_equal(tform.scale, scale)
assert_array_almost_equal(tform.rotation, rotation)
assert_array_almost_equal(tform.translation, translation)
def test_affine_estimation():
# exact solution
tform = estimate_transform('affine', SRC[:3, :], DST[:3, :])
assert_array_almost_equal(tform(SRC[:3, :]), DST[:3, :])
# over-determined
tform2 = estimate_transform('affine', SRC, DST)
assert_array_almost_equal(tform2.inverse(tform2(SRC)), SRC)
# via estimate method
tform3 = AffineTransform()
tform3.estimate(SRC, DST)
assert_array_almost_equal(tform3._matrix, tform2._matrix)
def test_affine_init():
# init with implicit parameters
scale = (0.1, 0.13)
rotation = 1
shear = 0.1
translation = (1, 1)
tform = AffineTransform(scale=scale, rotation=rotation, shear=shear,
translation=translation)
assert_array_almost_equal(tform.scale, scale)
assert_array_almost_equal(tform.rotation, rotation)
assert_array_almost_equal(tform.shear, shear)
assert_array_almost_equal(tform.translation, translation)
# init with transformation matrix
tform2 = AffineTransform(tform._matrix)
assert_array_almost_equal(tform2.scale, scale)
assert_array_almost_equal(tform2.rotation, rotation)
assert_array_almost_equal(tform2.shear, shear)
assert_array_almost_equal(tform2.translation, translation)
def test_piecewise_affine():
tform = PiecewiseAffineTransform()
tform.estimate(SRC, DST)
# make sure each single affine transform is exactly estimated
assert_array_almost_equal(tform(SRC), DST)
assert_array_almost_equal(tform.inverse(DST), SRC)
def test_projective_estimation():
# exact solution
tform = estimate_transform('projective', SRC[:4, :], DST[:4, :])
assert_array_almost_equal(tform(SRC[:4, :]), DST[:4, :])
# over-determined
tform2 = estimate_transform('projective', SRC, DST)
assert_array_almost_equal(tform2.inverse(tform2(SRC)), SRC)
# via estimate method
tform3 = ProjectiveTransform()
tform3.estimate(SRC, DST)
assert_array_almost_equal(tform3._matrix, tform2._matrix)
def test_projective_init():
tform = estimate_transform('projective', SRC, DST)
# init with transformation matrix
tform2 = ProjectiveTransform(tform._matrix)
assert_array_almost_equal(tform2._matrix, tform._matrix)
def test_polynomial_estimation():
# over-determined
tform = estimate_transform('polynomial', SRC, DST, order=10)
assert_array_almost_equal(tform(SRC), DST, 6)
# via estimate method
tform2 = PolynomialTransform()
tform2.estimate(SRC, DST, order=10)
assert_array_almost_equal(tform2._params, tform._params)
def test_polynomial_init():
tform = estimate_transform('polynomial', SRC, DST, order=10)
# init with transformation parameters
tform2 = PolynomialTransform(tform._params)
assert_array_almost_equal(tform2._params, tform._params)
def test_polynomial_default_order():
tform = estimate_transform('polynomial', SRC, DST)
tform2 = estimate_transform('polynomial', SRC, DST, order=2)
assert_array_almost_equal(tform2._params, tform._params)
def test_polynomial_inverse():
assert_raises(Exception, PolynomialTransform().inverse, 0)
def test_union():
tform1 = SimilarityTransform(scale=0.1, rotation=0.3)
tform2 = SimilarityTransform(scale=0.1, rotation=0.9)
tform3 = SimilarityTransform(scale=0.1 ** 2, rotation=0.3 + 0.9)
tform = tform1 + tform2
assert_array_almost_equal(tform._matrix, tform3._matrix)
tform1 = AffineTransform(scale=(0.1, 0.1), rotation=0.3)
tform2 = SimilarityTransform(scale=0.1, rotation=0.9)
tform3 = SimilarityTransform(scale=0.1 ** 2, rotation=0.3 + 0.9)
tform = tform1 + tform2
assert_array_almost_equal(tform._matrix, tform3._matrix)
assert tform.__class__ == ProjectiveTransform
def test_geometric_tform():
tform = GeometricTransform()
assert_raises(NotImplementedError, tform, 0)
assert_raises(NotImplementedError, tform.inverse, 0)
assert_raises(NotImplementedError, tform.__add__, 0)
def test_invalid_input():
assert_raises(ValueError, ProjectiveTransform, np.zeros((2, 3)))
assert_raises(ValueError, AffineTransform, np.zeros((2, 3)))
assert_raises(ValueError, SimilarityTransform, np.zeros((2, 3)))
assert_raises(ValueError, AffineTransform,
matrix=np.zeros((2, 3)), scale=1)
assert_raises(ValueError, SimilarityTransform,
matrix=np.zeros((2, 3)), scale=1)
assert_raises(ValueError, PolynomialTransform, np.zeros((3, 3)))
if __name__ == "__main__":
from numpy.testing import run_module_suite
run_module_suite()<|fim▁end|> | |
<|file_name|>test_grafana.py<|end_file_name|><|fim▁begin|># Copyright (C) 2014 Nicolas Lamirault <nicolas.lamirault@gmail.com>
<|fim▁hole|># the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from hyperiontests import hyperion
from hyperiontests import settings
class TestGrafana(hyperion.HyperionTestCase):
def setUp(self):
super(TestGrafana, self).setUp()
self._host = "http://%s:%s" % (settings.HYPERION_HOST,
settings.HYPERION_WEB)
def test_can_retrieve_default_dashboard(self):
response = self.http_get("grafana/#/dashboard/file/default.json")
self.assertEqual(200, response.status_code)<|fim▁end|> | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String, UniqueConstraint
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
engine = create_engine('sqlite:///challenge.sqlite', echo=False)
Session = sessionmaker(bind=engine)
session = Session()
Base = declarative_base()
class Endereco(Base):
__tablename__ = "endereco"
#id = Column(Integer, primary_key=True)
logradouro = Column(String)
bairro = Column(String)
cidade = Column(String)
estado = Column(String)
cep = Column(String, primary_key=True)<|fim▁hole|>
def __repr__(self):
return "{}".format(self.cep)
Base.metadata.create_all(engine)<|fim▁end|> | __table_args__ = (UniqueConstraint('cep'),) |
<|file_name|>authentication.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
def requests_handshaker():
consumer_key = config.OAUTH_CONSUMER_KEY
consumer_secret = config.OAUTH_CONSUMER_SECRET
consumer_token = ConsumerToken(consumer_key, consumer_secret)
return Handshaker("https://meta.wikimedia.org/w/index.php", consumer_token)
def get_username(request):
handshaker = requests_handshaker()
if 'access_token_key' in request.session:
access_key = request.session['access_token_key'].encode('utf-8')
access_secret = request.session['access_token_secret'].encode('utf-8')
access_token = tokens.AccessToken(key=access_key, secret=access_secret)
return handshaker.identify(access_token)['username']
else:
return None<|fim▁end|> | from . import config
from django.shortcuts import render
from mwoauth import ConsumerToken, Handshaker, tokens |
<|file_name|>payment.braintree.providersettings.controller.js<|end_file_name|><|fim▁begin|>angular.module('merchello.plugins.braintree').controller('Merchello.Plugins.GatewayProviders.Dialogs.PaymentMethodAddEditController',
['$scope', 'braintreeProviderSettingsBuilder',
function($scope, braintreeProviderSettingsBuilder) {
$scope.providerSettings = {};
function init() {
var json = JSON.parse($scope.dialogData.provider.extendedData.getValue('braintreeProviderSettings'));
$scope.providerSettings = braintreeProviderSettingsBuilder.transform(json);
$scope.$watch(function () {
return $scope.providerSettings;
}, function (newValue, oldValue) {
$scope.dialogData.provider.extendedData.setValue('braintreeProviderSettings', angular.toJson(newValue));
}, true);
}<|fim▁hole|>
// initialize the controller
init();
}]);<|fim▁end|> | |
<|file_name|>icf.py<|end_file_name|><|fim▁begin|># Copyright 2008 Dan Smith <dsmith@danplanet.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import struct
import re
import time
import logging
from chirp import chirp_common, errors, util, memmap
from chirp.settings import RadioSetting, RadioSettingGroup, \
RadioSettingValueBoolean, RadioSettings
LOG = logging.getLogger(__name__)
CMD_CLONE_OUT = 0xE2
CMD_CLONE_IN = 0xE3
CMD_CLONE_DAT = 0xE4
CMD_CLONE_END = 0xE5
SAVE_PIPE = None
class IcfFrame:
"""A single ICF communication frame"""
src = 0
dst = 0
cmd = 0
payload = ""
def __str__(self):
addrs = {0xEE: "PC",
0xEF: "Radio"}
cmds = {0xE0: "ID",
0xE1: "Model",
0xE2: "Clone out",
0xE3: "Clone in",
0xE4: "Clone data",
0xE5: "Clone end",
0xE6: "Clone result"}
return "%s -> %s [%s]:\n%s" % (addrs[self.src], addrs[self.dst],
cmds[self.cmd],
util.hexprint(self.payload))
def __init__(self):
pass
def parse_frame_generic(data):
"""Parse an ICF frame of unknown type from the beginning of @data"""
frame = IcfFrame()
frame.src = ord(data[2])
frame.dst = ord(data[3])
frame.cmd = ord(data[4])
try:
end = data.index("\xFD")
except ValueError:
return None, data
frame.payload = data[5:end]
return frame, data[end+1:]
class RadioStream:
"""A class to make reading a stream of IcfFrames easier"""
def __init__(self, pipe):
self.pipe = pipe
self.data = ""
def _process_frames(self):
if not self.data.startswith("\xFE\xFE"):
LOG.error("Out of sync with radio:\n%s" % util.hexprint(self.data))
raise errors.InvalidDataError("Out of sync with radio")
elif len(self.data) < 5:
return [] # Not enough data for a full frame
frames = []
while self.data:
try:
cmd = ord(self.data[4])
except IndexError:
break # Out of data
try:
frame, rest = parse_frame_generic(self.data)
if not frame:
break
elif frame.src == 0xEE and frame.dst == 0xEF:
# PC echo, ignore
pass
else:
frames.append(frame)
self.data = rest
except errors.InvalidDataError, e:
LOG.error("Failed to parse frame (cmd=%i): %s" % (cmd, e))
return []<|fim▁hole|>
return frames
def get_frames(self, nolimit=False):
"""Read any pending frames from the stream"""
while True:
_data = self.pipe.read(64)
if not _data:
break
else:
self.data += _data
if not nolimit and len(self.data) > 128 and "\xFD" in self.data:
break # Give us a chance to do some status
if len(self.data) > 1024:
break # Avoid an endless loop of chewing garbage
if not self.data:
return []
return self._process_frames()
def get_model_data(radio, mdata="\x00\x00\x00\x00"):
"""Query the @radio for its model data"""
send_clone_frame(radio, 0xe0, mdata, raw=True)
stream = RadioStream(radio.pipe)
frames = stream.get_frames()
if len(frames) != 1:
raise errors.RadioError("Unexpected response from radio")
return frames[0].payload
def get_clone_resp(pipe, length=None, max_count=None):
"""Read the response to a clone frame"""
def exit_criteria(buf, length, cnt, max_count):
"""Stop reading a clone response if we have enough data or encounter
the end of a frame"""
if max_count is not None:
if cnt >= max_count:
return True
if length is None:
return buf.endswith("\xfd")
else:
return len(buf) == length
resp = ""
cnt = 0
while not exit_criteria(resp, length, cnt, max_count):
resp += pipe.read(1)
cnt += 1
return resp
def send_clone_frame(radio, cmd, data, raw=False, checksum=False):
"""Send a clone frame with @cmd and @data to the @radio"""
payload = radio.get_payload(data, raw, checksum)
frame = "\xfe\xfe\xee\xef%s%s\xfd" % (chr(cmd), payload)
if SAVE_PIPE:
LOG.debug("Saving data...")
SAVE_PIPE.write(frame)
# LOG.debug("Sending:\n%s" % util.hexprint(frame))
# LOG.debug("Sending:\n%s" % util.hexprint(hed[6:]))
if cmd == 0xe4:
# Uncomment to avoid cloning to the radio
# return frame
pass
radio.pipe.write(frame)
if radio.MUNCH_CLONE_RESP:
# Do max 2*len(frame) read(1) calls
get_clone_resp(radio.pipe, max_count=2*len(frame))
return frame
def process_data_frame(radio, frame, _mmap):
"""Process a data frame, adding the payload to @_mmap"""
_data = radio.process_frame_payload(frame.payload)
# Checksum logic added by Rick DeWitt, 9/2019, issue # 7075
if len(_mmap) >= 0x10000: # This map size not tested for checksum
saddr, = struct.unpack(">I", _data[0:4])
length, = struct.unpack("B", _data[4])
data = _data[5:5+length]
sumc, = struct.unpack("B", _data[5+length])
addr1, = struct.unpack("B", _data[0])
addr2, = struct.unpack("B", _data[1])
addr3, = struct.unpack("B", _data[2])
addr4, = struct.unpack("B", _data[3])
else: # But this one has been tested for raw mode radio (IC-2730)
saddr, = struct.unpack(">H", _data[0:2])
length, = struct.unpack("B", _data[2])
data = _data[3:3+length]
sumc, = struct.unpack("B", _data[3+length])
addr1, = struct.unpack("B", _data[0])
addr2, = struct.unpack("B", _data[1])
addr3 = 0
addr4 = 0
cs = addr1 + addr2 + addr3 + addr4 + length
for byte in data:
cs += ord(byte)
vx = ((cs ^ 0xFFFF) + 1) & 0xFF
if sumc != vx:
LOG.error("Bad checksum in address %04X frame: %02x "
"calculated, %02x sent!" % (saddr, vx, sumc))
raise errors.InvalidDataError(
"Checksum error in download! "
"Try disabling High Speed Clone option in Settings.")
try:
_mmap[saddr] = data
except IndexError:
LOG.error("Error trying to set %i bytes at %05x (max %05x)" %
(bytes, saddr, len(_mmap)))
return saddr, saddr + length
def start_hispeed_clone(radio, cmd):
"""Send the magic incantation to the radio to go fast"""
buf = ("\xFE" * 20) + \
"\xEE\xEF\xE8" + \
radio.get_model() + \
"\x00\x00\x02\x01\xFD"
LOG.debug("Starting HiSpeed:\n%s" % util.hexprint(buf))
radio.pipe.write(buf)
radio.pipe.flush()
resp = radio.pipe.read(128)
LOG.debug("Response:\n%s" % util.hexprint(resp))
LOG.info("Switching to 38400 baud")
radio.pipe.baudrate = 38400
buf = ("\xFE" * 14) + \
"\xEE\xEF" + \
chr(cmd) + \
radio.get_model()[:3] + \
"\x00\xFD"
LOG.debug("Starting HiSpeed Clone:\n%s" % util.hexprint(buf))
radio.pipe.write(buf)
radio.pipe.flush()
def _clone_from_radio(radio):
md = get_model_data(radio)
if md[0:4] != radio.get_model():
LOG.info("This model: %s" % util.hexprint(md[0:4]))
LOG.info("Supp model: %s" % util.hexprint(radio.get_model()))
raise errors.RadioError("I can't talk to this model")
if radio.is_hispeed():
start_hispeed_clone(radio, CMD_CLONE_OUT)
else:
send_clone_frame(radio, CMD_CLONE_OUT, radio.get_model(), raw=True)
LOG.debug("Sent clone frame")
stream = RadioStream(radio.pipe)
addr = 0
_mmap = memmap.MemoryMap(chr(0x00) * radio.get_memsize())
last_size = 0
while True:
frames = stream.get_frames()
if not frames:
break
for frame in frames:
if frame.cmd == CMD_CLONE_DAT:
src, dst = process_data_frame(radio, frame, _mmap)
if last_size != (dst - src):
LOG.debug("ICF Size change from %i to %i at %04x" %
(last_size, dst - src, src))
last_size = dst - src
if addr != src:
LOG.debug("ICF GAP %04x - %04x" % (addr, src))
addr = dst
elif frame.cmd == CMD_CLONE_END:
LOG.debug("End frame (%i):\n%s" %
(len(frame.payload), util.hexprint(frame.payload)))
LOG.debug("Last addr: %04x" % addr)
if radio.status_fn:
status = chirp_common.Status()
status.msg = "Cloning from radio"
status.max = radio.get_memsize()
status.cur = addr
radio.status_fn(status)
return _mmap
def clone_from_radio(radio):
"""Do a full clone out of the radio's memory"""
try:
return _clone_from_radio(radio)
except Exception, e:
raise errors.RadioError("Failed to communicate with the radio: %s" % e)
def send_mem_chunk(radio, start, stop, bs=32):
"""Send a single chunk of the radio's memory from @start-@stop"""
_mmap = radio.get_mmap()
status = chirp_common.Status()
status.msg = "Cloning to radio"
status.max = radio.get_memsize()
for i in range(start, stop, bs):
if i + bs < stop:
size = bs
else:
size = stop - i
if radio.get_memsize() >= 0x10000:
chunk = struct.pack(">IB", i, size)
else:
chunk = struct.pack(">HB", i, size)
chunk += _mmap[i:i+size]
send_clone_frame(radio,
CMD_CLONE_DAT,
chunk,
raw=False,
checksum=True)
if radio.status_fn:
status.cur = i+bs
radio.status_fn(status)
return True
def _clone_to_radio(radio):
global SAVE_PIPE
# Uncomment to save out a capture of what we actually write to the radio
# SAVE_PIPE = file("pipe_capture.log", "w", 0)
md = get_model_data(radio)
if md[0:4] != radio.get_model():
raise errors.RadioError("I can't talk to this model")
# This mimics what the Icom software does, but isn't required and just
# takes longer
# md = get_model_data(radio, mdata=md[0:2]+"\x00\x00")
# md = get_model_data(radio, mdata=md[0:2]+"\x00\x00")
stream = RadioStream(radio.pipe)
if radio.is_hispeed():
start_hispeed_clone(radio, CMD_CLONE_IN)
else:
send_clone_frame(radio, CMD_CLONE_IN, radio.get_model(), raw=True)
frames = []
for start, stop, bs in radio.get_ranges():
if not send_mem_chunk(radio, start, stop, bs):
break
frames += stream.get_frames()
send_clone_frame(radio, CMD_CLONE_END, radio.get_endframe(), raw=True)
if SAVE_PIPE:
SAVE_PIPE.close()
SAVE_PIPE = None
for i in range(0, 10):
try:
frames += stream.get_frames(True)
result = frames[-1]
except IndexError:
LOG.debug("Waiting for clone result...")
time.sleep(0.5)
if len(frames) == 0:
raise errors.RadioError("Did not get clone result from radio")
return result.payload[0] == '\x00'
def clone_to_radio(radio):
"""Initiate a full memory clone out to @radio"""
try:
return _clone_to_radio(radio)
except Exception, e:
logging.exception("Failed to communicate with the radio")
raise errors.RadioError("Failed to communicate with the radio: %s" % e)
def convert_model(mod_str):
"""Convert an ICF-style model string into what we get from the radio"""
data = ""
for i in range(0, len(mod_str), 2):
hexval = mod_str[i:i+2]
intval = int(hexval, 16)
data += chr(intval)
return data
def convert_data_line(line):
"""Convert an ICF data line to raw memory format"""
if line.startswith("#"):
return ""
line = line.strip()
if len(line) == 38:
# Small memory (< 0x10000)
size = int(line[4:6], 16)
data = line[6:]
else:
# Large memory (>= 0x10000)
size = int(line[8:10], 16)
data = line[10:]
_mmap = ""
i = 0
while i < (size * 2):
try:
val = int("%s%s" % (data[i], data[i+1]), 16)
i += 2
_mmap += struct.pack("B", val)
except ValueError, e:
LOG.debug("Failed to parse byte: %s" % e)
break
return _mmap
def read_file(filename):
"""Read an ICF file and return the model string and memory data"""
f = file(filename)
mod_str = f.readline()
dat = f.readlines()
model = convert_model(mod_str.strip())
_mmap = ""
for line in dat:
if not line.startswith("#"):
_mmap += convert_data_line(line)
return model, memmap.MemoryMap(_mmap)
def is_9x_icf(filename):
"""Returns True if @filename is an IC9x ICF file"""
f = file(filename)
mdata = f.read(8)
f.close()
return mdata in ["30660000", "28880000"]
def is_icf_file(filename):
"""Returns True if @filename is an ICF file"""
f = file(filename)
data = f.readline()
data += f.readline()
f.close()
data = data.replace("\n", "").replace("\r", "")
return bool(re.match("^[0-9]{8}#", data))
class IcomBank(chirp_common.Bank):
"""A bank that works for all Icom radios"""
# Integral index of the bank (not to be confused with per-memory
# bank indexes
index = 0
class IcomNamedBank(IcomBank):
"""A bank with an adjustable name"""
def set_name(self, name):
"""Set the name of the bank"""
pass
class IcomBankModel(chirp_common.BankModel):
"""Icom radios all have pretty much the same simple bank model. This
central implementation can, with a few icom-specific radio interfaces
serve most/all of them"""
def get_num_mappings(self):
return self._radio._num_banks
def get_mappings(self):
banks = []
for i in range(0, self._radio._num_banks):
index = chr(ord("A") + i)
bank = self._radio._bank_class(self, index, "BANK-%s" % index)
bank.index = i
banks.append(bank)
return banks
def add_memory_to_mapping(self, memory, bank):
self._radio._set_bank(memory.number, bank.index)
def remove_memory_from_mapping(self, memory, bank):
if self._radio._get_bank(memory.number) != bank.index:
raise Exception("Memory %i not in bank %s. Cannot remove." %
(memory.number, bank))
self._radio._set_bank(memory.number, None)
def get_mapping_memories(self, bank):
memories = []
for i in range(*self._radio.get_features().memory_bounds):
if self._radio._get_bank(i) == bank.index:
memories.append(self._radio.get_memory(i))
return memories
def get_memory_mappings(self, memory):
index = self._radio._get_bank(memory.number)
if index is None:
return []
else:
return [self.get_mappings()[index]]
class IcomIndexedBankModel(IcomBankModel,
chirp_common.MappingModelIndexInterface):
"""Generic bank model for Icom radios with indexed banks"""
def get_index_bounds(self):
return self._radio._bank_index_bounds
def get_memory_index(self, memory, bank):
return self._radio._get_bank_index(memory.number)
def set_memory_index(self, memory, bank, index):
if bank not in self.get_memory_mappings(memory):
raise Exception("Memory %i is not in bank %s" % (memory.number,
bank))
if index not in range(*self._radio._bank_index_bounds):
raise Exception("Invalid index")
self._radio._set_bank_index(memory.number, index)
def get_next_mapping_index(self, bank):
indexes = []
for i in range(*self._radio.get_features().memory_bounds):
if self._radio._get_bank(i) == bank.index:
indexes.append(self._radio._get_bank_index(i))
for i in range(0, 256):
if i not in indexes:
return i
raise errors.RadioError("Out of slots in this bank")
def compute_checksum(data):
cs = 0
for byte in data:
cs += ord(byte)
return ((cs ^ 0xFFFF) + 1) & 0xFF
class IcomCloneModeRadio(chirp_common.CloneModeRadio):
"""Base class for Icom clone-mode radios"""
VENDOR = "Icom"
BAUDRATE = 9600
# Ideally, the driver should read clone response after each clone frame
# is sent, but for some reason it hasn't behaved this way for years.
# So not to break the existing tested drivers the MUNCH_CLONE_RESP flag
# was added. It's False by default which brings the old behavior,
# i.e. clone response is not read. The expectation is that new Icom
# drivers will use MUNCH_CLONE_RESP = True and old drivers will be
# gradually migrated to this. Once all Icom drivers will use
# MUNCH_CLONE_RESP = True, this flag will be removed.
MUNCH_CLONE_RESP = False
_model = "\x00\x00\x00\x00" # 4-byte model string
_endframe = "" # Model-unique ending frame
_ranges = [] # Ranges of the mmap to send to the radio
_num_banks = 10 # Most simple Icoms have 10 banks, A-J
_bank_index_bounds = (0, 99)
_bank_class = IcomBank
_can_hispeed = False
@classmethod
def is_hispeed(cls):
"""Returns True if the radio supports hispeed cloning"""
return cls._can_hispeed
@classmethod
def get_model(cls):
"""Returns the Icom model data for this radio"""
return cls._model
@classmethod
def get_endframe(cls):
"""Returns the magic clone end frame for this radio"""
return cls._endframe
@classmethod
def get_ranges(cls):
"""Returns the ranges this radio likes to have in a clone"""
return cls._ranges
def process_frame_payload(self, payload):
"""Convert BCD-encoded data to raw"""
bcddata = payload
data = ""
i = 0
while i+1 < len(bcddata):
try:
val = int("%s%s" % (bcddata[i], bcddata[i+1]), 16)
i += 2
data += struct.pack("B", val)
except ValueError, e:
LOG.error("Failed to parse byte: %s" % e)
break
return data
def get_payload(self, data, raw, checksum):
"""Returns the data with optional checksum BCD-encoded for the radio"""
if raw:
return data
payload = ""
for byte in data:
payload += "%02X" % ord(byte)
if checksum:
payload += "%02X" % compute_checksum(data)
return payload
def sync_in(self):
self._mmap = clone_from_radio(self)
self.process_mmap()
def sync_out(self):
clone_to_radio(self)
def get_bank_model(self):
rf = self.get_features()
if rf.has_bank:
if rf.has_bank_index:
return IcomIndexedBankModel(self)
else:
return IcomBankModel(self)
else:
return None
# Icom-specific bank routines
def _get_bank(self, loc):
"""Get the integral bank index of memory @loc, or None"""
raise Exception("Not implemented")
def _set_bank(self, loc, index):
"""Set the integral bank index of memory @loc to @index, or
no bank if None"""
raise Exception("Not implemented")
def get_settings(self):
return make_speed_switch_setting(self)
def set_settings(self, settings):
return honor_speed_switch_setting(self, settings)
def flip_high_order_bit(data):
return [chr(ord(d) ^ 0x80) for d in list(data)]
def escape_raw_byte(byte):
"""Escapes a raw byte for sending to the radio"""
# Certain bytes are used as control characters to the radio, so if one of
# these bytes is present in the stream to the radio, it gets escaped as
# 0xff followed by (byte & 0x0f)
if ord(byte) > 0xf9:
return "\xff%s" % (chr(ord(byte) & 0xf))
return byte
def unescape_raw_bytes(escaped_data):
"""Unescapes raw bytes from the radio."""
data = ""
i = 0
while i < len(escaped_data):
byte = escaped_data[i]
if byte == '\xff':
if i + 1 >= len(escaped_data):
raise errors.InvalidDataError(
"Unexpected escape character at end of data")
i += 1
byte = chr(0xf0 | ord(escaped_data[i]))
data += byte
i += 1
return data
class IcomRawCloneModeRadio(IcomCloneModeRadio):
"""Subclass for Icom clone-mode radios using the raw data protocol."""
def process_frame_payload(self, payload):
"""Payloads from a raw-clone-mode radio are already in raw format."""
return unescape_raw_bytes(payload)
def get_payload(self, data, raw, checksum):
"""Returns the data with optional checksum in raw format."""
if checksum:
cs = chr(compute_checksum(data))
else:
cs = ""
payload = "%s%s" % (data, cs)
# Escape control characters.
escaped_payload = [escape_raw_byte(b) for b in payload]
return "".join(escaped_payload)
def sync_in(self):
# The radio returns all the bytes with the high-order bit flipped.
_mmap = clone_from_radio(self)
_mmap = flip_high_order_bit(_mmap.get_packed())
self._mmap = memmap.MemoryMap(_mmap)
self.process_mmap()
def get_mmap(self):
_data = flip_high_order_bit(self._mmap.get_packed())
return memmap.MemoryMap(_data)
class IcomLiveRadio(chirp_common.LiveRadio):
"""Base class for an Icom Live-mode radio"""
VENDOR = "Icom"
BAUD_RATE = 38400
_num_banks = 26 # Most live Icoms have 26 banks, A-Z
_bank_index_bounds = (0, 99)
_bank_class = IcomBank
def get_bank_model(self):
rf = self.get_features()
if rf.has_bank:
if rf.has_bank_index:
return IcomIndexedBankModel(self)
else:
return IcomBankModel(self)
else:
return None
def make_speed_switch_setting(radio):
if not radio.__class__._can_hispeed:
return {}
drvopts = RadioSettingGroup("drvopts", "Driver Options")
top = RadioSettings(drvopts)
rs = RadioSetting("drv_clone_speed", "Use Hi-Speed Clone",
RadioSettingValueBoolean(radio._can_hispeed))
drvopts.append(rs)
return top
def honor_speed_switch_setting(radio, settings):
for element in settings:
if element.get_name() == "drvopts":
return honor_speed_switch_setting(radio, element)
if element.get_name() == "drv_clone_speed":
radio.__class__._can_hispeed = element.value.get_value()
return<|fim▁end|> | |
<|file_name|>CONSTANTS_DIRECTIONS.java<|end_file_name|><|fim▁begin|>package br.com.tosin.ssd.utils;
/**
* Created by roger on 11/03/17.
*/
public class CONSTANTS_DIRECTIONS {<|fim▁hole|> public static final String EAST = "E";
public static final String NORTHWEST = "NW";
public static final String NORTHEAST = "NE";
public static final String SOUTH_WEST = "SW";
public static final String SOUTHEAST = "SE";
}<|fim▁end|> |
public static final String NORTH = "N";
public static final String SOUTH = "S";
public static final String WEST = "W"; |
<|file_name|>PublicDnsPropertiesMutable.cpp<|end_file_name|><|fim▁begin|>/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/servicediscovery/model/PublicDnsPropertiesMutable.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace ServiceDiscovery
{
namespace Model
{
PublicDnsPropertiesMutable::PublicDnsPropertiesMutable() :
m_sOAHasBeenSet(false)
{
}
PublicDnsPropertiesMutable::PublicDnsPropertiesMutable(JsonView jsonValue) :
m_sOAHasBeenSet(false)
{
*this = jsonValue;
}
PublicDnsPropertiesMutable& PublicDnsPropertiesMutable::operator =(JsonView jsonValue)
{
if(jsonValue.ValueExists("SOA"))
{
m_sOA = jsonValue.GetObject("SOA");
m_sOAHasBeenSet = true;
}
return *this;
}
JsonValue PublicDnsPropertiesMutable::Jsonize() const
{
JsonValue payload;
if(m_sOAHasBeenSet)
{
payload.WithObject("SOA", m_sOA.Jsonize());
}
return payload;
}
<|fim▁hole|>} // namespace Aws<|fim▁end|> | } // namespace Model
} // namespace ServiceDiscovery |
<|file_name|>config.js<|end_file_name|><|fim▁begin|>/**
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
var path = require('path'),
fs = require('fs'),
url = require('url'),
shell = require('shelljs');
// Map of project_root -> JSON
var configCache = {};
var autoPersist = true;
/**
* 将opts中的属性值以json格式添加到<proj_root>/.xface/config.json中
* config.json包含的属性主要有id, name, lib, dev_type。id和name分别为工程的id和名称,
* dev_type用于标识是内部项目开发还是外部开发者使用('internal'表示内部项目开发,不存在或者为空时为外部使用)
* @param {String} project_root
* @param {Object} opts
*/
function config(project_root, opts) {
var json = config.read(project_root);
for (var p in opts) {
json[p] = opts[p];
}
if (autoPersist) {
config.write(project_root, json);
} else {
configCache[project_root] = JSON.stringify(json);
}
return json;
};
config.setAutoPersist = function(value) {
autoPersist = value;
};
config.read = function get_config(project_root) {
var data = configCache[project_root];
if (data === undefined) {
var configPath = path.join(project_root, '.xface', 'config.json');
if (!fs.existsSync(configPath)) {
data = '{}';
} else {
data = fs.readFileSync(configPath, 'utf-8');
}
}
configCache[project_root] = data;
return JSON.parse(data);<|fim▁hole|>
config.write = function set_config(project_root, json) {
var configPath = path.join(project_root, '.xface', 'config.json');
var contents = JSON.stringify(json, null, 4);
configCache[project_root] = contents;
// Don't write the file for an empty config.
if (contents != '{}' || !fs.existsSync(configPath)) {
shell.mkdir('-p', path.join(project_root, '.xface'));
fs.writeFileSync(configPath, contents, 'utf-8');
}
return json;
};
config.has_custom_path = function(project_root, platform) {
var json = config.read(project_root);
if (json.lib && json.lib[platform]) {
var uri = url.parse(json.lib[platform].uri);
if (!(uri.protocol)) return uri.path;
else if (uri.protocol && uri.protocol[1] ==':') return uri.href;
}
return false;
};
/**
* 判断指定工程是否为内部开发使用的工程
* @param {String} project_root 工程根路径
*/
config.internalDev = function(project_root) {
var json = config.read(project_root);
return json.dev_type === 'internal';
};
module.exports = config;<|fim▁end|> | }; |
<|file_name|>book.service.ts<|end_file_name|><|fim▁begin|>import {Injectable} from '@angular/core';
import {Headers, Http, Response} from '@angular/http';
import 'rxjs/add/operator/toPromise';
import {Book} from './../models/book';
@Injectable()
export class BookService {
private booksUrl: string = 'api/books';
constructor(private http: Http) {}
getBooks(): Promise<Book[]> {<|fim▁hole|> return this.http.get(this.booksUrl)
.toPromise()
.then(response => response.json())
.catch(this.handleError);
}
getBook(id: string) {
return this.http.get(`${this.booksUrl}/${id}`)
.toPromise()
.then(response => response.json())
.catch(this.handleError);
}
private handleError(error: any) {
console.error('An error occurred', error);
return Promise.reject(error.message || error);
}
}<|fim▁end|> | |
<|file_name|>selection_behavior.rs<|end_file_name|><|fim▁begin|>use crate::{api::prelude::*, proc_macros::*};
/// Used for selection.
pub enum SelectionAction {
ToggleSelection,
}
/// The `SelectionBehaviorState` handles the `SelectionBehavior` widget.
#[derive(Default, AsAny)]
pub struct SelectionBehaviorState {
target: Entity,<|fim▁hole|> self.target = (*ctx.widget().get::<u32>("target")).into();
toggle_flag("selected", &mut ctx.get_widget(self.target));
ctx.get_widget(self.target).update(false);
}
fn messages(
&mut self,
mut messages: MessageReader,
_registry: &mut Registry,
ctx: &mut Context,
) {
for message in messages.read::<SelectionAction>() {
match message {
SelectionAction::ToggleSelection => {
let selected = *ctx.get_widget(self.target).get::<bool>("selected");
ctx.get_widget(self.target).set("selected", !selected);
toggle_flag("selected", &mut ctx.get_widget(self.target));
ctx.get_widget(self.target).update(false);
}
};
}
}
}
widget!(
/// The `SelectionBehavior` widget will take care to handle the actions,
/// that should be triggered if text regions are marked or selected.
///
/// **style:** `check_box`
SelectionBehavior<SelectionBehaviorState>: MouseHandler {
/// Sets or shares the target of the behavior.
target: u32,
/// Sets or shares the selected property.
selected: bool,
/// Sets the parent id.
parent: u32
}
);
impl Template for SelectionBehavior {
fn template(self, id: Entity, _: &mut BuildContext) -> Self {
self.name("SelectionBehavior")
.selected(true)
.on_click(move |ctx, _| {
ctx.send_message(SelectionAction::ToggleSelection, id);
false
})
}
}<|fim▁end|> | }
impl State for SelectionBehaviorState {
fn init(&mut self, _: &mut Registry, ctx: &mut Context) { |
<|file_name|>PointGeometry.java<|end_file_name|><|fim▁begin|>/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specic language governing permissions and
* limitations under the License.
*/
package app.metatron.discovery.common.geospatial.geojson;
public class PointGeometry implements GeoJsonGeometry {
private double[] coordinates;
private double[] bbox;
public PointGeometry() {
}
public PointGeometry(double[] coordinates) {
this.coordinates = coordinates;
}
public double[] getCoordinates() {
return coordinates;<|fim▁hole|> }
public double[] getBbox() {
return bbox;
}
}<|fim▁end|> | |
<|file_name|>TicketSelectionPage.java<|end_file_name|><|fim▁begin|>package task03.pages;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.ui.ExpectedConditions;
public class TicketSelectionPage extends Page {
public TicketSelectionPage(PageManager pages) {
super(pages);
}
@FindBy(xpath = ".//*[@id='fareRowContainer_0']/tbody/tr[2]/td[2]")
private WebElement firstTicket;
@FindBy(xpath = ".//*[@id='fareRowContainer_0']/tbody/tr[2]/td[2]")
private WebElement secondTicket;
@FindBy(id = "tripSummarySubmitBtn")
private WebElement submitButton;
public void select2Tickets() {<|fim▁hole|> wait.until(ExpectedConditions.elementToBeClickable(firstTicket));
firstTicket.click();
wait.until(ExpectedConditions.elementToBeClickable(secondTicket));
secondTicket.click();
wait.until(ExpectedConditions.elementToBeClickable(submitButton));
submitButton.submit();
}
}<|fim▁end|> | |
<|file_name|>test_server.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Tests the pooled server
:license: Apache License 2.0
"""
# JSON-RPC library
from jsonrpclib import ServerProxy
from jsonrpclib.SimpleJSONRPCServer import PooledJSONRPCServer
from jsonrpclib.threadpool import ThreadPool
<|fim▁hole|>import random
import threading
import unittest
# ------------------------------------------------------------------------------
def add(a, b):
return a+b
class PooledServerTests(unittest.TestCase):
"""
These tests verify that the pooled server works correctly
"""
def test_default_pool(self, pool=None):
"""
Tests the default pool
"""
# Setup server
server = PooledJSONRPCServer(("localhost", 0), thread_pool=pool)
server.register_function(add)
# Serve in a thread
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
# Find its port
port = server.socket.getsockname()[1]
# Make the client
client = ServerProxy("http://localhost:{0}".format(port))
# Check calls
for _ in range(10):
a, b = random.random(), random.random()
result = client.add(a, b)
self.assertEqual(result, a+b)
# Close server
server.server_close()
thread.join()
def test_custom_pool(self):
"""
Tests the ability to have a custom pool
"""
# Setup the pool
pool = ThreadPool(2)
pool.start()
self.test_default_pool(pool)<|fim▁end|> | # Standard library |
<|file_name|>abduction.py<|end_file_name|><|fim▁begin|>'''abduction.py
Base functionality for logical abduction using a knowledge base of definite clauses
Andrew S. Gordon
'''
import itertools
from . import parse
from . import unify
def abduction(obs, kb, maxdepth, skolemize = True):
'''Logical abduction: returns a list of all sets of assumptions that entail the observations given the kb'''
indexed_kb = index_by_consequent_predicate(kb)
res = []
listoflists = [and_or_leaflists([ob], indexed_kb, maxdepth) for ob in obs]
for u in itertools.product(*listoflists):
u = list(itertools.chain.from_iterable(u))
res.extend(crunch(u))
if skolemize:
return [unify.skolemize(r) for r in res]
else:
return res
def index_by_consequent_predicate(kb):
res = {}
for dc in kb:
predicate = parse.consequent(dc)[0]
if predicate in res:
res[predicate].append(dc)<|fim▁hole|> return res
def and_or_leaflists(remaining, indexed_kb, depth, antecedents = [], assumptions = []):
'''Returns list of all entailing sets of leafs in the and-or backchaining tree'''
if depth == 0 and antecedents: # fail
return [] # (empty) list of lists
elif not remaining: # done with this level
if not antecedents: # found one
return [assumptions] # list of lists
else:
return and_or_leaflists(antecedents, indexed_kb, depth - 1, [], assumptions)
else: # more to go on this level
literal = remaining[0] # first of remaining
predicate = literal[0]
if predicate not in indexed_kb:
return and_or_leaflists(remaining[1:], indexed_kb, depth, antecedents, [literal] + assumptions) # shift literal to assumptions
else:
revisions = []
for rule in indexed_kb[predicate]: # indexed by predicate of literal
theta = unify.unify(literal, parse.consequent(rule))
if theta != None:
if depth == 0: # no depth for revision
return [] # (empty) list of lists
revisions.append([unify.subst(theta, remaining[1:]), # new remaining with substitutions
indexed_kb,
depth,
unify.standardize(unify.subst(theta, parse.antecedent(rule))) +
unify.subst(theta, antecedents), # new antecedents with substitutions
unify.subst(theta, assumptions)]) # new assumptions with substitutions
return itertools.chain(*[and_or_leaflists(*rev) for rev in revisions]) # list of lists (if any)
def crunch(conjunction):
'''Returns all possible ways that literals in a conjunction could be unified'''
return [k for k,v in itertools.groupby(sorted(cruncher(conjunction, 0)))] # dedupe solutions
def cruncher(conjunction, idx = 0):
if idx >= len(conjunction) - 1: # last one
return [[k for k,v in itertools.groupby(sorted(conjunction))]] # dedupe literals in solution
else:
res = []
for subsequent in range(idx + 1,len(conjunction)):
theta = unify.unify(conjunction[idx], conjunction[subsequent])
if theta:
new_conjunction = unify.subst(theta,
conjunction[0:subsequent] +
conjunction[(subsequent + 1):len(conjunction)])
res.extend(cruncher(new_conjunction, idx))
res.extend(cruncher(conjunction, idx + 1))
return res<|fim▁end|> | else:
res[predicate] = [dc] |
<|file_name|>mooedit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
The moo Editor
~~~~~~~~~~~~~~
:copyright: 2005-2008 by The PIDA Project
:license: GPL 2 or later (see README/COPYING/LICENSE)
"""
# Standard Libs
import os
import gtk
import gobject
import re
from gtk import gdk
# UGLY UGLY workarround as suggested by muntyan_
# this will be changed someday when therue will be a correct
# api for this.
from pida.core.environment import home, workspace_name
SYS_DATA = os.environ.get("XDG_DATA_DIRS",
"/usr/share:/usr/local/share")
MOO_DATA_DIRS=os.pathsep.join((
str(home()/'moo'),
os.path.join(os.path.dirname(__file__), "shared"),
os.pathsep.join([os.path.join(x, "moo")
for x in SYS_DATA.split(os.pathsep)]),
"/usr/share/moo",
"/usr/local/share/moo",
"/usr/share/pida",
"/usr/local/share/pida",
))
os.environ['MOO_DATA_DIRS'] = MOO_DATA_DIRS
def _load_pix(fn):
#XXX: not zip save
path = os.path.join(os.path.dirname(__file__), 'pixmaps', fn)
return gtk.gdk.pixbuf_new_from_file(path)
_PIXMAPS = {
'bookmark': _load_pix('bookmark.png'),
'debugger_breakpoint': _load_pix('breakpoint.png'),
'debugger_position': _load_pix('breakpoint.png'),
}
# Moo Imports
try:
import moo
except ImportError:
moo = None
# PIDA Imports
from pida.ui.views import PidaView
from pida.core.editors import EditorService, EditorActionsConfig
from pida.core.actions import TYPE_NORMAL, TYPE_TOGGLE
from pida.core.events import EventsConfig
from pida.core.document import DocumentException
from pida.core.options import OptionsConfig, choices
from pida.ui.completer import (PidaCompleter, PidaCompleterWindow,
SuggestionsList)
from pygtkhelpers.gthreads import GeneratorTask, gcall, AsyncTask
from pida.core.languages import Suggestion
from pida.ui.languages import PidaDocWindow
# locale
from pida.core.locale import Locale
locale = Locale('mooedit')
_ = locale.gettext
from .langs import build_mapping, MAPPINGS
class MooeditMain(PidaView):
"""Main Mooedit View.
This View contains a gtk.Notebook for displaying buffers.
"""
def create_ui(self):
self._embed = MooeditEmbed(self)
self.add_main_widget(self._embed)
#AA Needs implementing
#EA really? I didn't see it called anytime.
# Did it with relay to the service for now.
def grab_input_focus(self):
print "\n\ngrab_input_focus\n\n"
self.svc.grab_focus()
pass
class MooeditOptionsConfig(OptionsConfig):
def create_options(self):
self.create_option(
'display_type',
_('Display notebook title'),
choices({'filename':_('Filename'), 'fullpath':_('Full path'),
'project_or_filename':_('Project relative path or filename')}),
'project_or_filename',
_('Text to display in the Notebook'),
)
self.create_option(
'autocomplete',
_('Enable Autocompleter'),
bool,
True,
_('Shall the Autocompleter be active'),
)
self.create_option(
'auto_chars',
_('Autocompleter chars'),
int,
3,
_('Open Autocompleter after howmany characters'),
)
self.create_option(
'auto_attr',
_('On attribute'),
bool,
True,
_('Open Autocompleter after attribute accessor'),
)
class MooeditPreferences(PidaView):
"""Mooedit Preferences View.
Here the Mooedit editor preferences dialog is included
and provided with some buttons.
"""
label_text = _('Mooedit Preferences')
icon_name = 'package_utilities'
def create_ui(self):
prefs = self.svc._editor_instance.prefs_page()
prefs.emit('init')
prefs.show()
vbox = gtk.VBox()
vbox.pack_start(prefs)
self._prefs = prefs
bb = gtk.HButtonBox()
bb.set_spacing(6)
bb.set_layout(gtk.BUTTONBOX_END)
self._apply_but = gtk.Button(stock=gtk.STOCK_APPLY)
self._apply_but.connect('clicked', self.cb_apply_button_clicked)
self._ok_but = gtk.Button(stock=gtk.STOCK_OK)
self._ok_but.connect('clicked', self.cb_ok_button_clicked)
self._cancel_but = gtk.Button(stock=gtk.STOCK_CANCEL)
self._cancel_but.connect('clicked', self.cb_cancel_button_clicked)
bb.pack_start(self._apply_but)
bb.pack_start(self._cancel_but)
bb.pack_start(self._ok_but)
bb.show_all()
vbox.pack_start(bb)
vbox.show()
self.add_main_widget(vbox)
def cb_ok_button_clicked(self, button):
self._apply()
self.svc.show_preferences(self.svc.get_action('mooedit_preferences').set_active(False))
def cb_apply_button_clicked(self, button):
self._apply()
def cb_cancel_button_clicked(self, button):
self.svc.show_preferences(self.svc.get_action('mooedit_preferences').set_active(False))
def _apply(self):
self._prefs.emit('apply')
self.svc.save_moo_state()
try:
self.svc._editor_instance.apply_prefs()
except AttributeError:
pass
def can_be_closed(self):
self.svc.get_action('mooedit_preferences').set_active(False)
class MooeditEmbed(gtk.Notebook):
"""Mooedit Embed
This is the actual Notebook that holds our buffers.
"""
def __init__(self, mooedit):
gtk.Notebook.__init__(self)
self.set_scrollable(True)
self.popup_enable()
self._mooedit = mooedit
self.show_all()
def _create_tab(self, document):
editor = document.editor
hb = gtk.HBox(spacing=2)
editor._label = gtk.Label()
ns = self._mooedit.svc._get_document_title(document)
editor._label.set_markup(ns)
editor._label._markup = ns
b = gtk.Button()
b.set_border_width(0)
b.connect("clicked", self._close_cb, document)
b.set_relief(gtk.RELIEF_NONE)
b.set_size_request(20, 20)
img = gtk.Image()
img.set_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_MENU)
b.add(img)
vb = gtk.VBox()
vb.pack_start(gtk.Alignment())
vb.pack_start(b, expand=False)
vb.pack_start(gtk.Alignment())
hb.pack_start(editor._label)
hb.pack_start(vb, expand=False)
hb.show_all()
return hb
def _close_cb(self, btn, document):
self._mooedit.svc.boss.get_service('buffer').cmd('close_file', document=document)
class MooeditView(gtk.ScrolledWindow):
"""Mooedit View
A gtk.ScrolledWindow containing the editor instance we get from mooedit.
"""
def __init__(self, document):
gtk.ScrolledWindow.__init__(self)
self.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
self.set_editor(document.editor)
self.document = document
self.line_markers = []
self.show_all()
def set_editor(self, editor):
self.editor = editor
self.editor.props.buffer.connect('changed', self.on_changed)
self.add(self.editor)
self.editor.show()
def on_changed(self, textbuffer):
#FIXME: this doesn't work, nor does connect_after work correctly.
# this is always one changed event to late. as the markers line position
# is updated after this event :(
self.editor.props.buffer.do_changed(textbuffer)
for lm in self.line_markers:
lm.update(lm._moo_marker.get_line()+1)
return True
def close(self):
buf = self.editor.get_buffer()
for lm in self.line_markers:
if hasattr(lm, '_moo_marker'):
lm._moo_marker.props.visible = False
buf.delete_line_mark(lm._moo_marker)
del lm._moo_marker
self.editor.inputter.disconnect()
def update_marker(self, marker):
if marker.line == -1:
# should be deleted
if marker in self.line_markers and hasattr(marker, '_moo_marker'):
marker._moo_marker.props.visible = False
self.editor.props.buffer.delete_line_mark(marker._moo_marker)
self.line_markers.remove(marker)
return
if not hasattr(marker, '_moo_marker'):
lm = moo.edit.LineMark()
lm.set_pixbuf(_PIXMAPS.get(marker.type, 'bookmark'))
#lm.set_markup('BOO')
lm.props.visible = True
marker._moo_marker = lm
buf = self.editor.props.buffer
if marker not in self.line_markers:
self.line_markers.append(marker)
buf.add_line_mark(marker._moo_marker,
min(max(0, int(marker.line)-1),buf.get_line_count()))
marker._moo_marker.props.visible = True
else:
self.editor.props.buffer.move_line_mark(marker._moo_marker,
min(max(0, int(marker.line)-1),buf.get_line_count()))
class MooeditActionsConfig(EditorActionsConfig):
"""Mooedit Actions Config
This defines some menu items for the edit menu.
"""
def create_actions(self):
EditorActionsConfig.create_actions(self)
self.create_action(
'mooedit_save_as',
TYPE_NORMAL,
_('Save _as'),
_('Save file as'),
gtk.STOCK_SAVE_AS,
self.on_save_as,
'<Shift><Control>S'
)
self.create_action(
'mooedit_reload',
TYPE_NORMAL,
_('Reload'),<|fim▁hole|> gtk.STOCK_REFRESH,
self.on_reload,
''
)
self.create_action(
'mooedit_preferences',
TYPE_TOGGLE,
_('Edit Mooedit Preferences'),
_('Show the editors preferences dialog'),
gtk.STOCK_PREFERENCES,
self.on_project_preferences,
)
self.create_action(
'mooedit_find',
TYPE_NORMAL,
_('_Find in buffer'),
_('Find'),
gtk.STOCK_FIND,
self.on_find,
'<Control>F'
)
self.create_action(
'mooedit_find_next',
TYPE_NORMAL,
_('Find _next in buffer'),
'',
gtk.STOCK_GO_FORWARD,
self.on_find_next,
'F3',
)
self.create_action(
'mooedit_find_prev',
TYPE_NORMAL,
_('Find previous in buffer'),
'',
gtk.STOCK_GO_BACK,
self.on_find_prev,
'<Shift>F3',
)
self.create_action(
'mooedit_replace',
TYPE_NORMAL,
_('Find and _replace'),
_('Find & replace'),
gtk.STOCK_FIND_AND_REPLACE,
self.on_replace,
'<Control>R',
)
self.create_action(
'mooedit_find_word_next',
TYPE_NORMAL,
_('Find current word down'),
'',
gtk.STOCK_GO_BACK,
self.on_find_word_next,
'F4',
)
self.create_action(
'mooedit_find_word_prev',
TYPE_NORMAL,
_('Find _current word up'),
'',
gtk.STOCK_GO_FORWARD,
self.on_find_word_prev,
'<Shift>F4',
)
self.create_action(
'mooedit_goto',
TYPE_NORMAL,
_('_Goto line'),
_('Goto line'),
gtk.STOCK_GO_DOWN,
self.on_goto,
'<Control>G',
)
self.create_action(
'mooedit_last_edit',
TYPE_NORMAL,
_('Goto _last edit place'),
_('Goto last edit place'),
gtk.STOCK_JUMP_TO,
self.on_last_edit,
'<Control>q',
)
self.create_action(
'mooedit_comment',
TYPE_NORMAL,
_('Comment'),
_('Comment current selection'),
'',
self.on_comment,
'',
)
self.create_action(
'mooedit_uncomment',
TYPE_NORMAL,
_('Uncomment'),
_('Uncomment current selection'),
'',
self.on_uncomment,
'',
)
act = self.create_action(
'mooedit_completer_close',
TYPE_NORMAL,
_('Close completer'),
_('Close completer'),
'',
None,
'Escape',
)
# ne need to disconnect the accelerator as our text widget are
# handeling the shortcuts
act.disconnect_accelerator()
act.opt.add_notify(self.on_completer_change)
act = self.create_action(
'mooedit_complete_toggle',
TYPE_NORMAL,
_('Toggels the autocompleter'),
_('Toggels the autocompleter'),
'',
None,
'<Control>space',
)
act.disconnect_accelerator()
act.opt.add_notify(self.on_completer_change)
act = self.create_action(
'mooedit_completer_next',
TYPE_NORMAL,
_('Next suggestion'),
_('Next suggestion'),
'',
None,
'Down',
)
act.disconnect_accelerator()
act.opt.add_notify(self.on_completer_change)
act = self.create_action(
'mooedit_completer_prev',
TYPE_NORMAL,
_('Previous suggestion'),
_('Previous suggestion'),
'',
None,
'Up',
)
act.disconnect_accelerator()
act.opt.add_notify(self.on_completer_change)
act = self.create_action(
'mooedit_completer_accept',
TYPE_NORMAL,
_('Accept suggestion'),
_('Accept suggestion'),
'',
None,
'Tab',
)
act.disconnect_accelerator()
act.opt.add_notify(self.on_completer_change)
def on_completer_change(self, *args):
self.svc._update_keyvals()
return False
def on_project_preferences(self, action):
self.svc.show_preferences(action.get_active())
def on_save_as(self, action):
# open in current filebrowser path
moo.utils.prefs_new_key_string('Editor/last_dir')
moo.utils.prefs_set_string('Editor/last_dir',
self.svc.boss.cmd('filemanager', 'get_browsed_path'))
self.svc._current.editor.save_as()
def on_reload(self, action):
self.svc.reload_document(self.svc._current.document)
def on_find(self, action):
self.svc._current.editor.emit('find-interactive')
def on_find_next(self, action):
self.svc._current.editor.emit('find-next-interactive')
def on_find_prev(self, action):
self.svc._current.editor.emit('find-prev-interactive')
def on_replace(self, action):
self.svc._current.editor.emit('replace-interactive')
def on_find_word_next(self, action):
self.svc._current.editor.emit('find-word-at-cursor', True)
def on_find_word_prev(self, action):
self.svc._current.editor.emit('find-word-at-cursor', False)
def on_goto(self, action):
cl = self.svc.get_current_line()
self.svc._current.editor.emit('goto-line-interactive')
nl = self.svc.get_current_line()
if cl != nl:
self.svc.boss.get_service('buffer').emit('document-goto',
document=self.svc._current, line=nl)
def on_last_edit(self, action):
self.svc.boss.editor.goto_last_edit()
def on_comment(self, action):
self.svc._current.editor.emit('comment')
def on_uncomment(self, action):
self.svc._current.editor.emit('uncomment')
class PidaMooInput(object):
"""
Handles all customizations in the input event handling of the editor.
It handles autocompletion and snippets for example
"""
def __init__(self, svc, editor, document):
self.svc = svc
self.editor = editor
self.document = document
self.completer_window = PidaCompleterWindow(type_=gtk.WINDOW_POPUP,
show_input=False)
self.completer = self.completer_window.widget
self.completer.show_all()
self.completer.connect("user-accept", self.accept)
self.completer.connect("suggestion-selected", self.suggestion_selected)
self.editor.connect("cursor-moved", self.on_cursor_moved)
self.model = SuggestionsList()
self.completer.set_model(self.model)
#self.completer.hide()
#self.completer_visible = False
self.completer_added = False
self.completer_pos = 0
self.completer_pos_user = 0
# two markers are used to mark the text inserted by the completer
self.completer_start = None
self.completer_end = None
self.show_auto = False
self._task = None
# db stuff for the autocompleter
self.list_matcher = re.compile("""\w{3,100}""")
self.list_cache = {}
self.list_all = set()
editor.connect("key-press-event", self.on_keypress)
editor.connect("focus-out-event", self.on_do_hide)
editor.get_toplevel().connect("focus-out-event", self.on_do_hide)
#editor.connect_after("key-press-event", self.on_after_keypress)
def disconnect(self):
self.editor.disconnect_by_func(self.on_keypress)
self.editor.disconnect_by_func(self.on_do_hide)
#try:
# self.editor.get_toplevel().disconnect_by_func(self.on_do_hide)
#except ValueError: pass
self.completer.disconnect_by_func(self.accept)
self.completer.disconnect_by_func(self.suggestion_selected)
self.editor.disconnect_by_func(self.on_cursor_moved)
#def on_
def update_completer_and_add(self, cmpl, start, ignore=()):
"""
Returns items for completion widgets
"""
# we run the language completer first and the we add our own results
# to the completer list
if cmpl:
for i in cmpl.run(self.svc.get_current_word(),
unicode(self.editor.get_text()), start):
try:
if i not in ignore:
yield i
except Exception, e:
self.svc.log.exception(e)
#self.update_completer()
y = 0
clst = self.list_all.copy()
for x in clst:
if x not in ignore:
yield x
def get_completer_visible(self):
if self.completer_window and self.completer_window.window and \
self.completer_window.window.is_visible():
return True
return False
def set_completer_visible(self, value):
pass
completer_visible = property(get_completer_visible, set_completer_visible)
def on_do_hide(self, *args, **kwargs):
self.hide()
def toggle_popup(self):
if self.completer_visible:
self.hide()
else:
self.show()
def hide(self):
if not self.completer_visible:
return
self.completer_window.hide()
#self.completer.hide_all()
self.completer_visible = False
self.show_auto = False
# delete markers
buf = self.editor.get_buffer()
self._delete_suggested()
if self.completer_start:
buf.delete_mark(self.completer_start)
self.completer_start = None
if self.completer_end:
buf.delete_mark(self.completer_end)
self.completer_end = None
def _get_start(self, i):
info = self.svc.boss.get_service('language').get_info(self.document)
while i.get_char() in info.word:
if not i.backward_char():
break
else:
i.forward_char()
return i
def show(self, visible=True, show_auto=True):
#self.completer_pos = self.completer_pos_user = \
# self.editor.props.buffer.props.cursor_position
cmpl = self.svc.boss.get_service('language').get_completer(self.document)
info = self.svc.boss.get_service('language').get_info(self.document)
if info:
self.completer.ignore_case = not info.case_sensitive
else:
self.completer.ignore_case = False
buf = self.editor.get_buffer()
cpos = buf.props.cursor_position
# we may already in a word. so we have to find the start as base
i = buf.get_iter_at_offset(cpos)
i.backward_char()
self._get_start(i)
start = i.get_offset()
self.completer_pos = buf.create_mark('completer_pos',
buf.get_iter_at_offset(start),
left_gravity=True)
self.completer_start = buf.create_mark('completer_start',
buf.get_iter_at_offset(cpos),
left_gravity=True)
self.completer_end = buf.create_mark('completer_end',
buf.get_iter_at_offset(cpos))
rec = self.editor.get_iter_location(
self.editor.props.buffer.get_iter_at_offset(
buf.props.cursor_position))
pos = self.editor.buffer_to_window_coords(gtk.TEXT_WINDOW_WIDGET,
rec.x, rec.y + rec.height)
#tw = self.editor.window.get_toplevel()
#abspos = tw.get_position()
abspos = self.editor.window.get_origin()
rpos = (pos[0]+abspos[0], pos[1]+abspos[1])
#self.completer_window.show_all()
#self.completer_window.move(rpos[0],rpos[1])
self.completer.place(rpos[0],rpos[1] - rec.height, rec.height)
self.completer_window.set_transient_for(self.svc.boss.window)
#self.completer_window.window.set_accept_focus(False)
#self.completer_window.window.set_focus_on_map(False)
#self.completer_window.window.set_skip_taskbar_hint(True)
#self.completer_window.window.set_skip_pager_hint(True)
self.editor.grab_focus()
#if not self.completer_added:
#self.editor.add_child_in_window(self.completer,
# gtk.TEXT_WINDOW_TOP,
# pos[0],
# pos[1])
#
# self.completer_window.show_all()
# #self.completer_window.move(pos[0], pos[1])
# self.completer_added = True
#else:
# self.completer_window.show_all()
#self.completer_window.move(pos[0], pos[1])
#self.editor.move_child(self.completer, pos[0], pos[1])
#self.boss.get_service('language').
self.model.clear()
if start != pos:
self.completer.filter = buf.get_text(
buf.get_iter_at_offset(start),
buf.get_iter_at_offset(cpos))
else:
self.completer.filter = ""
self._task = GeneratorTask(self.update_completer_and_add,
self.add_str)
self._task.start(cmpl, start, ignore=(self.svc.get_current_word(),))
self.show_auto = show_auto
if visible:
self.completer_window.show()
self.completer.show_all()
#self.completer_visible = True
def accept(self, widget, suggestion):
self._delete_typed()
self._insert_typed(suggestion)
self.hide()
def _get_complete(self):
buf = self.editor.get_buffer()
i1 = buf.get_iter_at_mark(self.completer_pos)
i2 = buf.get_iter_at_mark(self.completer_end)
return buf.get_text(i1, i2)
def _get_typed(self):
buf = self.editor.get_buffer()
i1 = buf.get_iter_at_mark(self.completer_pos)
i2 = buf.get_iter_at_mark(self.completer_start)
return buf.get_text(i1, i2)
def _delete_typed(self):
buf = self.editor.props.buffer
i1 = buf.get_iter_at_mark(self.completer_pos)
i2 = buf.get_iter_at_mark(self.completer_start)
buf.delete(i1, i2)
def _insert_typed(self, text):
buf = self.editor.props.buffer
i1 = buf.get_iter_at_mark(self.completer_pos)
buf.insert(i1, text)
buf.move_mark(self.completer_start, i1)
i1.backward_chars(len(text))
buf.move_mark(self.completer_pos, i1)
def _append_typed(self, char):
if not char:
return
self._replace_typed(self._get_typed() + char)
def _replace_typed(self, text):
buf = self.editor.props.buffer
i1 = buf.get_iter_at_mark(self.completer_pos)
i2 = buf.get_iter_at_mark(self.completer_start)
buf.delete(i1, i2)
buf.insert(i1, text)
#i1.backward_chars(len(text))
buf.move_mark(self.completer_start, i1)
def _get_suggested(self):
buf = self.editor.props.buffer
i1 = buf.get_iter_at_mark(self.completer_start)
i2 = buf.get_iter_at_mark(self.completer_end)
return buf.get_text(i1, i2)
def _delete_suggested(self):
buf = self.editor.props.buffer
if not self.completer_start or not self.completer_end:
return
i1 = buf.get_iter_at_mark(self.completer_start)
i2 = buf.get_iter_at_mark(self.completer_end)
buf.delete(i1, i2)
def d(self):
buf = self.editor.props.buffer
if self.completer_start:
print "cur", buf.props.cursor_position
print "pos", buf.get_iter_at_mark(self.completer_pos).get_offset()
print "start", buf.get_iter_at_mark(self.completer_start).get_offset()
print "end", buf.get_iter_at_mark(self.completer_end).get_offset()
def _replace_suggested(self, text, mark=True):
buf = self.editor.props.buffer
i1 = buf.get_iter_at_mark(self.completer_start)
i2 = buf.get_iter_at_mark(self.completer_end)
buf.delete(i1, i2)
buf.insert(i1, text)
i2 = buf.get_iter_at_mark(self.completer_end)
if mark:
buf.select_range(
i2,
i1)
def _get_missing(self, word):
# returns the missing part a suggestion that was already typed
return word[len(self._get_typed()):]
#buf.place_cursor(i1)
#return i
def suggestion_selected(self, widget, suggestion):
pos = self.completer_pos_user #editor.props.buffer.props.cursor_position
#buf.
#intext = self._get_missing(suggestion)
typed = self._get_typed()
self._delete_typed()
self._replace_typed(suggestion[:len(typed)])
self._replace_suggested(suggestion[len(typed):])
#self.editor.get_buffer().insert_at_cursor(suggestion)
#self.completer_visible = False
def tokenize(self, text):
#tokenize the text into usable autocompleter chunks
return self.list_matcher.findall(text)
def update_completer(self, full=False):
#update the state of simple internal completer
self.list_all.clear()
buf = self.editor.get_buffer()
it = buf.get_iter_at_offset(buf.props.cursor_position)
if buf.get_line_count() != len(self.list_cache) or full:
# full update of cache
lines = range(0, buf.get_line_count())
else:
# incremental update. we update the current line + above and below
lines = range(max(it.get_line()-1, 0),
min(it.get_line()+1, buf.get_line_count()) + 1)
for line in lines:
its = buf.get_iter_at_line(line)
if its.ends_line():
self.list_cache[line] = []
continue
ite = its.copy()
ite.forward_to_line_end()
ite.forward_char()
self.list_cache[line] = self.tokenize(buf.get_text(its, ite))
for val in self.list_cache.itervalues():
self.list_all.update(val)
def add_str(self, line):
#print "add line", line
if len(self.completer) > 3000:
#emergency stop
self.svc.log.info(
_("Emergency stop of completer: Too many entries"))
self._task.stop()
return
if isinstance(line, Suggestion):
self.completer.add_str(line, type_=line.type_)
else:
self.completer.add_str(line)
# if we are in show_auto mode, the completion window
# is delayed until we have the first visible item.
if not self.completer_visible and self.show_auto and \
self.editor.get_toplevel().has_toplevel_focus() and \
self.editor.is_focus():
if len(self.completer.model):
self.completer_window.show()
def on_cursor_moved(self, widget, itr):
buf = self.editor.get_buffer()
pos = buf.props.cursor_position
if self.completer_visible and (
pos < buf.get_iter_at_mark(self.completer_pos).get_offset()
or pos > buf.get_iter_at_mark(self.completer_end).get_offset()
):
# buffer is visible, but the position of the cursor is not longer
# in the suggestion range.
self.hide()
def on_keypress(self, editor, event):
#print event
if event.type == gdk.KEY_PRESS and self.svc.opt('autocomplete'):
modifiers = event.get_state() & gtk.accelerator_get_default_mod_mask()
#print event.keyval, event.state, modifiers
#print event.keyval & modifiers
#print int(modifiers)
#print self.svc.key_toggle
#print self.svc.key_close
#print self.svc.key_next
#print self.svc.key_prev
#print self.svc.key_accept
def etest(pref):
return event.keyval == pref[0] and modifiers == pref[1]
#tab 65289
if etest(self.svc.key_toggle):
#self.completion.present()
self.toggle_popup()
return True
# enter tab
elif etest((gtk.keysyms.Return, 0)):
if self.completer_visible and \
len(self._get_suggested()):
self.accept(None, self._get_complete())
return True
elif etest(self.svc.key_accept):
if self.completer_visible:
self.accept(None, self._get_complete())
return True
# key up, key down, ?, pgup, pgdown
elif any((etest(self.svc.key_next), etest(self.svc.key_prev),
etest((gtk.keysyms.Page_Up,0)),
etest((gtk.keysyms.Page_Down,0)))):
#(65362, 65364, 65293, 65366, 65365):
if self.completer_visible:
self.completer.on_key_press_event(editor, event)
return True
elif etest(self.svc.key_close): # esc
self.hide()
#elif event.keyval == 65056:
# return True
#elif event.keyval == 65515:
# # show
# return True
# FIXME: this should usally be done via connect_after
# and the code later should be a extra function
# but doesn't work as this super function returns True
# and stops the processing of connect_after functions
modified = self.editor.do_key_press_event(editor, event)
#print modified, repr(event.string)
#self.d()
#if self.completer_start:
# buf = self.editor.get_buffer()
# buf.move_mark(self.completer_start,
# buf.get_iter_at_offset(buf.props.cursor_position))
#if modified:
# task = AsyncTask(work_callback=self.update_completer)
# task.start()
if self.completer_visible:
if event.keyval in (gtk.keysyms.BackSpace, gtk.keysyms.Delete): # delete
# once again the buffer problem
typed = self._get_typed()
if not len(typed):
self.hide()
else:
self.completer.filter = typed
elif len(event.string):
info = self.svc.boss.get_service('language').get_info(self.document)
if event.string not in info.word:
self.hide()
else:
#print "will delete", self._get_suggested(), self._get_typed()
if self.completer_start:
buf = self.editor.get_buffer()
buf.move_mark(self.completer_start,
buf.get_iter_at_offset(buf.props.cursor_position))
self.completer.filter = self._get_typed()
return True
# we have to retest as the completer could just have been closed by
# a non word character but an attrib char should open it again
if not self.completer_visible:
info = self.svc.boss.get_service('language').get_info(self.document)
buf = self.editor.get_buffer()
it = buf.get_iter_at_offset(buf.props.cursor_position)
if self.svc.opt('auto_attr'):
# we have to build a small buffer, because the character
# typed is not in the buffer yet
for x in info.completer_open:
end = it.copy()
end.backward_chars(len(x))
rv = it.backward_search(x, gtk.TEXT_SEARCH_TEXT_ONLY, end)
if rv and x[-1] == event.string:
gcall(self.show, visible=False, show_auto=True)
break
if self.show_auto:
# the completer should be shown, but the user typed a non word
# character so break up
if len(event.string) and event.string not in info.word:
self.show_auto = False
elif len(event.string):
#print "append typed", self._get_suggested(), self._get_typed()
self._delete_suggested()
self._append_typed(event.string)
self.completer.filter = self._get_typed()
#if self.svc.opt('auto_char'):
# info = self.svc.boss.get_service('language').get_info(self.document)
# buf = self.editor.get_buffer()
# it = buf.get_iter_at_offset(buf.props.cursor_position)
# # we have to build a small buffer, because the character
# # typed is not in the buffer yet
# it2 = buf.get_iter_at_offset(max(buf.props.cursor_position-self.svc.opt('auto_char'), 0))
# sbuf = buf.get_text(it, it2) + event.string
# print sbuf
# for x in info.attributerefs:
# if sbuf.rfind(x) == len(sbuf)-1 and \
# sbuf[-1] == event.string:
# gcall(self.show)
# return
#res = it.backward_search(x, gtk.TEXT_SEARCH_TEXT_ONLY)
#print res
#print res[0].get_offset(), res[1].get_offset(), it.get_offset(), buf.props.cursor_position
#if res and res[1].get_offset() == it.get_offset()+1:
# self.show()
# break
#self.completer.filter += event.string
#self.completer_pos_user += len(event.string)
if modified:
#prio of 50 is higher then
gobject.idle_add(self.update_completer,
gobject.PRIORITY_HIGH)
#self.update_completer()
# task = AsyncTask(work_callback=self.update_completer)
return True
class MooeditEventsConfig(EventsConfig):
def subscribe_all_foreign(self):
self.subscribe_foreign('editor', 'marker-changed',
self.marker_changed)
self.subscribe_foreign('buffer', 'document-typchanged',
self.doctype_changed)
def marker_changed(self, marker):
self.svc.on_marker_changed(marker)
def doctype_changed(self, document):
if document.doctype and getattr(document, 'editor', None):
document.editor.set_lang(MAPPINGS.get(document.doctype.internal,
None))
# Service class
class Mooedit(EditorService):
"""Moo Editor Interface for PIDA
Let's you enjoy all the GUI love from mooedit with all the superb IDE
features PIDA has to offer. Use with caution, may lead to addiction.
"""
options_config = MooeditOptionsConfig
actions_config = MooeditActionsConfig
events_config = MooeditEventsConfig
def pre_start(self):
# mooedit is able to open empty documents
self._last_modified = None
self._docwin = None
self.features.publish('new_file')
try:
self.script_path = os.path.join(pida_home, 'pida_mooedit.rc')
self._state_path = os.path.join(pida_home, 'pida_mooedit.state')
try:
moo.utils.prefs_load(sys_files=None, file_rc=self.script_path, file_state=self._state_path)
except gobject.GError:
pass
# if a workspace specific rc file exists, load it and make it the current one
if os.path.exists(os.path.join(pida_home, 'pida_mooedit.%s.rc' %workspace_name())):
self.script_path = os.path.join(pida_home, 'pida_mooedit.%s.rc' %workspace_name())
try:
moo.utils.prefs_load(sys_files=None, file_rc=self.script_path, file_state=None)
except gobject.GError:
pass
self._editor_instance = moo.edit.create_editor_instance()
moo.edit.plugin_read_dirs()
self._documents = {}
self._current = None
self._main = MooeditMain(self)
self._preferences = MooeditPreferences(self)
self._embed = self._main._embed
self._embed.connect("switch-page", self._changed_page)
self._embed.connect("drag_drop", self._drag_drop_cb)
self._embed.connect("drag_motion", self._drag_motion_cb)
self._embed.connect ("drag_data_received", self._drag_data_recv)
self._embed.connect('focus-out-event', self.do_doc_destroy)
self.boss.window.connect('focus-out-event', self.do_doc_destroy)
self._embed.drag_dest_set(0, [
("GTK_NOTEBOOK_TAB", gtk.TARGET_SAME_APP, 1),
("text/uri-list", 0, 2)],
gtk.gdk.ACTION_COPY | gtk.gdk.ACTION_MOVE)
self.boss.cmd('window', 'add_view', paned='Editor', view=self._main)
return True
except Exception, err:
import traceback
traceback.print_exc()
return False
def start(self):
# we only disable the buttons if no document is loaded
# session may already have loaded docs
if not len(self._documents):
self.update_actions(enabled=False)
self.get_action('mooedit_last_edit').set_sensitive(False)
self._update_keyvals()
self.boss.get_service('editor').emit('started')
# build a mapping table
build_mapping(moo.edit.lang_mgr_default(),
self.boss.get_service('language').doctypes)
return True
def on_marker_changed(self, marker):
# called when a marker changed. update the editor
for view in self._documents.itervalues():
# we iterate over all markers so they
if view.document.filename == marker.filename:
view.update_marker(marker)
def save_moo_state(self):
moo.utils.prefs_save(self.script_path, self._state_path)
def show_preferences(self, visible):
if visible:
self.boss.cmd('window', 'add_view', paned='Plugin',
view=self._preferences)
else:
self.boss.cmd('window', 'remove_view',
view=self._preferences)
def pre_stop(self):
views = [view for view in self._documents.values()]
rv = True
for view in views:
editor_close = view.editor.close()
if not editor_close:
rv = False
else:
self._embed.remove_page(self._embed.page_num(view))
return rv
def update_actions(self, enabled=True):
all = True
if not enabled:
all = False
self.get_action('save').set_sensitive(all)
self.get_action('mooedit_save_as').set_sensitive(all)
self.get_action('cut').set_sensitive(all)
self.get_action('copy').set_sensitive(all)
self.get_action('paste').set_sensitive(all)
if enabled and self._current and self._current.editor:
self.get_action('undo').set_sensitive(self._current.editor.can_undo())
self.get_action('redo').set_sensitive(self._current.editor.can_redo())
else:
self.get_action('undo').set_sensitive(all)
self.get_action('redo').set_sensitive(all)
self.get_action('focus_editor').set_sensitive(all)
self.get_action('mooedit_goto').set_sensitive(all)
self.get_action('mooedit_find').set_sensitive(all)
self.get_action('mooedit_find_next').set_sensitive(all)
self.get_action('mooedit_find_prev').set_sensitive(all)
self.get_action('mooedit_find_word_next').set_sensitive(all)
self.get_action('mooedit_find_word_prev').set_sensitive(all)
self.get_action('mooedit_replace').set_sensitive(all)
def _update_keyvals(self):
self.key_toggle = gtk.accelerator_parse(
self.get_keyboard_options()['mooedit_complete_toggle'].value)
self.key_close = gtk.accelerator_parse(
self.get_keyboard_options()['mooedit_completer_close'].value)
self.key_next = gtk.accelerator_parse(
self.get_keyboard_options()['mooedit_completer_next'].value)
self.key_prev = gtk.accelerator_parse(
self.get_keyboard_options()['mooedit_completer_prev'].value)
self.key_accept = gtk.accelerator_parse(
self.get_keyboard_options()['mooedit_completer_accept'].value)
def open(self, document):
"""Open a document"""
if document.unique_id not in self._documents.keys():
if self._load_file(document):
self._embed.set_current_page(-1)
if self._embed.get_n_pages() > 0:
self.update_actions()
if document.is_new:
self.get_action('save').set_sensitive(True)
else:
self.get_action('save').set_sensitive(False)
else:
#EA: the file was already open. we switch to it.
self._embed.set_current_page(self._embed.page_num(self._documents[document.unique_id]))
self.update_actions()
def open_list(self, documents):
good = None
for doc in documents:
try:
good = self._load_file(doc)
except DocumentException, err:
#self.log.exception(err)
self.boss.get_service('editor').emit('document-exception', error=err)
# we open the last good document now normally again to
# make system consistent
if good:
self.open(doc)
def close(self, document):
"""Close a document"""
# remove the last modified reference as it is not available when closed
if not self._documents.has_key(document.unique_id):
return True
if self._last_modified and self._last_modified[0].document == document:
self._last_modified = None
self.get_action('mooedit_last_edit').set_sensitive(False)
closing = self._documents[document.unique_id].editor.close()
if closing:
self._documents[document.unique_id].close()
self._embed.remove_page(self._embed.page_num(self._documents[document.unique_id]))
del self._documents[document.unique_id]
if self._embed.get_n_pages() == 0:
self.update_actions(enabled=False)
return closing
def save(self):
"""Save the current document"""
# man, medit resets the language on save
olang = self._current.editor.props.buffer.get_lang()
self._current.editor.save()
self._current.editor.set_lang(olang)
gcall(self._current.editor.set_lang, olang)
self.boss.cmd('buffer', 'current_file_saved')
def save_as(self):
"""Save the current document"""
olang = self._current.editor.props.buffer.get_lang()
self._current.editor.save_as()
self._current.editor.set_lang(olang)
gcall(self._current.editor.set_lang, olang)
self.boss.cmd('buffer', 'current_file_saved')
def cut(self):
"""Cut to the clipboard"""
self._current.editor.emit('cut-clipboard')
def copy(self):
"""Copy to the clipboard"""
self._current.editor.emit('copy-clipboard')
def paste(self):
"""Paste from the clipboard"""
self._current.editor.emit('paste-clipboard')
def undo(self):
self._current.editor.undo()
self.get_action('redo').set_sensitive(True)
if not self._current.editor.can_undo():
self.get_action('undo').set_sensitive(False)
def redo(self):
self._current.editor.redo()
self.get_action('undo').set_sensitive(True)
if not self._current.editor.can_redo():
self.get_action('redo').set_sensitive(False)
def goto_line(self, line):
"""Goto a line"""
self._current.editor.move_cursor(line-1, 0, False, True)
self.boss.get_service('buffer').emit('document-goto',
document=self._current.document, line=line-1)
def goto_last_edit(self):
if self._last_modified:
view, count = self._last_modified
self.open(view.document)
itr = view.editor.get_buffer().get_iter_at_offset(count)
view.editor.get_buffer().place_cursor(itr)
view.editor.scroll_to_iter(itr, 0.05, use_align=True)
def set_path(self, path):
pass
def grab_focus(self):
if self._current is not None:
self._current.editor.grab_focus()
def _changed_page(self, notebook, page, page_num):
self._current = self._embed.get_nth_page(page_num)
self.boss.cmd('buffer', 'open_file', document=self._current.document)
def reload_document(self, document):
"""
Reloads a document from disc
"""
# TODO: moo does no export reload functionality, so this really sucks
view = self._documents[document.unique_id]
buf = document.editor.get_buffer()
last_line = buf.get_iter_at_offset(buf.props.cursor_position)\
.get_line()
document.editor.disconnect_by_func(self._buffer_status_changed)
document.editor.disconnect_by_func(self._buffer_renamed)
document.editor.get_buffer().disconnect_by_func(self._buffer_changed)
closing = document.editor.close()
if closing:
label = document.editor._label
view.remove(document.editor)
editor = self._editor_instance.create_doc(document.filename)
editor._label = label
editor.inputter = PidaMooInput(self, editor, document)
document.editor = editor
view.set_editor(editor)
gcall(editor.move_cursor, last_line, 0, False, True)
document.editor.connect("doc_status_changed", self._buffer_status_changed, view)
document.editor.connect("filename-changed", self._buffer_renamed, view)
document.editor.get_buffer().connect("changed", self._buffer_changed, view)
document.editor.emit("doc_status_changed")
def _load_file(self, document):
try:
if document is None:
editor = self._editor_instance.new_doc()
else:
editor = self._editor_instance.create_doc(document.filename)
document.editor = editor
editor.inputter = PidaMooInput(self, editor, document)
editor.props.show_line_marks = True
editor.props.enable_bookmarks = False
#FIXME: this should be implemented but needs some code and a pref
#editor.props.enable_folding = True
#ind = PidaMooIndenter(editor, document)
#print ind
#editor.set_indenter(ind)
view = MooeditView(document)
view._star = False
view._exclam = False
document.editor.connect("doc_status_changed", self._buffer_status_changed, view)
document.editor.connect("filename-changed", self._buffer_renamed, view)
document.editor.get_buffer().connect("changed", self._buffer_changed, view)
label = self._embed._create_tab(document)
self._documents[document.unique_id] = view
self._embed.append_page(view, label)
self._embed.set_tab_reorderable(view, True)
#self._embed.set_tab_detachable(view, True)
self._current = view
return True
except Exception, err:
#self.log.exception(err)
raise DocumentException(err.message, document=document, orig=err)
def _buffer_status_changed(self, buffer, view):
status = view.editor.get_status()
if moo.edit.EDIT_MODIFIED & status == moo.edit.EDIT_MODIFIED:
if not self._current.editor.can_redo():
self.get_action('redo').set_sensitive(False)
if not view._star:
s = view.editor._label._markup
if view._exclam:
view._exclam = False
ns = "*" + s
view.editor._label.set_markup(ns)
view._star = True
self.get_action('undo').set_sensitive(True)
self.get_action('save').set_sensitive(True)
if moo.edit.EDIT_CLEAN & status == moo.edit.EDIT_CLEAN:
status = 0
if moo.edit.EDIT_NEW & status == moo.edit.EDIT_NEW:
status = 0
if moo.edit.EDIT_CHANGED_ON_DISK & status == moo.edit.EDIT_CHANGED_ON_DISK:
if not view._exclam:
s = view.editor._label._markup
if view._star:
view._star = False
ns = "!" + s
view.editor._label.set_markup(ns)
view._exclam = True
self.get_action('save').set_sensitive(True)
if status == 0:
if view._star or view._exclam:
s = view.editor._label.get_text()
ns = view.editor._label._markup
view._exclam = False
view._star = False
view.editor._label.set_markup(ns)
self.get_action('save').set_sensitive(False)
def _buffer_changed(self, buffer, view):
self._last_modified = (view, buffer.props.cursor_position)
self.get_action('mooedit_last_edit').set_sensitive(True)
def _buffer_modified(self, buffer, view):
s = view.editor._label.get_text()
ns = "*" + s
view.editor._label.set_markup(ns)
view.editor._label._markup(ns)
def _buffer_renamed(self, buffer, new_name, view):
view.document.filename = new_name
ns = self._get_document_title(view.document)
view.editor._label.set_markup(ns)
view.editor._label._markup = ns
view._exclam = False
view._star = False
def _get_document_title(self, document):
dsp = self.opt('display_type')
if dsp == 'filename':
return document.get_markup(document.markup_string_if_project)
elif dsp == 'fullpath':
return document.get_markup(document.markup_string_fullpath)
return document.markup
def _drag_motion_cb (self, widget, context, x, y, time):
list = widget.drag_dest_get_target_list()
target = widget.drag_dest_find_target(context, list)
if target is None:
return False
else:
if target == "text/uri-list":
context.drag_status(gtk.gdk.ACTION_COPY, time)
else:
widget.drag_get_data(context, "GTK_NOTEBOOK_TAB", time)
return True
def _drag_drop_cb (self, widget, context, x, y, time):
list = widget.drag_dest_get_target_list()
target = widget.drag_dest_find_target (context, list);
if (target == "text/uri-list"):
widget.drag_get_data (context, "text/uri-list", time)
else:
context.finish (False, False, time)
return True
def _drag_data_recv(self, widget, context, x, y, selection, targetType, time):
if targetType == 2:
for filename in selection.get_uris():
widget._mooedit.svc.boss.cmd('buffer', 'open_file', file_name=filename[7:])
return True
else:
return False
def get_content(self, editor):
return editor.get_buffer().props.text
def set_content(self, editor, text):
return editor.get_buffer().set_text(text)
def _get_current_word_pos(self):
# returns the start, endposition of the current word and the text
buf = self._current.editor.get_buffer()
cursor = buf.props.cursor_position
try:
# moo stores the text always as utf-8 in the internal buffer
txt = buf.props.text.decode('utf-8')
except UnicodeDecodeError:
txt = buf.props.text
start = cursor-1
end = cursor
# FIXME: maybe this is faster with a regular expression
while end < len(txt):
if txt[end].isspace():
break
end += 1
# this isn't handled easy with a regular expression as its a
# forward lookup. maybe we could search for whitespace and guess
# as startstring max(0, cursor-10) and if it doesn't find anything
# we use the full buffer and use the last find...
while start >= 0:
if txt[start].isspace():
start += 1
break
start -= 1
start = max(start, 0)
return (start, end, txt)
def get_current_word(self):
"""
Returns the word the cursor is in or the selected text
"""
start, end, txt = self._get_current_word_pos()
return txt[start:end]
def call_with_current_word(self, callback):
start, end, txt = self._get_current_word_pos()
rv = txt[start:end]
if rv:
callback(rv)
def call_with_selection(self, callback):
if not self._current.editor.has_selection():
return
buf = self._current.editor.get_buffer()
tmb = buf.get_selection_bounds()
try:
rv = buf.props.text.decode('utf-8') \
[tmb[0].get_offset():tmb[1].get_offset()]
except UnicodeDecodeError:
# the buf.props.text is raw binary. so we have to convert it to
# unicode
return
callback(rv)
def call_with_selection_or_word(self, callback):
if self._current.editor.has_selection():
self.call_with_selection(callback)
else:
self.call_with_current_word(callback)
def insert_text(self, text):
self._current.editor.get_buffer().insert_at_cursor(text)
def delete_current_word(self):
start, end, txt = self._get_current_word_pos()
buf = self._current.editor.get_buffer()
buf.delete(buf.get_iter_at_offset(start),
buf.get_iter_at_offset(end))
def get_current_line(self):
if not self._current:
return None
buf = self._current.editor.get_buffer()
i = buf.get_iter_at_offset(buf.props.cursor_position)
return i.get_line()+1
def replace_line(self, editor, lineno, text):
"""
Replace a line in the editor. lineno is index 0 based.
"""
buf = editor.get_buffer()
it1 = buf.get_iter_at_line(lineno)
it2 = buf.get_iter_at_line(lineno)
it2.forward_to_line_end()
buf.delete(it1, it2)
buf.insert(it1, text)
def get_cursor_position(self):
buf = self._current.editor.get_buffer()
return buf.props.cursor_position
def set_cursor_position(self, position, scroll=True):
#FIXME: return current position
buf = self._current.editor.get_buffer()
itr = buf.get_iter_at_offset(position)
buf.place_cursor(itr)
if scroll:
itr = buf.get_iter_at_offset(position)
self._current.editor.scroll_to_iter(itr, 0.05, use_align=True)
def do_doc_destroy(self, *args):
if self._docwin:
self._docwin.destroy()
self._docwin = None
def on_doc_destroy(self, *args):
self._current.editor.props.buffer.disconnect(self._editor_mi)
def show_documentation(self):
buf = self._current.editor.props.buffer
rec = self._current.editor.get_iter_location(
buf.get_iter_at_offset(
buf.props.cursor_position))
pos = self._current.editor.buffer_to_window_coords(
gtk.TEXT_WINDOW_WIDGET,
rec.x, rec.y)
abspos = self._current.editor.window.get_origin()
rpos = (pos[0]+abspos[0], pos[1]+abspos[1])
dm = self.boss.get_service('language').get_documentator(
self._current.document)
if not dm:
return
docu = dm.get_documentation(buf.props.text,
buf.props.cursor_position)
#print docus
if self._docwin:
self._docwin.destroy()
if not docu:
self.boss.get_service('notify').notify(
data=_('No documentation found'), timeout=2000)
return
pd = PidaDocWindow(documentation=docu)
if not pd.valid:
self.notify_user(_("No documentation found"),
title=_("Show documentation"),
quick=True)
return
pd.connect("destroy-event", self.on_doc_destroy)
self._current.editor.props.buffer.connect(
'cursor-moved', self.do_doc_destroy)
pd.move(rpos[0], rpos[1] + rec.height)
self._docwin = pd
pd.present()
def define_sign_type(self, name, icon, linehl, text, texthl):
pass
def undefine_sign_type(self, name):
pass
def show_sign(self, type, filename, line):
pass
def hide_sign(self, type, filename, line):
pass
@staticmethod
def get_sanity_errors():
if moo is None:
return [
"medit python bindings are missing"
]
#XXX: version checks
# Required Service attribute for service loading
Service = Mooedit
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:<|fim▁end|> | _('Reload file content'), |
<|file_name|>euler004.py<|end_file_name|><|fim▁begin|>#!/bin/python3
import bisect
def is_palindrome(n):
return str(n) == str(n)[::-1]
def generate_palindromes():
return [i * j
for i in range(100, 1000)
for j in range(100, 1000)
if is_palindrome(i * j)]
def find_lt(a, x):
'Find rightmost value less than x'
i = bisect.bisect_left(a, x)
if i:
return a[i - 1]
raise ValueError
<|fim▁hole|> n = int(input().strip())
print(find_lt(palindromes, n))<|fim▁end|> | palindromes = sorted(generate_palindromes())
test_cases = int(input().strip())
for _ in range(test_cases): |
<|file_name|>aeabi_memcpy.rs<|end_file_name|><|fim▁begin|>#![cfg(all(
target_arch = "arm",
not(any(target_env = "gnu", target_env = "musl")),
target_os = "linux",
feature = "mem"
))]
#![feature(compiler_builtins_lib)]
#![feature(lang_items)]
#![no_std]
extern crate compiler_builtins;
// test runner
extern crate utest_cortex_m_qemu;
// overrides `panic!`
#[macro_use]
extern crate utest_macros;
macro_rules! panic {
($($tt:tt)*) => {
upanic!($($tt)*);
};
}
extern "C" {
fn __aeabi_memcpy(dest: *mut u8, src: *const u8, n: usize);
fn __aeabi_memcpy4(dest: *mut u8, src: *const u8, n: usize);
}
struct Aligned {
array: [u8; 8],
_alignment: [u32; 0],
}
impl Aligned {
fn new(array: [u8; 8]) -> Self {
Aligned {
array: array,
_alignment: [],
}
}
}
#[test]
fn memcpy() {
let mut dest = [0; 4];
let src = [0xde, 0xad, 0xbe, 0xef];
for n in 0..dest.len() {
dest.copy_from_slice(&[0; 4]);
unsafe { __aeabi_memcpy(dest.as_mut_ptr(), src.as_ptr(), n) }
assert_eq!(&dest[0..n], &src[0..n])
}
}
#[test]
fn memcpy4() {
let mut aligned = Aligned::new([0; 8]);
let dest = &mut aligned.array;
let src = [0xde, 0xad, 0xbe, 0xef, 0xba, 0xad, 0xf0, 0x0d];
for n in 0..dest.len() {<|fim▁hole|> assert_eq!(&dest[0..n], &src[0..n])
}
}<|fim▁end|> | dest.copy_from_slice(&[0; 8]);
unsafe { __aeabi_memcpy4(dest.as_mut_ptr(), src.as_ptr(), n) }
|
<|file_name|>virtualdisk.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2007, 2008 Agostino Russo
# Python port of wubi/disckimage/main.c by Hampus Wessman
#
# Written by Agostino Russo <agostino.russo@gmail.com>
#
# win32.ui is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# win32.ui is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
'''
Allocates disk space for the virtual disk
'''
import ctypes
from ctypes import c_long, byref
from winui import defs
import sys
import logging
log = logging.getLogger('Virtualdisk')
def create_virtual_disk(path, size_mb):
'''
Fast allocation of disk space
This is done by using the windows API
The initial and final block are zeroed
'''
log.debug(" Creating virtual disk %s of %sMB" % (path, size_mb))
clear_bytes = 1000000
if not size_mb or size_mb < 1:
return
# Get Permission
grant_privileges()
# Create file
file_handle = defs.CreateFileW(
unicode(path),
defs.GENERIC_READ | defs.GENERIC_WRITE,
0,
defs.NULL,
<|fim▁hole|> if file_handle == defs.INVALID_HANDLE_VALUE:
log.exception("Failed to create file %s" % path)
# Set pointer to end of file */
file_pos = defs.LARGE_INTEGER()
file_pos.QuadPart = size_mb*1024*1024
if not defs.SetFilePointerEx(file_handle, file_pos, 0, defs.FILE_BEGIN):
log.exception("Failed to set file pointer to end of file")
# Set end of file
if not defs.SetEndOfFile(file_handle):
log.exception("Failed to extend file. Not enough free space?")
# Set valid data (if possible), ignore errors
call_SetFileValidData(file_handle, file_pos)
# Set pointer to beginning of file
file_pos.QuadPart = 0
result = defs.SetFilePointerEx(
file_handle,
file_pos,
defs.NULL,
defs.FILE_BEGIN)
if not result:
log.exception("Failed to set file pointer to beginning of file")
# Zero chunk of file
zero_file(file_handle, clear_bytes)
# Set pointer to end - clear_bytes of file
file_pos.QuadPart = size_mb*1024*1024 - clear_bytes
result = defs.SetFilePointerEx(
file_handle,
file_pos,
defs.NULL,
defs.FILE_BEGIN)
if not result:
log.exception("Failed to set file pointer to end - clear_bytes of file")
# Zero file
zero_file(file_handle, clear_bytes)
defs.CloseHandle(file_handle)
def grant_privileges():
# For version < Windows NT, no privileges are involved
full_version = sys.getwindowsversion()
major, minor, build, platform, txt = full_version
if platform < 2:
log.debug("Skipping grant_privileges, because Windows 95/98/ME was detected")
return
# SetFileValidData() requires the SE_MANAGE_VOLUME_NAME privilege, so we must enable it
# on the process token. We don't attempt to strip the privilege afterward as that would
# introduce race conditions. */
handle = ctypes.c_long(0)
if defs.OpenProcessToken(defs.GetCurrentProcess(), defs.TOKEN_ADJUST_PRIVILEGES|defs.TOKEN_QUERY, byref(handle)):
luid = defs.LUID()
if defs.LookupPrivilegeValue(defs.NULL, defs.SE_MANAGE_VOLUME_NAME, byref(luid)):
tp = defs.TOKEN_PRIVILEGES()
tp.PrivilegeCount = 1
tp.Privileges[0].Luid = luid
tp.Privileges[0].Attributes = defs.SE_PRIVILEGE_ENABLED
if not defs.AdjustTokenPrivileges(handle, defs.FALSE, byref(tp), 0, defs.NULL, defs.NULL):
log.debug("grant_privileges: AdjustTokenPrivileges() failed.")
else:
log.debug("grant_privileges: LookupPrivilegeValue() failed.")
defs.CloseHandle(handle)
else:
log.debug("grant_privileges: OpenProcessToken() failed.")
def call_SetFileValidData(file_handle, size_bytes):
# No need, Windows 95/98/ME do this automatically anyway.
full_version = sys.getwindowsversion()
major, minor, build, platform, txt = full_version
if platform < 2:
log.debug("Skipping SetFileValidData, because Windows 95/98/ME was detected")
return
try:
SetFileValidData = ctypes.windll.kernel32.SetFileValidData
except:
log.debug("Could not load SetFileValidData.")
return
SetFileValidData(file_handle, size_bytes)
def zero_file(file_handle, clear_bytes):
bytes_cleared = 0
buf_size = 1000
n_bytes_written = c_long(0)
write_buf = "0"*buf_size
while bytes_cleared < clear_bytes:
bytes_to_write = buf_size
if (bytes_to_write > clear_bytes - bytes_cleared):
bytes_to_write = clear_bytes - bytes_cleared
result = defs.WriteFile(
file_handle,
write_buf,
bytes_to_write,
byref(n_bytes_written),
defs.NULL)
if not result or not n_bytes_written.value:
log.exception("WriteFile() failed!")
bytes_cleared += n_bytes_written.value<|fim▁end|> | defs.CREATE_ALWAYS,
defs.FILE_ATTRIBUTE_NORMAL,
defs.NULL)
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>"""
A Pygments lexer for Magpie.
"""
from setuptools import setup
__author__ = 'Robert Nystrom'
setup(
name='Magpie',
version='1.0',
description=__doc__,
author=__author__,
packages=['magpie'],<|fim▁hole|> '''
)<|fim▁end|> | entry_points='''
[pygments.lexers]
magpielexer = magpie:MagpieLexer |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[macro_use]
extern crate num_derive;
#[macro_use]
extern crate serde;
pub mod resources;
use crossbeam_channel::{Receiver, Sender};
use ipc_channel::ipc::IpcSender;
use keyboard_types::KeyboardEvent;
use msg::constellation_msg::{InputMethodType, PipelineId, TopLevelBrowsingContextId};
use servo_url::ServoUrl;
use std::fmt::{Debug, Error, Formatter};
use webrender_api::units::{DeviceIntPoint, DeviceIntSize};
pub use webxr_api::MainThreadWaker as EventLoopWaker;
/// A cursor for the window. This is different from a CSS cursor (see
/// `CursorKind`) in that it has no `Auto` value.
#[repr(u8)]
#[derive(Clone, Copy, Deserialize, Eq, FromPrimitive, PartialEq, Serialize)]
pub enum Cursor {
None,
Default,
Pointer,
ContextMenu,
Help,
Progress,
Wait,
Cell,
Crosshair,
Text,
VerticalText,
Alias,
Copy,
Move,
NoDrop,
NotAllowed,
Grab,
Grabbing,
EResize,
NResize,
NeResize,
NwResize,
SResize,
SeResize,
SwResize,
WResize,
EwResize,
NsResize,
NeswResize,
NwseResize,
ColResize,
RowResize,
AllScroll,
ZoomIn,
ZoomOut,
}
<|fim▁hole|>}
impl EmbedderProxy {
pub fn send(&self, msg: (Option<TopLevelBrowsingContextId>, EmbedderMsg)) {
// Send a message and kick the OS event loop awake.
if let Err(err) = self.sender.send(msg) {
warn!("Failed to send response ({:?}).", err);
}
self.event_loop_waker.wake();
}
}
impl Clone for EmbedderProxy {
fn clone(&self) -> EmbedderProxy {
EmbedderProxy {
sender: self.sender.clone(),
event_loop_waker: self.event_loop_waker.clone(),
}
}
}
/// The port that the embedder receives messages on.
pub struct EmbedderReceiver {
pub receiver: Receiver<(Option<TopLevelBrowsingContextId>, EmbedderMsg)>,
}
impl EmbedderReceiver {
pub fn try_recv_embedder_msg(
&mut self,
) -> Option<(Option<TopLevelBrowsingContextId>, EmbedderMsg)> {
self.receiver.try_recv().ok()
}
pub fn recv_embedder_msg(&mut self) -> (Option<TopLevelBrowsingContextId>, EmbedderMsg) {
self.receiver.recv().unwrap()
}
}
#[derive(Deserialize, Serialize)]
pub enum ContextMenuResult {
Dismissed,
Ignored,
Selected(usize),
}
#[derive(Deserialize, Serialize)]
pub enum PromptDefinition {
/// Show a message.
Alert(String, IpcSender<()>),
/// Ask a Ok/Cancel question.
OkCancel(String, IpcSender<PromptResult>),
/// Ask a Yes/No question.
YesNo(String, IpcSender<PromptResult>),
/// Ask the user to enter text.
Input(String, String, IpcSender<Option<String>>),
}
#[derive(Deserialize, PartialEq, Serialize)]
pub enum PromptOrigin {
/// Prompt is triggered from content (window.prompt/alert/confirm/…).
/// Prompt message is unknown.
Untrusted,
/// Prompt is triggered from Servo (ask for permission, show error,…).
Trusted,
}
#[derive(Deserialize, PartialEq, Serialize)]
pub enum PromptResult {
/// Prompt was closed by clicking on the primary button (ok/yes)
Primary,
/// Prompt was closed by clicking on the secondary button (cancel/no)
Secondary,
/// Prompt was dismissed
Dismissed,
}
#[derive(Deserialize, Serialize)]
pub enum EmbedderMsg {
/// A status message to be displayed by the browser chrome.
Status(Option<String>),
/// Alerts the embedder that the current page has changed its title.
ChangePageTitle(Option<String>),
/// Move the window to a point
MoveTo(DeviceIntPoint),
/// Resize the window to size
ResizeTo(DeviceIntSize),
/// Show dialog to user
Prompt(PromptDefinition, PromptOrigin),
/// Show a context menu to the user
ShowContextMenu(IpcSender<ContextMenuResult>, Option<String>, Vec<String>),
/// Whether or not to allow a pipeline to load a url.
AllowNavigationRequest(PipelineId, ServoUrl),
/// Whether or not to allow script to open a new tab/browser
AllowOpeningBrowser(IpcSender<bool>),
/// A new browser was created by script
BrowserCreated(TopLevelBrowsingContextId),
/// Wether or not to unload a document
AllowUnload(IpcSender<bool>),
/// Sends an unconsumed key event back to the embedder.
Keyboard(KeyboardEvent),
/// Gets system clipboard contents
GetClipboardContents(IpcSender<String>),
/// Sets system clipboard contents
SetClipboardContents(String),
/// Changes the cursor.
SetCursor(Cursor),
/// A favicon was detected
NewFavicon(ServoUrl),
/// <head> tag finished parsing
HeadParsed,
/// The history state has changed.
HistoryChanged(Vec<ServoUrl>, usize),
/// Enter or exit fullscreen
SetFullscreenState(bool),
/// The load of a page has begun
LoadStart,
/// The load of a page has completed
LoadComplete,
/// A browser is to be closed
CloseBrowser,
/// A pipeline panicked. First string is the reason, second one is the backtrace.
Panic(String, Option<String>),
/// Open dialog to select bluetooth device.
GetSelectedBluetoothDevice(Vec<String>, IpcSender<Option<String>>),
/// Open file dialog to select files. Set boolean flag to true allows to select multiple files.
SelectFiles(Vec<FilterPattern>, bool, IpcSender<Option<Vec<String>>>),
/// Open interface to request permission specified by prompt.
PromptPermission(PermissionPrompt, IpcSender<PermissionRequest>),
/// Request to present an IME to the user when an editable element is focused.
ShowIME(InputMethodType),
/// Request to hide the IME when the editable element is blurred.
HideIME,
/// Servo has shut down
Shutdown,
/// Report a complete sampled profile
ReportProfile(Vec<u8>),
/// Notifies the embedder about media session events
/// (i.e. when there is metadata for the active media session, playback state changes...).
MediaSessionEvent(MediaSessionEvent),
/// Report the status of Devtools Server
OnDevtoolsStarted(Result<u16, ()>),
}
impl Debug for EmbedderMsg {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
match *self {
EmbedderMsg::Status(..) => write!(f, "Status"),
EmbedderMsg::ChangePageTitle(..) => write!(f, "ChangePageTitle"),
EmbedderMsg::MoveTo(..) => write!(f, "MoveTo"),
EmbedderMsg::ResizeTo(..) => write!(f, "ResizeTo"),
EmbedderMsg::Prompt(..) => write!(f, "Prompt"),
EmbedderMsg::AllowUnload(..) => write!(f, "AllowUnload"),
EmbedderMsg::AllowNavigationRequest(..) => write!(f, "AllowNavigationRequest"),
EmbedderMsg::Keyboard(..) => write!(f, "Keyboard"),
EmbedderMsg::GetClipboardContents(..) => write!(f, "GetClipboardContents"),
EmbedderMsg::SetClipboardContents(..) => write!(f, "SetClipboardContents"),
EmbedderMsg::SetCursor(..) => write!(f, "SetCursor"),
EmbedderMsg::NewFavicon(..) => write!(f, "NewFavicon"),
EmbedderMsg::HeadParsed => write!(f, "HeadParsed"),
EmbedderMsg::CloseBrowser => write!(f, "CloseBrowser"),
EmbedderMsg::HistoryChanged(..) => write!(f, "HistoryChanged"),
EmbedderMsg::SetFullscreenState(..) => write!(f, "SetFullscreenState"),
EmbedderMsg::LoadStart => write!(f, "LoadStart"),
EmbedderMsg::LoadComplete => write!(f, "LoadComplete"),
EmbedderMsg::Panic(..) => write!(f, "Panic"),
EmbedderMsg::GetSelectedBluetoothDevice(..) => write!(f, "GetSelectedBluetoothDevice"),
EmbedderMsg::SelectFiles(..) => write!(f, "SelectFiles"),
EmbedderMsg::PromptPermission(..) => write!(f, "PromptPermission"),
EmbedderMsg::ShowIME(..) => write!(f, "ShowIME"),
EmbedderMsg::HideIME => write!(f, "HideIME"),
EmbedderMsg::Shutdown => write!(f, "Shutdown"),
EmbedderMsg::AllowOpeningBrowser(..) => write!(f, "AllowOpeningBrowser"),
EmbedderMsg::BrowserCreated(..) => write!(f, "BrowserCreated"),
EmbedderMsg::ReportProfile(..) => write!(f, "ReportProfile"),
EmbedderMsg::MediaSessionEvent(..) => write!(f, "MediaSessionEvent"),
EmbedderMsg::OnDevtoolsStarted(..) => write!(f, "OnDevtoolsStarted"),
EmbedderMsg::ShowContextMenu(..) => write!(f, "ShowContextMenu"),
}
}
}
/// Filter for file selection;
/// the `String` content is expected to be extension (e.g, "doc", without the prefixing ".")
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct FilterPattern(pub String);
/// https://w3c.github.io/mediasession/#mediametadata
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct MediaMetadata {
/// Title
pub title: String,
/// Artist
pub artist: String,
/// Album
pub album: String,
}
impl MediaMetadata {
pub fn new(title: String) -> Self {
Self {
title,
artist: "".to_owned(),
album: "".to_owned(),
}
}
}
/// https://w3c.github.io/mediasession/#enumdef-mediasessionplaybackstate
#[repr(i32)]
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum MediaSessionPlaybackState {
/// The browsing context does not specify whether it’s playing or paused.
None_ = 1,
/// The browsing context is currently playing media and it can be paused.
Playing,
/// The browsing context has paused media and it can be resumed.
Paused,
}
/// https://w3c.github.io/mediasession/#dictdef-mediapositionstate
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct MediaPositionState {
pub duration: f64,
pub playback_rate: f64,
pub position: f64,
}
impl MediaPositionState {
pub fn new(duration: f64, playback_rate: f64, position: f64) -> Self {
Self {
duration,
playback_rate,
position,
}
}
}
/// Type of events sent from script to the embedder about the media session.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum MediaSessionEvent {
/// Indicates that the media metadata is available.
SetMetadata(MediaMetadata),
/// Indicates that the playback state has changed.
PlaybackStateChange(MediaSessionPlaybackState),
/// Indicates that the position state is set.
SetPositionState(MediaPositionState),
}
/// Enum with variants that match the DOM PermissionName enum
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum PermissionName {
Geolocation,
Notifications,
Push,
Midi,
Camera,
Microphone,
Speaker,
DeviceInfo,
BackgroundSync,
Bluetooth,
PersistentStorage,
}
/// Information required to display a permission prompt
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum PermissionPrompt {
Insecure(PermissionName),
Request(PermissionName),
}
/// Status for prompting user for permission.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum PermissionRequest {
Granted,
Denied,
}<|fim▁end|> | /// Sends messages to the embedder.
pub struct EmbedderProxy {
pub sender: Sender<(Option<TopLevelBrowsingContextId>, EmbedderMsg)>,
pub event_loop_waker: Box<dyn EventLoopWaker>, |
<|file_name|>keys.py<|end_file_name|><|fim▁begin|>Alchemy sentiment analysis: fb12d2c55fff36e1e268584e261b6b010b37279f
<|fim▁hole|><|fim▁end|> |
Africa Is Talking: 676dbd926bbb04fa69ce90ee81d3f5ffee2692aaf80eb5793bd70fe93e77dc2e |
<|file_name|>jquery.phantomjs.fix.js<|end_file_name|><|fim▁begin|>jQuery.each(("blur focus focusin focusout click dblclick " +
"mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave").split(" "),
function(i, name) {
jQuery.fn[name] = function() {
var el = this[0];
var ev = document.createEvent('MouseEvent');
ev.initMouseEvent(
name,
true /* bubble */, true /* cancelable */,
window, null,
0, 0, 0, 0, /* coordinates */
false, false, false, false, /* modifier keys */
0 /*left*/, null
);
el.dispatchEvent(ev);<|fim▁hole|>} );<|fim▁end|> | }; |
<|file_name|>file_path_watcher_util.cc<|end_file_name|><|fim▁begin|>// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/media_galleries/fileapi/file_path_watcher_util.h"
#include "base/bind.h"
#include "base/callback.h"
#include "base/location.h"
#include "base/logging.h"
#include "chrome/browser/media_galleries/fileapi/media_file_system_backend.h"
#include "content/public/browser/browser_thread.h"
namespace {
// Bounces |path| and |error| to |callback| from the FILE thread to the media
// task runner.
void OnFilePathChangedOnFileThread(
const base::FilePathWatcher::Callback& callback,
const base::FilePath& path,
bool error) {
DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::FILE));
MediaFileSystemBackend::MediaTaskRunner()->PostTask(
FROM_HERE, base::Bind(callback, path, error));
}
// The watch has to be started on the FILE thread, and the callback called by
// the FilePathWatcher also needs to run on the FILE thread.
void StartFilePathWatchOnFileThread(<|fim▁hole|> const base::FilePath& path,
const FileWatchStartedCallback& watch_started_callback,
const base::FilePathWatcher::Callback& path_changed_callback) {
DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::FILE));
// The watcher is created on the FILE thread because it is very difficult
// to safely pass an already-created file watcher to a different thread.
scoped_ptr<base::FilePathWatcher> watcher(new base::FilePathWatcher);
bool success = watcher->Watch(
path,
false /* recursive */,
base::Bind(&OnFilePathChangedOnFileThread, path_changed_callback));
if (!success)
LOG(ERROR) << "Adding watch for " << path.value() << " failed";
MediaFileSystemBackend::MediaTaskRunner()->PostTask(
FROM_HERE, base::Bind(watch_started_callback, base::Passed(&watcher)));
}
} // namespace
void StartFilePathWatchOnMediaTaskRunner(
const base::FilePath& path,
const FileWatchStartedCallback& watch_started_callback,
const base::FilePathWatcher::Callback& path_changed_callback) {
DCHECK(MediaFileSystemBackend::CurrentlyOnMediaTaskRunnerThread());
content::BrowserThread::PostTask(content::BrowserThread::FILE,
FROM_HERE,
base::Bind(&StartFilePathWatchOnFileThread,
path,
watch_started_callback,
path_changed_callback));
}<|fim▁end|> | |
<|file_name|>login.py<|end_file_name|><|fim▁begin|>from flask_wtf import Form
from wtforms import StringField, PasswordField, SubmitField
from wtforms.validators import DataRequired
<|fim▁hole|> submit = SubmitField('Log In', validators=[DataRequired()])<|fim▁end|> | class LoginForm(Form):
username = StringField('ID or Email Address', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub use self::{
bit_board::BitBoard,
board::Board,
player::{AiPlayer, PlayerKind},
};
mod bit_board;
mod board;
mod multi_direction;
mod player;
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Point(pub u32, pub u32);
impl Point {
fn from_offset(off: u32, size: Size) -> Point {
Point(off % size.0, off / size.0)
}
fn offset(self, size: Size) -> u32 {
self.0 + size.0 * self.1
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Size(pub u32, pub u32);
pub const MIN_SIZE: u32 = 2;
pub const MAX_SIZE: u32 = 8;
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Side {
Black,
White,
}
<|fim▁hole|> Side::White => Side::Black,
}
}
}<|fim▁end|> | impl Side {
pub fn flip(self) -> Side {
match self {
Side::Black => Side::White, |
<|file_name|>nzbToGamez.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
#
##############################################################################
### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones.
#
# This script sends the download to your automated media management servers.
#
# NOTE: This script requires Python to be installed on your system.
##############################################################################
#
### OPTIONS ###
## General
# Auto Update nzbToMedia (0, 1).
#
# Set to 1 if you want nzbToMedia to automatically check for and update to the latest version
#auto_update=0
# Safe Mode protection of DestDir (0, 1).
#
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
#safe_mode=1
## Gamez
# Gamez script category.
#
# category that gets called for post-processing with Gamez.
#gzCategory=games
# Gamez api key.
#gzapikey=
# Gamez host.
#
# The ipaddress for your Gamez server. e.g For the Same system use localhost or 127.0.0.1
#gzhost=localhost
# Gamez port.
#gzport=8085
# Gamez uses ssl (0, 1).
#
# Set to 1 if using ssl, else set to 0.
#gzssl=0
# Gamez library
#
# move downloaded games here.
#gzlibrary
# Gamez web_root
#
# set this if using a reverse proxy.
#gzweb_root=
# Gamez watch directory.
#
# set this to where your Gamez completed downloads are.
#gzwatch_dir=
## Posix
# Niceness for external tasks Extractor and Transcoder.
#
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10
# ionice scheduling class (0, 1, 2, 3).
#
# Set the ionice scheduling class. 0 for none, 1 for real time, 2 for best-effort, 3 for idle.
#ionice_class=2
# ionice scheduling class data.
#
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
#ionice_classdata=4
## WakeOnLan
# use WOL (0, 1).
#
# set to 1 to send WOL broadcast to the mac and test the server (e.g. xbmc) on the host and port specified.
#wolwake=0
# WOL MAC
#
# enter the mac address of the system to be woken.
#wolmac=00:01:2e:2D:64:e1
# Set the Host and Port of a server to verify system has woken.
#wolhost=192.168.1.37
#wolport=80
### NZBGET POST-PROCESSING SCRIPT ###
##############################################################################
import sys
import nzbToMedia
<|fim▁hole|>section = "Gamez"
result = nzbToMedia.main(sys.argv, section)
sys.exit(result)<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""<|fim▁hole|>"""<|fim▁end|> | Created on Wed Mar 2 12:13:32 2016
@author: Zahari Kassabov |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* Copyright (C) 2017 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
extern crate libc;
extern crate nom;
use nom::{digit};
use std::str;
use std;
use std::str::FromStr;
use log::*;
// We transform an integer string into a i64, ignoring surrounding whitespaces
// We look for a digit suite, and try to convert it.
// If either str::from_utf8 or FromStr::from_str fail,
// we fallback to the parens parser defined above
named!(getu16<u16>,
map_res!(
map_res!(
ws!(digit),
str::from_utf8
),
FromStr::from_str
)
);
// 227 Entering Passive Mode (212,27,32,66,221,243).
named!(pub ftp_pasv_response<u16>,
do_parse!(
tag!("227") >>
take_until_and_consume!("(") >><|fim▁hole|> part2: getu16 >>
alt! (tag!(").") | tag!(")")) >>
(
part1 * 256 + part2
)
)
);
#[no_mangle]
pub extern "C" fn rs_ftp_pasv_response(input: *const libc::uint8_t, len: libc::uint32_t) -> u16 {
let buf = unsafe{std::slice::from_raw_parts(input, len as usize)};
match ftp_pasv_response(buf) {
nom::IResult::Done(_, dport) => {
return dport;
}
nom::IResult::Incomplete(_) => {
let buf = unsafe{std::slice::from_raw_parts(input, len as usize)};
SCLogDebug!("pasv incomplete: '{:?}'", String::from_utf8_lossy(buf));
},
nom::IResult::Error(_) => {
let buf = unsafe{std::slice::from_raw_parts(input, len as usize)};
SCLogDebug!("pasv error on '{:?}'", String::from_utf8_lossy(buf));
},
}
return 0;
}
// 229 Entering Extended Passive Mode (|||48758|).
named!(pub ftp_epsv_response<u16>,
do_parse!(
tag!("229") >>
take_until_and_consume!("|||") >>
port: getu16 >>
alt! (tag!("|).") | tag!("|)")) >>
(
port
)
)
);
#[no_mangle]
pub extern "C" fn rs_ftp_epsv_response(input: *const libc::uint8_t, len: libc::uint32_t) -> u16 {
let buf = unsafe{std::slice::from_raw_parts(input, len as usize)};
match ftp_epsv_response(buf) {
nom::IResult::Done(_, dport) => {
return dport;
},
nom::IResult::Incomplete(_) => {
let buf = unsafe{std::slice::from_raw_parts(input, len as usize)};
SCLogDebug!("epsv incomplete: '{:?}'", String::from_utf8_lossy(buf));
},
nom::IResult::Error(_) => {
let buf = unsafe{std::slice::from_raw_parts(input, len as usize)};
SCLogDebug!("epsv incomplete: '{:?}'", String::from_utf8_lossy(buf));
},
}
return 0;
}<|fim▁end|> | digit >> tag!(",") >> digit >> tag!(",") >>
digit >> tag!(",") >> digit >> tag!(",") >>
part1: getu16 >>
tag!(",") >> |
<|file_name|>mx_test.go<|end_file_name|><|fim▁begin|>package bsw
import (
"testing"
)
func TestMX(t *testing.T) {
_, results, err := MX("stacktitan.com", "8.8.8.8")
if err != nil {
t.Error("error returned from MX")
t.Log(err)
}
found := false
for _, r := range results {
if r.Hostname == "mx1.emailsrvr.com" {
found = true
}
}
if !found {
t.Error("MX did not find correct mx server")
t.Log(results)
}<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>php.js<|end_file_name|><|fim▁begin|>define(["exports"], function (_exports) {
"use strict";
Object.defineProperty(_exports, "__esModule", {
value: true
});
_exports.phpLang = phpLang;
function phpLang(hljs) {
var VARIABLE = {
begin: "\\$+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*"
};
var PREPROCESSOR = {
className: "meta",
begin: /<\?(php)?|\?>/
};
var STRING = {
className: "string",
contains: [hljs.BACKSLASH_ESCAPE, PREPROCESSOR],
variants: [{
begin: 'b"',
end: '"'
}, {
begin: "b'",
end: "'"
}, hljs.inherit(hljs.APOS_STRING_MODE, {
illegal: null
}), hljs.inherit(hljs.QUOTE_STRING_MODE, {
illegal: null
})]
};
var NUMBER = {
variants: [hljs.BINARY_NUMBER_MODE, hljs.C_NUMBER_MODE]
};
return {
aliases: ["php", "php3", "php4", "php5", "php6", "php7"],
case_insensitive: true,
keywords: "and include_once list abstract global private echo interface as static endswitch " + "array null if endwhile or const for endforeach self var while isset public " + "protected exit foreach throw elseif include __FILE__ empty require_once do xor " + "return parent clone use __CLASS__ __LINE__ else break print eval new " + "catch __METHOD__ case exception default die require __FUNCTION__ " + "enddeclare final try switch continue endfor endif declare unset true false " + "trait goto instanceof insteadof __DIR__ __NAMESPACE__ " + "yield finally",
contains: [hljs.HASH_COMMENT_MODE, hljs.COMMENT("//", "$", {
contains: [PREPROCESSOR]
}), hljs.COMMENT("/\\*", "\\*/", {
contains: [{
className: "doctag",
begin: "@[A-Za-z]+"
}]
}), hljs.COMMENT("__halt_compiler.+?;", false, {
endsWithParent: true,
keywords: "__halt_compiler",
lexemes: hljs.UNDERSCORE_IDENT_RE
}), {
className: "string",
begin: /<<<['"]?\w+['"]?$/,
end: /^\w+;?$/,
contains: [hljs.BACKSLASH_ESCAPE, {
className: "subst",
variants: [{
begin: /\$\w+/
}, {
begin: /\{\$/,
end: /\}/
}]
}]
}, PREPROCESSOR, {
className: "keyword",
begin: /\$this\b/
}, VARIABLE, {
// swallow composed identifiers to avoid parsing them as keywords
begin: /(::|->)+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/
}, {
className: "function",
beginKeywords: "function",
end: /[;{]/,<|fim▁hole|> begin: "\\(",
end: "\\)",
contains: ["self", VARIABLE, hljs.C_BLOCK_COMMENT_MODE, STRING, NUMBER]
}]
}, {
className: "class",
beginKeywords: "class interface",
end: "{",
excludeEnd: true,
illegal: /[:\(\$"]/,
contains: [{
beginKeywords: "extends implements"
}, hljs.UNDERSCORE_TITLE_MODE]
}, {
beginKeywords: "namespace",
end: ";",
illegal: /[\.']/,
contains: [hljs.UNDERSCORE_TITLE_MODE]
}, {
beginKeywords: "use",
end: ";",
contains: [hljs.UNDERSCORE_TITLE_MODE]
}, {
begin: "=>" // No markup, just a relevance booster
}, STRING, NUMBER]
};
}
});<|fim▁end|> | excludeEnd: true,
illegal: "\\$|\\[|%",
contains: [hljs.UNDERSCORE_TITLE_MODE, {
className: "params", |
<|file_name|>inter.js<|end_file_name|><|fim▁begin|>'use strict';
import Maths from './maths.js'<|fim▁hole|><|fim▁end|> | import FSM from './fsm.js'
import { Animation, Interpolation } from './animation.js'
export { Maths, FSM, Interpolation, Animation } |
<|file_name|>fail_if_no_c.py<|end_file_name|><|fim▁begin|># Copyright 2009-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fail if the C extension module doesn't exist.<|fim▁hole|>
Only really intended to be used by internal build scripts.
"""
import sys
sys.path[0:0] = [""]
import bson
import pymongo
if not pymongo.has_c() or not bson.has_c():
sys.exit("could not load C extensions")<|fim▁end|> | |
<|file_name|>InputReader.cpp<|end_file_name|><|fim▁begin|>#include "stdafx.h"
#include "InputReader.h"
#include "graph.h"
#include <iostream>
#include <string>
using namespace std;
InputReader::InputReader(const char* fileName, const char* stFileName)
{
in.open(fileName);
if(!in.is_open())
{
cout<<"Input file "<<fileName<<" doesn't exist!"<<endl;
exit(1);
}
stIn.open(stFileName);<|fim▁hole|> {
cout<<"Input file "<<stFileName<<" doesn't exist!"<<endl;
in.close();
exit(1);
}
}
InputReader::~InputReader()
{
in.close();
stIn.close();
}
void InputReader::ReadFirstLine()
{
in>>strLine;
assert(strLine[0] == 'g');
in>>strLine;
assert(strLine[0] == '#');
in>> gId;
}
bool InputReader::ReadGraph(Graph &g)
{
ReadFirstLine();
if(gId == 0)
{
return false; /*ÒѾûÓÐͼ*/
}
g.nId = gId;
in>>strLine;
assert(strLine[0] == 's'); /*¶ÁÈ¡¶¥µãÊýºÍ±ßÊý*/
in>>g.nV>>g.nE;
assert(g.nV < MAX); /*·ÀÖ¹ÁÚ½Ó¾ØÕó´óС²»¹»ÓÃ*/
/*ÏÂÃæ¶ÁÈ¡±ßµÄÐÅÏ¢*/
int u,v; /*u,vÊÇÒ»Ìõ±ßµÄÁ½¸ö¶¥µã*/
for(int i = 1; i <= g.nE; i++)
{
in>>strLine;
assert(strLine[0] == 'e');
in>>u>>v; /*×¢ÒâÕâ¸ö²»ÄÜÓëÏÂÃæµÄдµ½Ò»Æð*/
in>>g.matrix[u][v].iC>>g.matrix[u][v].dP>>g.matrix[u][v].iLabel;
}
return true;
}
void InputReader::ReadSourceSink(int &s, int &t)
{
stIn>>s>>t;
}<|fim▁end|> | if(!stIn.is_open()) |
<|file_name|>IPCFutexPageQueue.cpp<|end_file_name|><|fim▁begin|>#include "IPCFutexPageQueue.h"
#include "IPCCheck.h"
#include "IPCException.h"
#include "IPCLog.h"
#include "IPCType.h"
#include "futex.h"
#include <errno.h>
#include <sched.h>
#include <string.h>
#include <sys/mman.h>
#include <time.h>
#include <unistd.h>
IPCFutexPageQueue::IPCFutexPageQueue(void* sharedMemory, size_t s, size_t id)
: m_currentWrite(id)
, m_currentRead(id ^ 1)
, m_pageSize(s / m_pagesCount)
, m_sharedMemory(sharedMemory)
, m_tid(gettid())
{
IPC_DCHECK(s == ipc_size);
IPC_LOGD("id: %zu", id);
for (int i = m_currentWrite; i < m_pagesCount; i += 2) {
uint32_t* data = static_cast<uint32_t*>(getPage(i));
data[1] |= m_finishTag;
}
lock(m_currentWrite, true);
}
IPCFutexPageQueue::~IPCFutexPageQueue()
{<|fim▁hole|> uint32_t* data = static_cast<uint32_t*>(getPage(m_currentWrite));
data[1] = sizeof(uint32_t) * 2;
data[2] = MSG_TERMINATE;
data[3] = static_cast<uint32_t>(IPCType::END);
try {
unlock(m_currentWrite);
} catch (IPCException& e) {
IPC_LOGE("%s", e.msg());
}
munmap(m_sharedMemory, m_pageSize << 2);
}
void IPCFutexPageQueue::stepWrite()
{
IPC_LOGD("stepWrite");
clearFinishedTag();
size_t current = m_currentWrite;
m_currentWrite = step(m_currentWrite);
lock(m_currentWrite, true);
unlock(current);
}
void IPCFutexPageQueue::unlock(size_t id)
{
IPC_LOGD("unlock: %zu", id);
volatile uint32_t* pageStart = static_cast<volatile uint32_t*>(getPage(id));
uint32_t l = m_tid;
if (__atomic_compare_exchange_n(pageStart, &l, 0,
false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)) {
return;
}
if ((l & FUTEX_WAITERS) != 0) {
int futexReturn = __futex(pageStart, FUTEX_UNLOCK_PI, 0, nullptr);
if (futexReturn == -1) {
throw IPCException("failed to futex unlock %s", strerror(errno));
}
return;
} else if ((l & FUTEX_TID_MASK) != m_tid) {
throw IPCException("l is not equal to tid: %d %d", l, m_tid);
}
throw IPCException("expected lock value");
}
void IPCFutexPageQueue::lock(size_t id, bool checkFinish)
{
volatile uint32_t* pageStart = static_cast<volatile uint32_t*>(getPage(id));
uint32_t l = m_tid;
uint32_t expected = 0;
IPC_LOGD("lock: %zu", id);
// wait for the finished tag;
if (checkFinish) {
while (true) {
uint32_t finishedTag = pageStart[1];
if (finishedTag == m_finishTag) {
break;
}
struct timespec waitTime = { m_timeoutSec, 0 };
int futexReturn = __futex(pageStart + 1, FUTEX_WAIT, 0, &waitTime);
if (futexReturn == -1) {
int myerrno = errno;
if (myerrno == EINTR)
continue;
else if (myerrno == ETIMEDOUT) {
throw IPCException("IPCFutexPageQueue::lock timeout when waiting for finished");
} else if (myerrno == EAGAIN) {
// become m_finishTag when enter the linux kernel.
break;
} else {
throw IPCException("IPCException::lock waiting for finished tag %u: %s", pageStart[1], strerror(myerrno));
}
}
break;
}
}
if (__atomic_compare_exchange_n(pageStart, &expected, l,
false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)) {
return;
}
errno = 0;
while (true) {
int futexReturn = __futex(pageStart, FUTEX_LOCK_PI, 1, nullptr);
if (futexReturn == -1 && errno != EINTR) {
volatile uint32_t* pageStart0 = static_cast<volatile uint32_t*>(getPage(0));
volatile uint32_t* pageStart1 = static_cast<volatile uint32_t*>(getPage(1));
volatile uint32_t* pageStart2 = static_cast<volatile uint32_t*>(getPage(2));
volatile uint32_t* pageStart3 = static_cast<volatile uint32_t*>(getPage(3));
throw IPCException("futex lock pi failed: %s, %x %x (%x %x %x %x)", strerror(errno), *pageStart, m_tid, *pageStart0, *pageStart1, *pageStart2, *pageStart3);
}
if (futexReturn == -1)
continue;
break;
}
l = *pageStart;
if ((l & FUTEX_OWNER_DIED)) {
unlock(id);
throw IPCException("original owner has die");
}
}
void* IPCFutexPageQueue::getPage(size_t id)
{
size_t offset = id * m_pageSize;
return static_cast<char*>(m_sharedMemory) + offset;
}
void IPCFutexPageQueue::lockReadPage()
{
IPC_LOGD("lockReadPage");
uint32_t* pageStart = static_cast<uint32_t*>(getPage(m_currentRead));
if (!*pageStart) {
// this page should be locked.
}
lock(m_currentRead, false);
}
void IPCFutexPageQueue::unlockReadPageAndStep()
{
IPC_LOGD("unlockReadPageAndStep");
setFinishedTag();
unlock(m_currentRead);
m_currentRead = step(m_currentRead);
}
void IPCFutexPageQueue::spinWaitPeer()
{
volatile uint32_t* pageStart = static_cast<volatile uint32_t*>(getPage(m_currentRead));
struct timespec start;
clock_gettime(CLOCK_MONOTONIC, &start);
while (!*pageStart) {
sched_yield();
struct timespec now;
clock_gettime(CLOCK_MONOTONIC, &now);
if ((now.tv_sec - start.tv_sec) > m_timeoutSec)
throw IPCException("spinWaitPeer timeout");
}
}
void IPCFutexPageQueue::setFinishedTag()
{
uint32_t* pageStart = static_cast<uint32_t*>(getPage(m_currentRead));
uint32_t expected = 0;
if (__atomic_compare_exchange_n(pageStart + 1, &expected, m_finishTag,
false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)) {
IPC_LOGD("setFinishedTag:waking writer");
__futex(pageStart + 1, FUTEX_WAKE, 1, nullptr);
return;
} else {
IPC_LOGD("setFinishedTag unexpected value: %u", expected);
}
}
void IPCFutexPageQueue::clearFinishedTag()
{
uint32_t* pageRead = static_cast<uint32_t*>(getPage(m_currentWrite));
pageRead[1] = 0;
__atomic_thread_fence(__ATOMIC_SEQ_CST);
}<|fim▁end|> | // build a terminate msg. |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![crate_id = "playgroundrs"]
#![crate_type = "lib"]
extern crate std;
extern crate core;
extern crate rand;
pub mod heap;
pub mod util;
pub mod sort;<|fim▁hole|><|fim▁end|> | pub mod test; |
<|file_name|>SwingConsole.java<|end_file_name|><|fim▁begin|>package toncc;
import javax.swing.*;
/** Utility class to run JFrame-based GUI classes.
*
* @author Bruce Eckel, Giacomo Parolini
*/
class SwingConsole {
private static void prepare(final JFrame f) {
f.setTitle(f.getClass().getSimpleName());
f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
}
public static void run(final JFrame f,final int width,final int height) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
prepare(f);
f.setSize(width,height);
f.setVisible(true);
}
});
}
/** Don't manually set size, but pack. */
public static void run(final JFrame f) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
prepare(f);
f.pack();
f.setVisible(true);
}
});
}
/** Don't manually set size, but pack. */
public static void run(final JFrame f,final String title) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
f.setTitle(title);
f.pack();
f.setVisible(true);
}
});
}
public static void run(final JFrame f,final int width,final int height,final String title) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
f.setTitle(title);
f.setSize(width,height);
f.setVisible(true);
}
});
}
public static void runFullScreen(final JFrame f) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
prepare(f);
f.setExtendedState(JFrame.MAXIMIZED_BOTH);
f.setVisible(true);
}
});
}<|fim▁hole|>
public static void setSystemLookAndFeel() {
try {
// Set system L&F
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch(Exception ee) {
System.err.println("Caught exception while setting LookAndFeel: "+ee);
}
}
}<|fim▁end|> | |
<|file_name|>connection.js<|end_file_name|><|fim▁begin|>const EventEmitter = require("events");
const ssh2 = require("ssh2");
const Client = ssh2.Client;
class Connection extends EventEmitter {
constructor (connectOptions, dimensions) {
this.connectOptions = connectOptions;
this.dimensions = dimensions;
this.errorCallback = errorCallback;
if (!this.errorCallback) {
this.errorCallback = function () {};
}
this.stream = null;
this.connection = new Client();
}
ready () {
this.emit("info", "Client is ready");
connection.shell({
cols: this.dimensions.cols,
rows: this.dimensions.rows
}, this.onConnect);
}
connect (connectOptions = null) {
if (connectOptions)
this.connectOptions = connectOptions;
this.connection.connect(this.connectOptions);
}
resize (cols, rows) {
if (this.stream) {
// Height and width are set to the defaults because their values don't seem to actually matter
this.stream.setWindow(rows, cols, 480, 640);
}<|fim▁hole|> else {
this.emit("info", "Connection not established! (resize)");
console.warn("Connection not established! (resize)");
}
}
write (data) {
if (this.stream !== null) {
this.stream.write(data);
}
else {
this.emit("info", "Connection not established! (write)");
console.warn("Connection not established! (write)");
}
}
// Events
onConnect (err, stream) {
if (err)
return errorCallback();
this.emit("info", "Connection successful");
this.stream = stream;
stream
.on("close", this.onClose.bind(this))
.on("data", this.onData.bind(this))
.stderr.on("data", onErrorData.bind(this));
}
onData (data) {
this.emit("data", data);
this.emit("stdout", data);
}
onErrorData (data) {
this.emit("data", data);
this.emit("stderr", data);
}
onClose () {
this.emit("info", "Connection stream closed");
this.connection.end();
this.stream = null;
}
}
module.exports = Connection;<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2016-2018 Bruce Stenning. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,<|fim▁hole|>// AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
pub mod renderer;
pub mod renderergl;
#[macro_use]
pub mod renderervk;
pub mod shader;
pub mod shaderglsl;
pub mod shaderspirv;
pub mod texture;
pub mod texturegl;
pub mod texturevk;
pub mod rendertarget;
pub mod rendertargetgl;
pub mod rendertargetvk;
pub mod uniformbuffer;
pub mod uniformbuffergl;
pub mod uniformbuffervk;
pub mod descriptorset;
pub mod descriptorsetgl;
pub mod descriptorsetvk;
pub mod image;
pub mod resources;<|fim▁end|> | // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
// OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.<|fim▁hole|>// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Database migrations.
pub mod state;
pub mod blocks;
pub mod extras;
mod v9;
pub use self::v9::ToV9;
pub use self::v9::Extract;
mod v10;
pub use self::v10::ToV10;<|fim▁end|> |
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by |
<|file_name|>dashboard-view.js<|end_file_name|><|fim▁begin|>import '@polymer/iron-icon/iron-icon.js';
import '@polymer/iron-icons/iron-icons.js';
import '@polymer/paper-card/paper-card.js';
import '@polymer/paper-ripple/paper-ripple.js';
import '@polymer/paper-item/paper-icon-item.js';
import '@polymer/paper-icon-button/paper-icon-button.js';
import {html, PolymerElement} from '@polymer/polymer';
import css from './dashboard-view.css';
import template from './dashboard-view.pug';
import './card-styles.js';
import './iframe-link.js';
import './notebooks-card.js';
import './pipelines-card.js';
import './resource-chart.js';
import {getGCPData} from './resources/cloud-platform-data.js';
import utilitiesMixin from './utilities-mixin.js';
export class DashboardView extends utilitiesMixin(PolymerElement) {
static get template() {
return html([`
<style include="card-styles"><|fim▁hole|> ${template()}
`]);
}
/**
* Object describing property-related metadata used by Polymer features
*/
static get properties() {
return {
documentationItems: Array,
quickLinks: Array,
namespace: {
type: Object,
observer: '_namespaceChanged',
},
platformDetails: Object,
platformInfo: {
type: Object,
observer: '_platformInfoChanged',
},
};
}
/**
* Observer for platformInfo property
*/
_platformInfoChanged() {
if (this.platformInfo && this.platformInfo.providerName === 'gce') {
this.platformName = 'GCP';
const pieces = this.platformInfo.provider.split('/');
let gcpProject = '';
if (pieces.length >= 3) {
gcpProject = pieces[2];
}
this.platformDetails = getGCPData(gcpProject);
}
}
/**
* Rewrites the links adding the namespace as a query parameter.
* @param {namespace} namespace
*/
_namespaceChanged(namespace) {
this.quickLinks.map((quickLink) => {
quickLink.link = this.buildHref(quickLink.link, {ns: namespace});
return quickLink;
});
// We need to deep-copy and re-assign in order to trigger the
// re-rendering of the component
this.quickLinks = JSON.parse(JSON.stringify(this.quickLinks));
}
}
customElements.define('dashboard-view', DashboardView);<|fim▁end|> | ${css.toString()}
</style> |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>
use system::syscall::sys_iopl;
use power::reset;
mod power;
fn main() {
unsafe { sys_iopl(3).unwrap() };
println!("Performing reset");
reset();
}<|fim▁end|> | extern crate system; |
<|file_name|>_rb.cpp<|end_file_name|><|fim▁begin|>///////////////////////////////////////////////////////////////////////////////
//
// File : $Id: _rb.cpp 27 2006-05-20 19:31:15Z mbabuskov $
// Subject : IBPP, internal RB class implementation
//
///////////////////////////////////////////////////////////////////////////////
//
// (C) Copyright 2000-2006 T.I.P. Group S.A. and the IBPP Team (www.ibpp.org)
//
// The contents of this file are subject to the IBPP License (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at http://www.ibpp.org or in the 'license.txt'
// file which must have been distributed along with this file.
//
// This software, distributed under the License, is distributed on an "AS IS"
// basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
// License for the specific language governing rights and limitations
// under the License.
//
///////////////////////////////////////////////////////////////////////////////
//
// COMMENTS
// * RB == Result Block/Buffer, see Interbase 6.0 C-API
// * Tabulations should be set every four characters when editing this file.
//
///////////////////////////////////////////////////////////////////////////////
#ifdef _MSC_VER
#pragma warning(disable: 4786 4996)
#ifndef _DEBUG
#pragma warning(disable: 4702)
#endif
#endif
#include "_ibpp.h"
#ifdef HAS_HDRSTOP
#pragma hdrstop
#endif
using namespace ibpp_internals;
char* RB::FindToken(char token)
{
char* p = mBuffer;
while (*p != isc_info_end)
{
int len;
if (*p == token) return p;
len = (*gds.Call()->m_vax_integer)(p+1, 2);
p += (len + 3);
}
return 0;
}
char* RB::FindToken(char token, char subtoken)
{
char* p = mBuffer;
while (*p != isc_info_end)
{
int len;
if (*p == token)
{
// Found token, now find subtoken
int inlen = (*gds.Call()->m_vax_integer)(p+1, 2);
p += 3;
while (inlen > 0)
{
if (*p == subtoken) return p;
len = (*gds.Call()->m_vax_integer)(p+1, 2);
p += (len + 3);
inlen -= (len + 3);
}
return 0;
}
len = (*gds.Call()->m_vax_integer)(p+1, 2);
p += (len + 3);
}
return 0;
}
int RB::GetValue(char token)
{
int value;
int len;
char* p = FindToken(token);
if (p == 0)
throw LogicExceptionImpl("RB::GetValue", _("Token not found."));
len = (*gds.Call()->m_vax_integer)(p+1, 2);
if (len == 0) value = 0;
else value = (*gds.Call()->m_vax_integer)(p+3, (short)len);
return value;
}
int RB::GetCountValue(char token)
{
// Specifically used on tokens like isc_info_insert_count and the like
// which return detailed counts per relation. We sum up the values.
int value;
int len;
char* p = FindToken(token);
if (p == 0)
throw LogicExceptionImpl("RB::GetCountValue", _("Token not found."));
// len is the number of bytes in the following array
len = (*gds.Call()->m_vax_integer)(p+1, 2);
p += 3;
value = 0;
while (len > 0)
{
// Each array item is 6 bytes : 2 bytes for the relation_id which
// we skip, and 4 bytes for the count value which we sum up accross
// all tables.
value += (*gds.Call()->m_vax_integer)(p+2, 4);
p += 6;
len -= 6;
}
return value;
}
int RB::GetValue(char token, char subtoken)
{
int value;
int len;
char* p = FindToken(token, subtoken);
if (p == 0)
throw LogicExceptionImpl("RB::GetValue", _("Token/Subtoken not found."));
len = (*gds.Call()->m_vax_integer)(p+1, 2);
if (len == 0) value = 0;
else value = (*gds.Call()->m_vax_integer)(p+3, (short)len);
return value;
}
bool RB::GetBool(char token)
{
int value;
char* p = FindToken(token);
if (p == 0)
throw LogicExceptionImpl("RB::GetBool", _("Token not found."));
value = (*gds.Call()->m_vax_integer)(p+1, 4);
return value == 0 ? false : true;
}
int RB::GetString(char token, std::string& data)
{
int len;
char* p = FindToken(token);
if (p == 0)
throw LogicExceptionImpl("RB::GetString", _("Token not found."));
len = (*gds.Call()->m_vax_integer)(p+1, 2);<|fim▁hole|>
void RB::Reset()
{
delete [] mBuffer;
mBuffer = new char [mSize];
memset(mBuffer, 255, mSize);
}
RB::RB()
{
mSize = 1024;
mBuffer = new char [1024];
memset(mBuffer, 255, mSize);
}
RB::RB(int Size)
{
mSize = Size;
mBuffer = new char [Size];
memset(mBuffer, 255, mSize);
}
RB::~RB()
{
try { delete [] mBuffer; }
catch (...) { }
}
//
// EOF
//<|fim▁end|> | data = std::string(p+3, len);
return len;
} |
<|file_name|>callbacks.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras callbacks: utilities called at certain points during model training.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import deque
from collections import Iterable
from collections import OrderedDict
import csv
import json
import os
import time
import numpy as np
import six
from tensorflow.contrib.keras.python.keras import backend as K
from tensorflow.contrib.keras.python.keras.utils.generic_utils import Progbar
from tensorflow.contrib.tensorboard.plugins import projector
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary as tf_summary
from tensorflow.python.training import saver as saver_lib
# pylint: disable=g-import-not-at-top
try:
import requests
except ImportError:
requests = None
# pylint: enable=g-import-not-at-top
class CallbackList(object):
"""Container abstracting a list of callbacks.
Arguments:
callbacks: List of `Callback` instances.
queue_length: Queue length for keeping
running statistics over callback execution time.
"""
def __init__(self, callbacks=None, queue_length=10):
callbacks = callbacks or []
self.callbacks = [c for c in callbacks]
self.queue_length = queue_length
def append(self, callback):
self.callbacks.append(callback)
def set_params(self, params):
for callback in self.callbacks:
callback.set_params(params)
def set_model(self, model):
for callback in self.callbacks:
callback.set_model(model)
def on_epoch_begin(self, epoch, logs=None):
"""Called at the start of an epoch.
Arguments:
epoch: integer, index of epoch.
logs: dictionary of logs.
"""
logs = logs or {}
for callback in self.callbacks:
callback.on_epoch_begin(epoch, logs)
self._delta_t_batch = 0.
self._delta_ts_batch_begin = deque([], maxlen=self.queue_length)
self._delta_ts_batch_end = deque([], maxlen=self.queue_length)
def on_epoch_end(self, epoch, logs=None):
"""Called at the end of an epoch.
Arguments:
epoch: integer, index of epoch.
logs: dictionary of logs.
"""
logs = logs or {}
for callback in self.callbacks:
callback.on_epoch_end(epoch, logs)
def on_batch_begin(self, batch, logs=None):
"""Called right before processing a batch.
Arguments:
batch: integer, index of batch within the current epoch.
logs: dictionary of logs.
"""
logs = logs or {}
t_before_callbacks = time.time()
for callback in self.callbacks:
callback.on_batch_begin(batch, logs)
self._delta_ts_batch_begin.append(time.time() - t_before_callbacks)
delta_t_median = np.median(self._delta_ts_batch_begin)
if (self._delta_t_batch > 0. and
delta_t_median > 0.95 * self._delta_t_batch and delta_t_median > 0.1):
logging.warning(
'Method on_batch_begin() is slow compared '
'to the batch update (%f). Check your callbacks.' % delta_t_median)
self._t_enter_batch = time.time()
def on_batch_end(self, batch, logs=None):
"""Called at the end of a batch.
Arguments:
batch: integer, index of batch within the current epoch.
logs: dictionary of logs.
"""
logs = logs or {}
if not hasattr(self, '_t_enter_batch'):
self._t_enter_batch = time.time()
self._delta_t_batch = time.time() - self._t_enter_batch
t_before_callbacks = time.time()
for callback in self.callbacks:
callback.on_batch_end(batch, logs)
self._delta_ts_batch_end.append(time.time() - t_before_callbacks)
delta_t_median = np.median(self._delta_ts_batch_end)
if (self._delta_t_batch > 0. and
(delta_t_median > 0.95 * self._delta_t_batch and delta_t_median > 0.1)):
logging.warning(
'Method on_batch_end() is slow compared '
'to the batch update (%f). Check your callbacks.' % delta_t_median)
def on_train_begin(self, logs=None):
"""Called at the beginning of training.
Arguments:
logs: dictionary of logs.
"""
logs = logs or {}
for callback in self.callbacks:
callback.on_train_begin(logs)
def on_train_end(self, logs=None):
"""Called at the end of training.
Arguments:
logs: dictionary of logs.
"""
logs = logs or {}
for callback in self.callbacks:
callback.on_train_end(logs)
def __iter__(self):
return iter(self.callbacks)
class Callback(object):
"""Abstract base class used to build new callbacks.
# Properties
params: dict. Training parameters
(eg. verbosity, batch size, number of epochs...).
model: instance of `keras.models.Model`.
Reference of the model being trained.
The `logs` dictionary that callback methods
take as argument will contain keys for quantities relevant to
the current batch or epoch.
Currently, the `.fit()` method of the `Sequential` model class
will include the following quantities in the `logs` that
it passes to its callbacks:
on_epoch_end: logs include `acc` and `loss`, and
optionally include `val_loss`
(if validation is enabled in `fit`), and `val_acc`
(if validation and accuracy monitoring are enabled).
on_batch_begin: logs include `size`,
the number of samples in the current batch.
on_batch_end: logs include `loss`, and optionally `acc`
(if accuracy monitoring is enabled).
"""
def __init__(self):
self.validation_data = None
def set_params(self, params):
self.params = params
def set_model(self, model):
self.model = model
def on_epoch_begin(self, epoch, logs=None):
pass
def on_epoch_end(self, epoch, logs=None):
pass
def on_batch_begin(self, batch, logs=None):
pass
def on_batch_end(self, batch, logs=None):
pass
def on_train_begin(self, logs=None):
pass
def on_train_end(self, logs=None):
pass
class BaseLogger(Callback):
"""Callback that accumulates epoch averages of metrics.
This callback is automatically applied to every Keras model.
"""
def on_epoch_begin(self, epoch, logs=None):
self.seen = 0
self.totals = {}
def on_batch_end(self, batch, logs=None):
logs = logs or {}
batch_size = logs.get('size', 0)
self.seen += batch_size
for k, v in logs.items():
if k in self.totals:
self.totals[k] += v * batch_size
else:
self.totals[k] = v * batch_size
def on_epoch_end(self, epoch, logs=None):
if logs is not None:
for k in self.params['metrics']:
if k in self.totals:
# Make value available to next callbacks.
logs[k] = self.totals[k] / self.seen
class TerminateOnNaN(Callback):
"""Callback that terminates training when a NaN loss is encountered."""
def __init__(self):
super(TerminateOnNaN, self).__init__()
def on_batch_end(self, batch, logs=None):
logs = logs or {}
loss = logs.get('loss')
if loss is not None:
if np.isnan(loss) or np.isinf(loss):
print('Batch %d: Invalid loss, terminating training' % (batch))
self.model.stop_training = True
class ProgbarLogger(Callback):
"""Callback that prints metrics to stdout.
Arguments:
count_mode: One of "steps" or "samples".
Whether the progress bar should
count samples seens or steps (batches) seen.
Raises:
ValueError: In case of invalid `count_mode`.
"""
def __init__(self, count_mode='samples'):
super(ProgbarLogger, self).__init__()
if count_mode == 'samples':
self.use_steps = False
elif count_mode == 'steps':
self.use_steps = True
else:
raise ValueError('Unknown `count_mode`: ' + str(count_mode))
def on_train_begin(self, logs=None):
self.verbose = self.params['verbose']
self.epochs = self.params['epochs']
def on_epoch_begin(self, epoch, logs=None):
if self.verbose:
print('Epoch %d/%d' % (epoch + 1, self.epochs))
if self.use_steps:
target = self.params['steps']
else:
target = self.params['samples']
self.target = target
self.progbar = Progbar(target=self.target, verbose=self.verbose)
self.seen = 0
def on_batch_begin(self, batch, logs=None):
if self.seen < self.target:
self.log_values = []
def on_batch_end(self, batch, logs=None):
logs = logs or {}
batch_size = logs.get('size', 0)
if self.use_steps:
self.seen += 1
else:
self.seen += batch_size
for k in self.params['metrics']:
if k in logs:
self.log_values.append((k, logs[k]))
# Skip progbar update for the last batch;
# will be handled by on_epoch_end.
if self.verbose and self.seen < self.target:
self.progbar.update(self.seen, self.log_values)
def on_epoch_end(self, epoch, logs=None):
logs = logs or {}
for k in self.params['metrics']:
if k in logs:
self.log_values.append((k, logs[k]))
if self.verbose:
self.progbar.update(self.seen, self.log_values, force=True)
class History(Callback):
"""Callback that records events into a `History` object.
This callback is automatically applied to
every Keras model. The `History` object
gets returned by the `fit` method of models.
"""
def on_train_begin(self, logs=None):
self.epoch = []
self.history = {}
def on_epoch_end(self, epoch, logs=None):
logs = logs or {}
self.epoch.append(epoch)
for k, v in logs.items():
self.history.setdefault(k, []).append(v)
class ModelCheckpoint(Callback):
"""Save the model after every epoch.
`filepath` can contain named formatting options,
which will be filled the value of `epoch` and
keys in `logs` (passed in `on_epoch_end`).
For example: if `filepath` is `weights.{epoch:02d}-{val_loss:.2f}.hdf5`,
then the model checkpoints will be saved with the epoch number and
the validation loss in the filename.
Arguments:
filepath: string, path to save the model file.
monitor: quantity to monitor.
verbose: verbosity mode, 0 or 1.
save_best_only: if `save_best_only=True`,
the latest best model according to
the quantity monitored will not be overwritten.
mode: one of {auto, min, max}.
If `save_best_only=True`, the decision
to overwrite the current save file is made
based on either the maximization or the
minimization of the monitored quantity. For `val_acc`,
this should be `max`, for `val_loss` this should
be `min`, etc. In `auto` mode, the direction is
automatically inferred from the name of the monitored quantity.
save_weights_only: if True, then only the model's weights will be
saved (`model.save_weights(filepath)`), else the full model
is saved (`model.save(filepath)`).
period: Interval (number of epochs) between checkpoints.
"""
def __init__(self,
filepath,
monitor='val_loss',
verbose=0,
save_best_only=False,
save_weights_only=False,
mode='auto',
period=1):
super(ModelCheckpoint, self).__init__()
self.monitor = monitor
self.verbose = verbose
self.filepath = filepath
self.save_best_only = save_best_only
self.save_weights_only = save_weights_only
self.period = period
self.epochs_since_last_save = 0
if mode not in ['auto', 'min', 'max']:
logging.warning('ModelCheckpoint mode %s is unknown, '
'fallback to auto mode.' % (mode))
mode = 'auto'
if mode == 'min':
self.monitor_op = np.less
self.best = np.Inf
elif mode == 'max':
self.monitor_op = np.greater
self.best = -np.Inf
else:
if 'acc' in self.monitor or self.monitor.startswith('fmeasure'):
self.monitor_op = np.greater
self.best = -np.Inf
else:
self.monitor_op = np.less
self.best = np.Inf
def on_epoch_end(self, epoch, logs=None):
logs = logs or {}
self.epochs_since_last_save += 1
if self.epochs_since_last_save >= self.period:
self.epochs_since_last_save = 0
filepath = self.filepath.format(epoch=epoch, **logs)
if self.save_best_only:
current = logs.get(self.monitor)
if current is None:
logging.warning('Can save best model only with %s available, '
'skipping.' % (self.monitor))
else:
if self.monitor_op(current, self.best):
if self.verbose > 0:
print('Epoch %05d: %s improved from %0.5f to %0.5f,'
' saving model to %s' % (epoch, self.monitor, self.best,
current, filepath))
self.best = current
if self.save_weights_only:
self.model.save_weights(filepath, overwrite=True)
else:
self.model.save(filepath, overwrite=True)
else:
if self.verbose > 0:
print('Epoch %05d: %s did not improve' % (epoch, self.monitor))
else:
if self.verbose > 0:
print('Epoch %05d: saving model to %s' % (epoch, filepath))
if self.save_weights_only:
self.model.save_weights(filepath, overwrite=True)
else:
self.model.save(filepath, overwrite=True)
class EarlyStopping(Callback):
"""Stop training when a monitored quantity has stopped improving.
Arguments:
monitor: quantity to be monitored.
min_delta: minimum change in the monitored quantity
to qualify as an improvement, i.e. an absolute
change of less than min_delta, will count as no
improvement.
patience: number of epochs with no improvement
after which training will be stopped.
verbose: verbosity mode.
mode: one of {auto, min, max}. In `min` mode,
training will stop when the quantity
monitored has stopped decreasing; in `max`
mode it will stop when the quantity
monitored has stopped increasing; in `auto`
mode, the direction is automatically inferred
from the name of the monitored quantity.
"""
def __init__(self,
monitor='val_loss',
min_delta=0,
patience=0,
verbose=0,
mode='auto'):
super(EarlyStopping, self).__init__()
self.monitor = monitor
self.patience = patience
self.verbose = verbose
self.min_delta = min_delta
self.wait = 0
self.stopped_epoch = 0
if mode not in ['auto', 'min', 'max']:
logging.warning('EarlyStopping mode %s is unknown, '
'fallback to auto mode.' % (self.mode))
mode = 'auto'
if mode == 'min':
self.monitor_op = np.less
elif mode == 'max':
self.monitor_op = np.greater
else:
if 'acc' in self.monitor or self.monitor.startswith('fmeasure'):
self.monitor_op = np.greater
else:
self.monitor_op = np.less
if self.monitor_op == np.greater:
self.min_delta *= 1
else:
self.min_delta *= -1
def on_train_begin(self, logs=None):
# Allow instances to be re-used
self.wait = 0
self.stopped_epoch = 0
self.best = np.Inf if self.monitor_op == np.less else -np.Inf
def on_epoch_end(self, epoch, logs=None):
current = logs.get(self.monitor)
if current is None:
logging.warning('Early stopping requires %s available!' % (self.monitor))
if self.monitor_op(current - self.min_delta, self.best):
self.best = current
self.wait = 0
else:
if self.wait >= self.patience:
self.stopped_epoch = epoch
self.model.stop_training = True
self.wait += 1
def on_train_end(self, logs=None):
if self.stopped_epoch > 0 and self.verbose > 0:
print('Epoch %05d: early stopping' % (self.stopped_epoch))
class RemoteMonitor(Callback):
  """Callback used to stream events to a server.
  Requires the `requests` library.
  Events are sent to `root + '/publish/epoch/end/'` by default as HTTP
  POST requests, with a `data` argument holding a JSON-encoded dictionary
  of event data.
  Arguments:
      root: String; root url of the target server.
      path: String; path relative to `root` to which the events will be sent.
      field: String; JSON field under which the data will be stored.
      headers: Dictionary; optional custom HTTP headers.
          Defaults to:
          `{'Accept': 'application/json', 'Content-Type': 'application/json'}`
  """

  def __init__(self,
               root='http://localhost:9000',
               path='/publish/epoch/end/',
               field='data',
               headers=None):
    super(RemoteMonitor, self).__init__()
    self.root = root
    self.path = path
    self.field = field
    self.headers = headers if headers is not None else {
        'Accept': 'application/json',
        'Content-Type': 'application/json'
    }

  def on_epoch_end(self, epoch, logs=None):
    """POST the epoch number plus all log entries to the remote server."""
    if requests is None:
      raise ImportError('RemoteMonitor requires the `requests` library.')
    # Log entries override 'epoch' if a metric of that name ever appears,
    # matching the original copy order.
    send = {'epoch': epoch}
    send.update(logs or {})
    try:
      requests.post(
          self.root + self.path, {self.field: json.dumps(send)},
          headers=self.headers)
    except requests.exceptions.RequestException:
      logging.warning('Warning: could not reach RemoteMonitor '
                      'root server at ' + str(self.root))
class LearningRateScheduler(Callback):
  """Learning rate scheduler.
  Arguments:
      schedule: a function mapping an epoch index (integer, indexed from 0)
          to a new learning rate (float).
  """

  def __init__(self, schedule):
    super(LearningRateScheduler, self).__init__()
    self.schedule = schedule

  def on_epoch_begin(self, epoch, logs=None):
    """Set the optimizer's learning rate from `schedule(epoch)`."""
    optimizer = self.model.optimizer
    if not hasattr(optimizer, 'lr'):
      raise ValueError('Optimizer must have a "lr" attribute.')
    new_lr = self.schedule(epoch)
    float_types = (float, np.float32, np.float64)
    if not isinstance(new_lr, float_types):
      raise ValueError('The output of the "schedule" function '
                       'should be float.')
    K.set_value(optimizer.lr, new_lr)
class TensorBoard(Callback):
  # pylint: disable=line-too-long
  """Tensorboard basic visualizations.
  This callback writes a log for TensorBoard, which allows
  you to visualize dynamic graphs of your training and test
  metrics, as well as activation histograms for the different
  layers in your model.
  TensorBoard is a visualization tool provided with TensorFlow.
  If you have installed TensorFlow with pip, you should be able
  to launch TensorBoard from the command line:
  ```
  tensorboard --logdir=/full_path_to_your_logs
  ```
  You can find more information about TensorBoard
  [here](https://www.tensorflow.org/get_started/summaries_and_tensorboard).
  Arguments:
      log_dir: the path of the directory where to save the log
          files to be parsed by TensorBoard.
      histogram_freq: frequency (in epochs) at which to compute activation
          and weight histograms for the layers of the model. If set to 0,
          histograms won't be computed. Validation data (or split) must be
          specified for histogram visualizations.
      write_graph: whether to visualize the graph in TensorBoard.
          The log file can become quite large when
          write_graph is set to True.
      write_grads: whether to visualize gradient histograms in TensorBoard.
          `histogram_freq` must be greater than 0.
      batch_size: size of batch of inputs to feed to the network
          for histograms computation.
      write_images: whether to write model weights to visualize as
          image in TensorBoard.
      embeddings_freq: frequency (in epochs) at which selected embedding
          layers will be saved.
      embeddings_layer_names: a list of names of layers to keep eye on. If
          None or empty list all the embedding layer will be watched.
      embeddings_metadata: a dictionary which maps layer name to a file name
          in which metadata for this embedding layer is saved. See the
          [details](https://www.tensorflow.org/how_tos/embedding_viz/#metadata_optional)
          about metadata files format. In case if the same metadata file is
          used for all embedding layers, string can be passed.
  """
  # pylint: enable=line-too-long

  def __init__(self,
               log_dir='./logs',
               histogram_freq=0,
               batch_size=32,
               write_graph=True,
               write_grads=False,
               write_images=False,
               embeddings_freq=0,
               embeddings_layer_names=None,
               embeddings_metadata=None):
    super(TensorBoard, self).__init__()
    self.log_dir = log_dir
    self.histogram_freq = histogram_freq
    # `merged` is built lazily in set_model() once the graph is known.
    self.merged = None
    self.write_graph = write_graph
    self.write_grads = write_grads
    self.write_images = write_images
    self.embeddings_freq = embeddings_freq
    self.embeddings_layer_names = embeddings_layer_names
    self.embeddings_metadata = embeddings_metadata or {}
    self.batch_size = batch_size

  def set_model(self, model):
    """Build summary ops, the file writer, and embedding saver for `model`."""
    self.model = model
    self.sess = K.get_session()
    if self.histogram_freq and self.merged is None:
      for layer in self.model.layers:
        for weight in layer.weights:
          tf_summary.histogram(weight.name, weight)
          if self.write_grads:
            grads = model.optimizer.get_gradients(model.total_loss, weight)
            tf_summary.histogram('{}_grad'.format(weight.name), grads)
          if self.write_images:
            # Reshape each weight tensor to a 4D image batch so it can be
            # rendered by tf_summary.image.
            w_img = array_ops.squeeze(weight)
            shape = K.int_shape(w_img)
            if len(shape) == 2:  # dense layer kernel case
              if shape[0] > shape[1]:
                w_img = array_ops.transpose(w_img)
                shape = K.int_shape(w_img)
              w_img = array_ops.reshape(w_img, [1, shape[0], shape[1], 1])
            elif len(shape) == 3:  # convnet case
              if K.image_data_format() == 'channels_last':
                # switch to channels_first to display
                # every kernel as a separate image
                w_img = array_ops.transpose(w_img, perm=[2, 0, 1])
                shape = K.int_shape(w_img)
              w_img = array_ops.reshape(w_img,
                                        [shape[0], shape[1], shape[2], 1])
            elif len(shape) == 1:  # bias case
              w_img = array_ops.reshape(w_img, [1, shape[0], 1, 1])
            else:
              # not possible to handle 3D convnets etc.
              continue
            shape = K.int_shape(w_img)
            assert len(shape) == 4 and shape[-1] in [1, 3, 4]
            tf_summary.image(weight.name, w_img)
        if hasattr(layer, 'output'):
          tf_summary.histogram('{}_out'.format(layer.name), layer.output)
    self.merged = tf_summary.merge_all()
    if self.write_graph:
      self.writer = tf_summary.FileWriter(self.log_dir, self.sess.graph)
    else:
      self.writer = tf_summary.FileWriter(self.log_dir)
    if self.embeddings_freq:
      embeddings_layer_names = self.embeddings_layer_names
      if not embeddings_layer_names:
        # Default: watch every Embedding layer in the model.
        embeddings_layer_names = [
            layer.name for layer in self.model.layers
            if type(layer).__name__ == 'Embedding'
        ]
      embeddings = {
          layer.name: layer.weights[0]
          for layer in self.model.layers if layer.name in embeddings_layer_names
      }
      self.saver = saver_lib.Saver(list(embeddings.values()))
      embeddings_metadata = {}
      if not isinstance(self.embeddings_metadata, str):
        embeddings_metadata = self.embeddings_metadata
      else:
        # A single metadata file name was given: apply it to every layer.
        embeddings_metadata = {
            layer_name: self.embeddings_metadata
            for layer_name in embeddings.keys()
        }
      config = projector.ProjectorConfig()
      self.embeddings_ckpt_path = os.path.join(self.log_dir,
                                               'keras_embedding.ckpt')
      for layer_name, tensor in embeddings.items():
        embedding = config.embeddings.add()
        embedding.tensor_name = tensor.name
        if layer_name in embeddings_metadata:
          embedding.metadata_path = embeddings_metadata[layer_name]
      projector.visualize_embeddings(self.writer, config)

  def on_epoch_end(self, epoch, logs=None):
    """Write histogram, embedding, and scalar summaries for this epoch."""
    logs = logs or {}
    if self.validation_data and self.histogram_freq:
      if epoch % self.histogram_freq == 0:
        # Feed validation data through the merged summary op in batches.
        # NOTE(review): assumes validation_data is laid out as
        # [inputs, targets, sample_weights(, learning_phase)] — matches the
        # tensors list built below; verify against the training loop.
        val_data = self.validation_data
        tensors = (
            self.model.inputs + self.model.targets + self.model.sample_weights)
        if self.model.uses_learning_phase:
          tensors += [K.learning_phase()]
        assert len(val_data) == len(tensors)
        val_size = val_data[0].shape[0]
        i = 0
        while i < val_size:
          step = min(self.batch_size, val_size - i)
          batch_val = []
          batch_val.append(val_data[0][i:i + step])
          batch_val.append(val_data[1][i:i + step])
          batch_val.append(val_data[2][i:i + step])
          if self.model.uses_learning_phase:
            # learning phase is a scalar flag, not sliced per batch
            batch_val.append(val_data[3])
          feed_dict = dict(zip(tensors, batch_val))
          result = self.sess.run([self.merged], feed_dict=feed_dict)
          summary_str = result[0]
          self.writer.add_summary(summary_str, epoch)
          i += self.batch_size
    if self.embeddings_freq and self.embeddings_ckpt_path:
      if epoch % self.embeddings_freq == 0:
        self.saver.save(self.sess, self.embeddings_ckpt_path, epoch)
    # Stream every scalar metric in `logs` (except bookkeeping keys).
    for name, value in logs.items():
      if name in ['batch', 'size']:
        continue
      summary = tf_summary.Summary()
      summary_value = summary.value.add()
      summary_value.simple_value = value.item()
      summary_value.tag = name
      self.writer.add_summary(summary, epoch)
    self.writer.flush()

  def on_train_end(self, _):
    self.writer.close()
class ReduceLROnPlateau(Callback):
  """Reduce learning rate when a metric has stopped improving.
  Models often benefit from reducing the learning rate by a factor
  of 2-10 once learning stagnates. This callback monitors a
  quantity and if no improvement is seen for a 'patience' number
  of epochs, the learning rate is reduced.
  Example:
  ```python
  reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2,
                                patience=5, min_lr=0.001)
  model.fit(X_train, Y_train, callbacks=[reduce_lr])
  ```
  Arguments:
      monitor: quantity to be monitored.
      factor: factor by which the learning rate will
          be reduced. new_lr = lr * factor
      patience: number of epochs with no improvement
          after which learning rate will be reduced.
      verbose: int. 0: quiet, 1: update messages.
      mode: one of {auto, min, max}. In `min` mode,
          lr will be reduced when the quantity
          monitored has stopped decreasing; in `max`
          mode it will be reduced when the quantity
          monitored has stopped increasing; in `auto`
          mode, the direction is automatically inferred
          from the name of the monitored quantity.
      epsilon: threshold for measuring the new optimum,
          to only focus on significant changes.
      cooldown: number of epochs to wait before resuming
          normal operation after lr has been reduced.
      min_lr: lower bound on the learning rate.
  """

  def __init__(self,
               monitor='val_loss',
               factor=0.1,
               patience=10,
               verbose=0,
               mode='auto',
               epsilon=1e-4,
               cooldown=0,
               min_lr=0):
    super(ReduceLROnPlateau, self).__init__()
    self.monitor = monitor
    if factor >= 1.0:
      raise ValueError('ReduceLROnPlateau ' 'does not support a factor >= 1.0.')
    self.factor = factor
    self.min_lr = min_lr
    self.epsilon = epsilon
    self.patience = patience
    self.verbose = verbose
    self.cooldown = cooldown
    self.cooldown_counter = 0  # Cooldown counter.
    self.wait = 0
    self.best = 0
    self.mode = mode
    # monitor_op is set by _reset() according to `mode`.
    self.monitor_op = None
    self._reset()

  def _reset(self):
    """Resets wait counter and cooldown counter.
    """
    if self.mode not in ['auto', 'min', 'max']:
      logging.warning('Learning Rate Plateau Reducing mode %s is unknown, '
                      'fallback to auto mode.' % (self.mode))
      self.mode = 'auto'
    # 'min' (or 'auto' on a non-accuracy metric): improvement means a
    # decrease of more than epsilon; otherwise an increase of more than epsilon.
    if (self.mode == 'min' or
        (self.mode == 'auto' and 'acc' not in self.monitor)):
      self.monitor_op = lambda a, b: np.less(a, b - self.epsilon)
      self.best = np.Inf
    else:
      self.monitor_op = lambda a, b: np.greater(a, b + self.epsilon)
      self.best = -np.Inf
    self.cooldown_counter = 0
    self.wait = 0
    # Tolerance used to decide whether lr is already effectively at min_lr.
    self.lr_epsilon = self.min_lr * 1e-4

  def on_train_begin(self, logs=None):
    self._reset()

  def on_epoch_end(self, epoch, logs=None):
    """Check the monitored metric and reduce the lr after `patience` bad epochs."""
    logs = logs or {}
    # Expose the current lr in the logs so other callbacks (e.g. CSVLogger)
    # can record it.
    logs['lr'] = K.get_value(self.model.optimizer.lr)
    current = logs.get(self.monitor)
    if current is None:
      logging.warning('Learning Rate Plateau Reducing requires %s available!' %
                      self.monitor)
    else:
      if self.in_cooldown():
        self.cooldown_counter -= 1
        self.wait = 0
      if self.monitor_op(current, self.best):
        self.best = current
        self.wait = 0
      elif not self.in_cooldown():
        if self.wait >= self.patience:
          old_lr = float(K.get_value(self.model.optimizer.lr))
          # Only reduce if there is still room above min_lr.
          if old_lr > self.min_lr + self.lr_epsilon:
            new_lr = old_lr * self.factor
            new_lr = max(new_lr, self.min_lr)
            K.set_value(self.model.optimizer.lr, new_lr)
            if self.verbose > 0:
              print('\nEpoch %05d: reducing learning rate to %s.' % (epoch,
                                                                     new_lr))
            self.cooldown_counter = self.cooldown
            self.wait = 0
        self.wait += 1

  def in_cooldown(self):
    return self.cooldown_counter > 0
class CSVLogger(Callback):
  """Callback that streams epoch results to a csv file.
  Supports all values that can be represented as a string,
  including 1D iterables such as np.ndarray.
  Example:
  ```python
  csv_logger = CSVLogger('training.log')
  model.fit(X_train, Y_train, callbacks=[csv_logger])
  ```
  Arguments:
      filename: filename of the csv file, e.g. 'run/log.csv'.
      separator: string used to separate elements in the csv file.
      append: True: append if file exists (useful for continuing
          training). False: overwrite existing file.
  """

  def __init__(self, filename, separator=',', append=False):
    self.sep = separator
    self.filename = filename
    self.append = append
    # DictWriter and its field names are created lazily on the first epoch.
    self.writer = None
    self.keys = None
    self.append_header = True
    # Python 2 on Windows needs binary mode to avoid the csv module writing
    # spurious blank lines.
    self.file_flags = 'b' if six.PY2 and os.name == 'nt' else ''
    super(CSVLogger, self).__init__()

  def on_train_begin(self, logs=None):
    """Open the csv file, appending or truncating as configured."""
    if self.append:
      if os.path.exists(self.filename):
        with open(self.filename, 'r' + self.file_flags) as f:
          # Only re-emit the header if the existing file is empty.
          self.append_header = not bool(len(f.readline()))
      self.csv_file = open(self.filename, 'a' + self.file_flags)
    else:
      self.csv_file = open(self.filename, 'w' + self.file_flags)

  def on_epoch_end(self, epoch, logs=None):
    """Write one csv row containing the epoch number and all log values."""
    logs = logs or {}

    def handle_value(k):
      # Render 1D iterables as a quoted "[a, b, ...]" cell; pass strings
      # and scalars (incl. 0-d ndarrays) through unchanged.
      is_zero_dim_ndarray = isinstance(k, np.ndarray) and k.ndim == 0
      if isinstance(k, six.string_types):
        return k
      elif isinstance(k, Iterable) and not is_zero_dim_ndarray:
        return '"[%s]"' % (', '.join(map(str, k)))
      else:
        return k

    if not self.writer:
      # First epoch: fix the column set from this epoch's log keys.
      self.keys = sorted(logs.keys())

      class CustomDialect(csv.excel):
        delimiter = self.sep

      self.writer = csv.DictWriter(
          self.csv_file,
          fieldnames=['epoch'] + self.keys,
          dialect=CustomDialect)
      if self.append_header:
        self.writer.writeheader()
    row_dict = OrderedDict({'epoch': epoch})
    row_dict.update((key, handle_value(logs[key])) for key in self.keys)
    self.writer.writerow(row_dict)
    self.csv_file.flush()

  def on_train_end(self, logs=None):
    self.csv_file.close()
    self.writer = None
class LambdaCallback(Callback):
  """Callback for creating simple, custom callbacks on-the-fly.
  This callback is constructed with anonymous functions that will be called
  at the appropriate time. Note that the callbacks expects positional
  arguments, as:
  - `on_epoch_begin` and `on_epoch_end` expect two positional arguments:
  `epoch`, `logs`
  - `on_batch_begin` and `on_batch_end` expect two positional arguments:
  `batch`, `logs`
  - `on_train_begin` and `on_train_end` expect one positional argument:
  `logs`
  Arguments:
      on_epoch_begin: called at the beginning of every epoch.
      on_epoch_end: called at the end of every epoch.
      on_batch_begin: called at the beginning of every batch.
      on_batch_end: called at the end of every batch.
      on_train_begin: called at the beginning of model training.
      on_train_end: called at the end of model training.
  Example:
  ```python
  # Print the batch number at the beginning of every batch.
  batch_print_callback = LambdaCallback(
      on_batch_begin=lambda batch,logs: print(batch))
  # Plot the loss after every epoch.
  import numpy as np
  import matplotlib.pyplot as plt
  plot_loss_callback = LambdaCallback(
      on_epoch_end=lambda epoch, logs: plt.plot(np.arange(epoch),
                                                logs['loss']))
  # Terminate some processes after having finished model training.
  processes = ...
  cleanup_callback = LambdaCallback(
      on_train_end=lambda logs: [
          p.terminate() for p in processes if p.is_alive()])
  model.fit(...,
            callbacks=[batch_print_callback,
                       plot_loss_callback,
                       cleanup_callback])
  ```
  """

  def __init__(self,
               on_epoch_begin=None,
               on_epoch_end=None,
               on_batch_begin=None,
               on_batch_end=None,
               on_train_begin=None,
               on_train_end=None,
               **kwargs):
    super(LambdaCallback, self).__init__()
    self.__dict__.update(kwargs)
    # Hooks left as None fall back to no-op lambdas with matching arity.
    noop_epoch = lambda epoch, logs: None
    noop_batch = lambda batch, logs: None
    noop_train = lambda logs: None
    self.on_epoch_begin = on_epoch_begin if on_epoch_begin is not None else noop_epoch
    self.on_epoch_end = on_epoch_end if on_epoch_end is not None else noop_epoch
    self.on_batch_begin = on_batch_begin if on_batch_begin is not None else noop_batch
    self.on_batch_end = on_batch_end if on_batch_end is not None else noop_batch
    self.on_train_begin = on_train_begin if on_train_begin is not None else noop_train
    self.on_train_end = on_train_end if on_train_end is not None else noop_train
Supports all values that can be represented as a string,
including 1D iterables such as np.ndarray.
|
<|file_name|>splitter.py<|end_file_name|><|fim▁begin|># (c) 2014 James Cammarata, <jcammarata@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import codecs
from ansible.errors import AnsibleParserError
from ansible.parsing.quoting import unquote
# Decode escapes adapted from rspeer's answer here:
# http://stackoverflow.com/questions/4020539/process-escape-sequences-in-a-string-in-python
_HEXCHAR = '[a-fA-F0-9]'
_ESCAPE_SEQUENCE_RE = re.compile(r'''
( \\U{0} # 8-digit hex escapes
| \\u{1} # 4-digit hex escapes
| \\x{2} # 2-digit hex escapes
| \\N\{{[^}}]+\}} # Unicode characters by name
| \\[\\'"abfnrtv] # Single-character escapes
)'''.format(_HEXCHAR*8, _HEXCHAR*4, _HEXCHAR*2), re.UNICODE | re.VERBOSE)
def _decode_escapes(s):
def decode_match(match):
return codecs.decode(match.group(0), 'unicode-escape')
return _ESCAPE_SEQUENCE_RE.sub(decode_match, s)
def parse_kv(args, check_raw=False):
    '''
    Convert a string of key/value items to a dict. If any free-form params
    are found and the check_raw option is set to True, they will be added
    to a new parameter called '_raw_params'. If check_raw is not enabled,
    they will simply be ignored.
    '''
    ### FIXME: args should already be a unicode string
    from ansible.utils.unicode import to_unicode
    args = to_unicode(args, nonstring='passthru')
    options = {}
    if args is not None:
        try:
            vargs = split_args(args)
        except ValueError as ve:
            if 'no closing quotation' in str(ve).lower():
                # BUG FIX: this previously raised `AnsibleParsingError`, a name
                # that does not exist (the module imports `AnsibleParserError`),
                # so the helpful message was masked by a NameError.
                raise AnsibleParserError("error parsing argument string, try quoting the entire line.")
            else:
                raise
        raw_params = []
        for orig_x in vargs:
            x = _decode_escapes(orig_x)
            if "=" in x:
                # Find the first unescaped '=' that splits key from value.
                pos = 0
                try:
                    while True:
                        pos = x.index('=', pos + 1)
                        if pos > 0 and x[pos - 1] != '\\':
                            break
                except ValueError:
                    # ran out of string, but we must have some escaped equals,
                    # so replace those and append this to the list of raw params
                    raw_params.append(x.replace('\\=', '='))
                    continue
                k = x[:pos]
                v = x[pos + 1:]
                # FIXME: make the retrieval of this list of shell/command
                # options a function, so the list is centralized
                if check_raw and k not in ('creates', 'removes', 'chdir', 'executable', 'warn'):
                    raw_params.append(orig_x)
                else:
                    options[k.strip()] = unquote(v.strip())
            else:
                raw_params.append(orig_x)
        # recombine the free-form params, if any were found, and assign
        # them to a special option for use later by the shell/command module
        if len(raw_params) > 0:
            options[u'_raw_params'] = ' '.join(raw_params)
    return options
def _get_quote_state(token, quote_char):
'''
the goal of this block is to determine if the quoted string
is unterminated in which case it needs to be put back together
'''
# the char before the current one, used to see if
# the current character is escaped
prev_char = None
for idx, cur_char in enumerate(token):
if idx > 0:
prev_char = token[idx-1]
if cur_char in '"\'' and prev_char != '\\':<|fim▁hole|> quote_char = cur_char
return quote_char
def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
'''
this function counts the number of opening/closing blocks for a
given opening/closing type and adjusts the current depth for that
block based on the difference
'''
num_open = token.count(open_token)
num_close = token.count(close_token)
if num_open != num_close:
cur_depth += (num_open - num_close)
if cur_depth < 0:
cur_depth = 0
return cur_depth
def split_args(args):
    '''
    Splits args on whitespace, but intelligently reassembles
    those that may have been split over a jinja2 block or quotes.
    When used in a remote module, we won't ever have to be concerned about
    jinja2 blocks, however this function is/will be used in the
    core portions as well before the args are templated.
    example input: a=b c="foo bar"
    example output: ['a=b', 'c="foo bar"']
    Basically this is a variation shlex that has some more intelligence for
    how Ansible needs to use it.

    Raises AnsibleParserError when quotes or jinja2 blocks are unbalanced.
    '''
    # BUG FIX: the final `return params` line carried a fused
    # "<|fim end|>" artifact that made the function a syntax error;
    # no other logic changed.

    # the list of params parsed out of the arg string
    # this is going to be the result value when we are done
    params = []
    # Initial split on white space
    args = args.strip()
    items = args.strip().split('\n')
    # iterate over the tokens, and reassemble any that may have been
    # split on a space inside a jinja2 block.
    # ex if tokens are "{{", "foo", "}}" these go together
    # These variables are used
    # to keep track of the state of the parsing, since blocks and quotes
    # may be nested within each other.
    quote_char = None
    inside_quotes = False
    print_depth = 0  # used to count nested jinja2 {{ }} blocks
    block_depth = 0  # used to count nested jinja2 {% %} blocks
    comment_depth = 0  # used to count nested jinja2 {# #} blocks
    # now we loop over each split chunk, coalescing tokens if the white space
    # split occurred within quotes or a jinja2 block of some kind
    for itemidx, item in enumerate(items):
        # we split on spaces and newlines separately, so that we
        # can tell which character we split on for reassembly
        # inside quotation characters
        tokens = item.strip().split(' ')
        line_continuation = False
        for idx, token in enumerate(tokens):
            # if we hit a line continuation character, but
            # we're not inside quotes, ignore it and continue
            # on to the next token while setting a flag
            if token == '\\' and not inside_quotes:
                line_continuation = True
                continue
            # store the previous quoting state for checking later
            was_inside_quotes = inside_quotes
            quote_char = _get_quote_state(token, quote_char)
            inside_quotes = quote_char is not None
            # multiple conditions may append a token to the list of params,
            # so we keep track with this flag to make sure it only happens once
            # append means add to the end of the list, don't append means concatenate
            # it to the end of the last token
            appended = False
            # if we're inside quotes now, but weren't before, append the token
            # to the end of the list, since we'll tack on more to it later
            # otherwise, if we're inside any jinja2 block, inside quotes, or we were
            # inside quotes (but aren't now) concat this token to the last param
            if inside_quotes and not was_inside_quotes:
                params.append(token)
                appended = True
            elif print_depth or block_depth or comment_depth or inside_quotes or was_inside_quotes:
                if idx == 0 and was_inside_quotes:
                    params[-1] = "%s%s" % (params[-1], token)
                elif len(tokens) > 1:
                    spacer = ''
                    if idx > 0:
                        spacer = ' '
                    params[-1] = "%s%s%s" % (params[-1], spacer, token)
                else:
                    params[-1] = "%s\n%s" % (params[-1], token)
                appended = True
            # if the number of paired block tags is not the same, the depth has changed, so we calculate that here
            # and may append the current token to the params (if we haven't previously done so)
            prev_print_depth = print_depth
            print_depth = _count_jinja2_blocks(token, print_depth, "{{", "}}")
            if print_depth != prev_print_depth and not appended:
                params.append(token)
                appended = True
            prev_block_depth = block_depth
            block_depth = _count_jinja2_blocks(token, block_depth, "{%", "%}")
            if block_depth != prev_block_depth and not appended:
                params.append(token)
                appended = True
            prev_comment_depth = comment_depth
            comment_depth = _count_jinja2_blocks(token, comment_depth, "{#", "#}")
            if comment_depth != prev_comment_depth and not appended:
                params.append(token)
                appended = True
            # finally, if we're at zero depth for all blocks and not inside quotes, and have not
            # yet appended anything to the list of params, we do so now
            if not (print_depth or block_depth or comment_depth) and not inside_quotes and not appended and token != '':
                params.append(token)
        # if this was the last token in the list, and we have more than
        # one item (meaning we split on newlines), add a newline back here
        # to preserve the original structure
        if len(items) > 1 and itemidx != len(items) - 1 and not line_continuation:
            params[-1] += '\n'
        # always clear the line continuation flag
        line_continuation = False
    # If we're done and things are not at zero depth or we're still inside quotes,
    # raise an error to indicate that the args were unbalanced
    if print_depth or block_depth or comment_depth or inside_quotes:
        raise AnsibleParserError("failed at splitting arguments, either an unbalanced jinja2 block or quotes: {}".format(args))
    return params
if cur_char == quote_char:
quote_char = None
else: |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import time
import random
import os
import os.path
import logging
import urlparse
import functools
import lms.lib.comment_client as cc
import django_comment_client.utils as utils
import django_comment_client.settings as cc_settings
from django.core import exceptions
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_POST, require_GET
from django.views.decorators import csrf
from django.core.files.storage import get_storage_class
from django.utils.translation import ugettext as _
from django.contrib.auth.models import User
from mitxmako.shortcuts import render_to_string
from courseware.courses import get_course_with_access, get_course_by_id
from course_groups.cohorts import get_cohort_id, is_commentable_cohorted
from django_comment_client.utils import JsonResponse, JsonError, extract, add_courseware_context
from django_comment_client.permissions import check_permissions_by_view, cached_has_permission
from django_comment_common.models import Role
from courseware.access import has_access
log = logging.getLogger(__name__)
def permitted(fn):
    """
    View decorator: look up the content referenced by thread_id/comment_id
    (if any) and only invoke the wrapped view when the requesting user passes
    the permission check; otherwise respond with a 401 JSON error.
    """
    @functools.wraps(fn)
    def wrapper(request, *args, **kwargs):
        if "thread_id" in kwargs:
            content = cc.Thread.find(kwargs["thread_id"]).to_dict()
        elif "comment_id" in kwargs:
            content = cc.Comment.find(kwargs["comment_id"]).to_dict()
        else:
            content = None
        if not check_permissions_by_view(request.user, kwargs['course_id'], content, request.view_name):
            return JsonError("unauthorized", status=401)
        return fn(request, *args, **kwargs)
    return wrapper
def ajax_content_response(request, course_id, content, template_name):
    """
    Render `content` through `template_name` and return a JSON payload with
    the rendered html, the sanitized content, and per-user annotations.
    """
    user_info = cc.User.from_django_user(request.user).to_dict()
    annotated_content_info = utils.get_annotated_content_info(course_id, content, request.user, user_info)
    html = render_to_string(template_name, {
        'course_id': course_id,
        'content': content,
    })
    return JsonResponse({
        'html': html,
        'content': utils.safe_content(content),
        'annotated_content_info': annotated_content_info,
    })
@require_POST
@login_required
@permitted
def create_thread(request, course_id, commentable_id):
    """
    Given a course and commentable ID, create the thread.

    Honors the course's anonymous-posting settings, cohorts the thread when
    the commentable is cohorted, and optionally auto-subscribes the author.
    Returns rendered html for ajax requests, otherwise a JSON response.
    """
    log.debug("Creating new thread in %r, id %r", course_id, commentable_id)
    course = get_course_with_access(request.user, course_id, 'load')
    post = request.POST
    # Anonymity flags are only honored when the course allows them.
    if course.allow_anonymous:
        anonymous = post.get('anonymous', 'false').lower() == 'true'
    else:
        anonymous = False
    if course.allow_anonymous_to_peers:
        anonymous_to_peers = post.get('anonymous_to_peers', 'false').lower() == 'true'
    else:
        anonymous_to_peers = False
    thread = cc.Thread(**extract(post, ['body', 'title', 'tags']))
    thread.update_attributes(**{
        'anonymous': anonymous,
        'anonymous_to_peers': anonymous_to_peers,
        'commentable_id': commentable_id,
        'course_id': course_id,
        'user_id': request.user.id,
    })
    user = cc.User.from_django_user(request.user)
    #kevinchugh because the new requirement is that all groups will be determined
    #by the group id in the request this all goes away
    #not anymore, only for admins
    # Cohort the thread if the commentable is cohorted.
    if is_commentable_cohorted(course_id, commentable_id):
        user_group_id = get_cohort_id(user, course_id)
        # TODO (vshnayder): once we have more than just cohorts, we'll want to
        # change this to a single get_group_for_user_and_commentable function
        # that can do different things depending on the commentable_id
        if cached_has_permission(request.user, "see_all_cohorts", course_id):
            # admins can optionally choose what group to post as
            group_id = post.get('group_id', user_group_id)
        else:
            # regular users always post with their own id.
            group_id = user_group_id
        if group_id:
            thread.update_attributes(group_id=group_id)
    thread.save()
    #patch for backward compatibility to comments service
    if not 'pinned' in thread.attributes:
        thread['pinned'] = False
    if post.get('auto_subscribe', 'false').lower() == 'true':
        user = cc.User.from_django_user(request.user)
        user.follow(thread)
    data = thread.to_dict()
    # Attach courseware context (e.g. location info) for rendering.
    add_courseware_context([data], course)
    if request.is_ajax():
        return ajax_content_response(request, course_id, data, 'discussion/ajax_create_thread.html')
    else:
        return JsonResponse(utils.safe_content(data))
@require_POST
@login_required
@permitted
def update_thread(request, course_id, thread_id):
    """
    Update an existing thread's body/title/tags from the POST payload.
    Used for both static and ajax submissions.
    """
    thread = cc.Thread.find(thread_id)
    thread.update_attributes(**extract(request.POST, ['body', 'title', 'tags']))
    thread.save()
    if not request.is_ajax():
        return JsonResponse(utils.safe_content(thread.to_dict()))
    return ajax_content_response(request, course_id, thread.to_dict(), 'discussion/ajax_update_thread.html')
def _create_comment(request, course_id, thread_id=None, parent_id=None):
    """
    Create a comment in the given course, attached either to a thread
    (`thread_id`) or to a parent comment (`parent_id`). Shared helper called
    by `create_comment` and `create_sub_comment`.
    """
    post = request.POST
    comment = cc.Comment(**extract(post, ['body']))
    course = get_course_with_access(request.user, course_id, 'load')

    def _post_flag(name, allowed):
        # A boolean flag from the form is honored only when the course
        # explicitly allows that behavior.
        return bool(allowed) and post.get(name, 'false').lower() == 'true'

    comment.update_attributes(**{
        'anonymous': _post_flag('anonymous', course.allow_anonymous),
        'anonymous_to_peers': _post_flag('anonymous_to_peers',
                                         course.allow_anonymous_to_peers),
        'user_id': request.user.id,
        'course_id': course_id,
        'thread_id': thread_id,
        'parent_id': parent_id,
    })
    comment.save()
    if _post_flag('auto_subscribe', True):
        cc.User.from_django_user(request.user).follow(comment.thread)
    if request.is_ajax():
        return ajax_content_response(request, course_id, comment.to_dict(), 'discussion/ajax_create_comment.html')
    return JsonResponse(utils.safe_content(comment.to_dict()))
@require_POST
@login_required
@permitted
def create_comment(request, course_id, thread_id):
    """
    given a course_id and thread_id, test for comment depth. if not too deep,
    call _create_comment to create the actual comment.
    """
    if cc_settings.MAX_COMMENT_DEPTH is not None:
        if cc_settings.MAX_COMMENT_DEPTH < 0:
            return JsonError("Comment level too deep")
    # BUG FIX: the delegation below was missing (the view implicitly
    # returned None); reconstructed to mirror create_sub_comment.
    return _create_comment(request, course_id, thread_id=thread_id)
@require_POST
@login_required
@permitted
def delete_thread(request, course_id, thread_id):
    """
    Delete the thread identified by `thread_id` and return its sanitized
    content. Ajax only.
    """
    doomed_thread = cc.Thread.find(thread_id)
    doomed_thread.delete()
    return JsonResponse(utils.safe_content(doomed_thread.to_dict()))
@require_POST
@login_required
@permitted
def update_comment(request, course_id, comment_id):
    """
    Update a comment's body from the POST payload. Handles both static and
    ajax submissions.
    """
    comment = cc.Comment.find(comment_id)
    comment.update_attributes(**extract(request.POST, ['body']))
    comment.save()
    if not request.is_ajax():
        return JsonResponse(utils.safe_content(comment.to_dict()))
    return ajax_content_response(request, course_id, comment.to_dict(), 'discussion/ajax_update_comment.html')
@require_POST
@login_required
@permitted
def endorse_comment(request, course_id, comment_id):
    """
    Toggle the endorsement state of a comment based on the 'endorsed' POST
    flag. Ajax only.
    """
    comment = cc.Comment.find(comment_id)
    endorsed_flag = request.POST.get('endorsed', 'false')
    comment.endorsed = endorsed_flag.lower() == 'true'
    comment.save()
    return JsonResponse(utils.safe_content(comment.to_dict()))
@require_POST
@login_required
@permitted
def openclose_thread(request, course_id, thread_id):
    """
    Open or close a thread based on the 'closed' POST flag, returning the
    sanitized content and the requesting user's abilities. Ajax only.
    """
    thread = cc.Thread.find(thread_id)
    thread.closed = request.POST.get('closed', 'false').lower() == 'true'
    thread.save()
    thread_data = thread.to_dict()
    return JsonResponse({
        'content': utils.safe_content(thread_data),
        'ability': utils.get_ability(course_id, thread_data, request.user),
    })
@require_POST
@login_required
@permitted
def create_sub_comment(request, course_id, comment_id):
"""
given a course_id and comment_id, create a response to a comment
after checking the max depth allowed, if allowed
"""
if cc_settings.MAX_COMMENT_DEPTH is not None:
if cc_settings.MAX_COMMENT_DEPTH <= cc.Comment.find(comment_id).depth:
return JsonError("Comment level too deep")
return _create_comment(request, course_id, parent_id=comment_id)
@require_POST
@login_required
@permitted
def delete_comment(request, course_id, comment_id):
"""
given a course_id and comment_id delete this comment
ajax only
"""
comment = cc.Comment.find(comment_id)
comment.delete()
return JsonResponse(utils.safe_content(comment.to_dict()))
@require_POST
@login_required
@permitted
def vote_for_comment(request, course_id, comment_id, value):
"""
given a course_id and comment_id,
"""
user = cc.User.from_django_user(request.user)
comment = cc.Comment.find(comment_id)
user.vote(comment, value)
return JsonResponse(utils.safe_content(comment.to_dict()))
@require_POST
@login_required
@permitted
def undo_vote_for_comment(request, course_id, comment_id):
"""
given a course id and comment id, remove vote
ajax only
"""
user = cc.User.from_django_user(request.user)
comment = cc.Comment.find(comment_id)
user.unvote(comment)
return JsonResponse(utils.safe_content(comment.to_dict()))
@require_POST
@login_required
@permitted
def vote_for_thread(request, course_id, thread_id, value):
"""
given a course id and thread id vote for this thread
ajax only
"""
user = cc.User.from_django_user(request.user)
thread = cc.Thread.find(thread_id)
user.vote(thread, value)
return JsonResponse(utils.safe_content(thread.to_dict()))
@require_POST
@login_required
@permitted
def flag_abuse_for_thread(request, course_id, thread_id):
"""
given a course_id and thread_id flag this thread for abuse
ajax only
"""
user = cc.User.from_django_user(request.user)
thread = cc.Thread.find(thread_id)
thread.flagAbuse(user, thread)
return JsonResponse(utils.safe_content(thread.to_dict()))
@require_POST
@login_required
@permitted
def un_flag_abuse_for_thread(request, course_id, thread_id):
"""
given a course id and thread id, remove abuse flag for this thread
ajax only
"""
user = cc.User.from_django_user(request.user)
course = get_course_by_id(course_id)
thread = cc.Thread.find(thread_id)
removeAll = cached_has_permission(request.user, 'openclose_thread', course_id) or has_access(request.user, course, 'staff')
thread.unFlagAbuse(user, thread, removeAll)
return JsonResponse(utils.safe_content(thread.to_dict()))
@require_POST
@login_required
@permitted
def flag_abuse_for_comment(request, course_id, comment_id):
"""
given a course and comment id, flag comment for abuse
ajax only
"""
user = cc.User.from_django_user(request.user)
comment = cc.Comment.find(comment_id)
comment.flagAbuse(user, comment)
return JsonResponse(utils.safe_content(comment.to_dict()))
@require_POST
@login_required
@permitted
def un_flag_abuse_for_comment(request, course_id, comment_id):
"""
given a course_id and comment id, unflag comment for abuse
ajax only
"""
user = cc.User.from_django_user(request.user)
course = get_course_by_id(course_id)
removeAll = cached_has_permission(request.user, 'openclose_thread', course_id) or has_access(request.user, course, 'staff')
comment = cc.Comment.find(comment_id)
comment.unFlagAbuse(user, comment, removeAll)
return JsonResponse(utils.safe_content(comment.to_dict()))
@require_POST
@login_required
@permitted
def undo_vote_for_thread(request, course_id, thread_id):
"""
given a course id and thread id, remove users vote for thread
ajax only
"""
user = cc.User.from_django_user(request.user)
thread = cc.Thread.find(thread_id)
user.unvote(thread)
return JsonResponse(utils.safe_content(thread.to_dict()))
@require_POST
@login_required
@permitted
def pin_thread(request, course_id, thread_id):
"""
given a course id and thread id, pin this thread
ajax only
"""
user = cc.User.from_django_user(request.user)
thread = cc.Thread.find(thread_id)
thread.pin(user, thread_id)
return JsonResponse(utils.safe_content(thread.to_dict()))
def un_pin_thread(request, course_id, thread_id):
"""
given a course id and thread id, remove pin from this thread
ajax only
"""
user = cc.User.from_django_user(request.user)
thread = cc.Thread.find(thread_id)
thread.un_pin(user, thread_id)
return JsonResponse(utils.safe_content(thread.to_dict()))
@require_POST
@login_required
@permitted
def follow_thread(request, course_id, thread_id):
user = cc.User.from_django_user(request.user)
thread = cc.Thread.find(thread_id)
user.follow(thread)
return JsonResponse({})
@require_POST
@login_required
@permitted
def follow_commentable(request, course_id, commentable_id):
"""
given a course_id and commentable id, follow this commentable
ajax only
"""
user = cc.User.from_django_user(request.user)
commentable = cc.Commentable.find(commentable_id)
user.follow(commentable)
return JsonResponse({})
@require_POST
@login_required
@permitted
def follow_user(request, course_id, followed_user_id):
user = cc.User.from_django_user(request.user)
followed_user = cc.User.find(followed_user_id)
user.follow(followed_user)
return JsonResponse({})
@require_POST
@login_required
@permitted
def unfollow_thread(request, course_id, thread_id):
"""
given a course id and thread id, stop following this thread
ajax only
"""
user = cc.User.from_django_user(request.user)
thread = cc.Thread.find(thread_id)
user.unfollow(thread)
return JsonResponse({})
@require_POST
@login_required
@permitted
def unfollow_commentable(request, course_id, commentable_id):
"""
given a course id and commentable id stop following commentable
ajax only
"""
user = cc.User.from_django_user(request.user)
commentable = cc.Commentable.find(commentable_id)
user.unfollow(commentable)
return JsonResponse({})
@require_POST
@login_required
@permitted
def unfollow_user(request, course_id, followed_user_id):
"""
given a course id and user id, stop following this user
ajax only
"""
user = cc.User.from_django_user(request.user)
followed_user = cc.User.find(followed_user_id)
user.unfollow(followed_user)
return JsonResponse({})
@require_POST
@login_required
@permitted
def update_moderator_status(request, course_id, user_id):
"""
given a course id and user id, check if the user has moderator
and send back a user profile
"""
is_moderator = request.POST.get('is_moderator', '').lower()
if is_moderator not in ["true", "false"]:
return JsonError("Must provide is_moderator as boolean value")
is_moderator = is_moderator == "true"
user = User.objects.get(id=user_id)
role = Role.objects.get(course_id=course_id, name="Moderator")
if is_moderator:
user.roles.add(role)
else:
user.roles.remove(role)
if request.is_ajax():
course = get_course_with_access(request.user, course_id, 'load')
discussion_user = cc.User(id=user_id, course_id=course_id)
context = {
'course': course,
'course_id': course_id,
'user': request.user,
'django_user': user,
'profiled_user': discussion_user.to_dict(),
}
return JsonResponse({
'html': render_to_string('discussion/ajax_user_profile.html', context)
})
else:
return JsonResponse({})
@require_GET
def search_similar_threads(request, course_id, commentable_id):
"""
given a course id and commentable id, run query given in text get param
of request
"""
text = request.GET.get('text', None)
if text:
query_params = {
'text': text,
'commentable_id': commentable_id,
}
threads = cc.search_similar_threads(course_id, recursive=False, query_params=query_params)
else:
theads = []
context = {'threads': map(utils.extend_content, threads)}
return JsonResponse({
'html': render_to_string('discussion/_similar_posts.html', context)
})
@require_GET
def tags_autocomplete(request, course_id):
value = request.GET.get('q', None)
results = []
if value:
results = cc.tags_autocomplete(value)
return JsonResponse(results)
@require_POST
@login_required
@csrf.csrf_exempt
def upload(request, course_id): # ajax upload file to a question or answer
"""view that handles file upload via Ajax
"""
# check upload permission
result = ''
error = ''
new_file_name = ''
try:
# TODO authorization
#may raise exceptions.PermissionDenied
#if request.user.is_anonymous():
# msg = _('Sorry, anonymous users cannot upload files')
# raise exceptions.PermissionDenied(msg)
#request.user.assert_can_upload_file()
# check file type
f = request.FILES['file-upload']
file_extension = os.path.splitext(f.name)[1].lower()
if not file_extension in cc_settings.ALLOWED_UPLOAD_FILE_TYPES:
file_types = "', '".join(cc_settings.ALLOWED_UPLOAD_FILE_TYPES)
msg = _("allowed file types are '%(file_types)s'") % \
{'file_types': file_types}
raise exceptions.PermissionDenied(msg)
# generate new file name
new_file_name = str(time.time()).replace('.', str(random.randint(0, 100000))) + file_extension
file_storage = get_storage_class()()
# use default storage to store file
file_storage.save(new_file_name, f)
# check file size
# byte
size = file_storage.size(new_file_name)
if size > cc_settings.MAX_UPLOAD_FILE_SIZE:
file_storage.delete(new_file_name)
msg = _("maximum upload file size is %(file_size)sK") % \
{'file_size': cc_settings.MAX_UPLOAD_FILE_SIZE}
raise exceptions.PermissionDenied(msg)
except exceptions.PermissionDenied, err:
error = unicode(err)
except Exception, err:
print err
logging.critical(unicode(err))
error = _('Error uploading file. Please contact the site administrator. Thank you.')
if error == '':
result = 'Good'
file_url = file_storage.url(new_file_name)
parsed_url = urlparse.urlparse(file_url)
file_url = urlparse.urlunparse(
urlparse.ParseResult(
parsed_url.scheme,
parsed_url.netloc,
parsed_url.path,
'', '', ''
)
)
else:
result = ''
file_url = ''
return JsonResponse({
'result': {
'msg': result,
'error': error,
'file_url': file_url,
}
})<|fim▁end|> | return _create_comment(request, course_id, thread_id=thread_id)
|
<|file_name|>mode-jsx.js<|end_file_name|><|fim▁begin|>ace.define("ace/mode/jsx", ["require", "exports", "module", "ace/lib/oop", "ace/mode/text", "ace/tokenizer", "ace/mode/jsx_highlight_rules", "ace/mode/matching_brace_outdent", "ace/mode/behaviour/cstyle", "ace/mode/folding/cstyle"], function (e, t, n) {
function l() {
this.HighlightRules = o, this.$outdent = new u, this.$behaviour = new a, this.foldingRules = new f
}
var r = e("../lib/oop"), i = e("./text").Mode, s = e("../tokenizer").Tokenizer, o = e("./jsx_highlight_rules").JsxHighlightRules, u = e("./matching_brace_outdent").MatchingBraceOutdent, a = e("./behaviour/cstyle").CstyleBehaviour, f = e("./folding/cstyle").FoldMode;
r.inherits(l, i), function () {
this.lineCommentStart = "//", this.blockComment = {start: "/*", end: "*/"}, this.getNextLineIndent = function (e, t, n) {
var r = this.$getIndent(t), i = this.getTokenizer().getLineTokens(t, e), s = i.tokens;
if (s.length && s[s.length - 1].type == "comment")return r;
if (e == "start") {
var o = t.match(/^.*[\{\(\[]\s*$/);
o && (r += n)
}
return r
}, this.checkOutdent = function (e, t, n) {
return this.$outdent.checkOutdent(t, n)
}, this.autoOutdent = function (e, t, n) {
this.$outdent.autoOutdent(t, n)
}, this.$id = "ace/mode/jsx"<|fim▁hole|> this.$rules = {start: [
{token: "comment", regex: "\\/\\/.*$"},
s.getStartRule("doc-start"),
{token: "comment", regex: "\\/\\*", next: "comment"},
{token: "string.regexp", regex: "[/](?:(?:\\[(?:\\\\]|[^\\]])+\\])|(?:\\\\/|[^\\]/]))*[/]\\w*\\s*(?=[).,;]|$)"},
{token: "string", regex: '["](?:(?:\\\\.)|(?:[^"\\\\]))*?["]'},
{token: "string", regex: "['](?:(?:\\\\.)|(?:[^'\\\\]))*?[']"},
{token: "constant.numeric", regex: "0[xX][0-9a-fA-F]+\\b"},
{token: "constant.numeric", regex: "[+-]?\\d+(?:(?:\\.\\d*)?(?:[eE][+-]?\\d+)?)?\\b"},
{token: "constant.language.boolean", regex: "(?:true|false)\\b"},
{token: ["storage.type", "text", "entity.name.function"], regex: "(function)(\\s+)(" + r + ")"},
{token: function (r) {
return r == "this" ? "variable.language" : r == "function" ? "storage.type" : e.hasOwnProperty(r) || n.hasOwnProperty(r) ? "keyword" : t.hasOwnProperty(r) ? "constant.language" : /^_?[A-Z][a-zA-Z0-9_]*$/.test(r) ? "language.support.class" : "identifier"
}, regex: r},
{token: "keyword.operator", regex: "!|%|&|\\*|\\-\\-|\\-|\\+\\+|\\+|~|==|=|!=|<=|>=|<<=|>>=|>>>=|<>|<|>|!|&&|\\|\\||\\?\\:|\\*=|%=|\\+=|\\-=|&=|\\^=|\\b(?:in|instanceof|new|delete|typeof|void)"},
{token: "punctuation.operator", regex: "\\?|\\:|\\,|\\;|\\."},
{token: "paren.lparen", regex: "[[({<]"},
{token: "paren.rparen", regex: "[\\])}>]"},
{token: "text", regex: "\\s+"}
], comment: [
{token: "comment", regex: ".*?\\*\\/", next: "start"},
{token: "comment", regex: ".+"}
]}, this.embedRules(s, "doc-", [s.getEndRule("start")])
};
r.inherits(u, o), t.JsxHighlightRules = u
}), ace.define("ace/mode/doc_comment_highlight_rules", ["require", "exports", "module", "ace/lib/oop", "ace/mode/text_highlight_rules"], function (e, t, n) {
var r = e("../lib/oop"), i = e("./text_highlight_rules").TextHighlightRules, s = function () {
this.$rules = {start: [
{token: "comment.doc.tag", regex: "@[\\w\\d_]+"},
{token: "comment.doc.tag", regex: "\\bTODO\\b"},
{defaultToken: "comment.doc"}
]}
};
r.inherits(s, i), s.getStartRule = function (e) {
return{token: "comment.doc", regex: "\\/\\*(?=\\*)", next: e}
}, s.getEndRule = function (e) {
return{token: "comment.doc", regex: "\\*\\/", next: e}
}, t.DocCommentHighlightRules = s
}), ace.define("ace/mode/matching_brace_outdent", ["require", "exports", "module", "ace/range"], function (e, t, n) {
var r = e("../range").Range, i = function () {
};
(function () {
this.checkOutdent = function (e, t) {
return/^\s+$/.test(e) ? /^\s*\}/.test(t) : !1
}, this.autoOutdent = function (e, t) {
var n = e.getLine(t), i = n.match(/^(\s*\})/);
if (!i)return 0;
var s = i[1].length, o = e.findMatchingBracket({row: t, column: s});
if (!o || o.row == t)return 0;
var u = this.$getIndent(e.getLine(o.row));
e.replace(new r(t, 0, t, s - 1), u)
}, this.$getIndent = function (e) {
return e.match(/^\s*/)[0]
}
}).call(i.prototype), t.MatchingBraceOutdent = i
}), ace.define("ace/mode/behaviour/cstyle", ["require", "exports", "module", "ace/lib/oop", "ace/mode/behaviour", "ace/token_iterator", "ace/lib/lang"], function (e, t, n) {
var r = e("../../lib/oop"), i = e("../behaviour").Behaviour, s = e("../../token_iterator").TokenIterator, o = e("../../lib/lang"), u = ["text", "paren.rparen", "punctuation.operator"], a = ["text", "paren.rparen", "punctuation.operator", "comment"], f, l = {}, c = function (e) {
var t = -1;
e.multiSelect && (t = e.selection.id, l.rangeCount != e.multiSelect.rangeCount && (l = {rangeCount: e.multiSelect.rangeCount}));
if (l[t])return f = l[t];
f = l[t] = {autoInsertedBrackets: 0, autoInsertedRow: -1, autoInsertedLineEnd: "", maybeInsertedBrackets: 0, maybeInsertedRow: -1, maybeInsertedLineStart: "", maybeInsertedLineEnd: ""}
}, h = function () {
this.add("braces", "insertion", function (e, t, n, r, i) {
var s = n.getCursorPosition(), u = r.doc.getLine(s.row);
if (i == "{") {
c(n);
var a = n.getSelectionRange(), l = r.doc.getTextRange(a);
if (l !== "" && l !== "{" && n.getWrapBehavioursEnabled())return{text: "{" + l + "}", selection: !1};
if (h.isSaneInsertion(n, r))return/[\]\}\)]/.test(u[s.column]) || n.inMultiSelectMode ? (h.recordAutoInsert(n, r, "}"), {text: "{}", selection: [1, 1]}) : (h.recordMaybeInsert(n, r, "{"), {text: "{", selection: [1, 1]})
} else if (i == "}") {
c(n);
var p = u.substring(s.column, s.column + 1);
if (p == "}") {
var d = r.$findOpeningBracket("}", {column: s.column + 1, row: s.row});
if (d !== null && h.isAutoInsertedClosing(s, u, i))return h.popAutoInsertedClosing(), {text: "", selection: [1, 1]}
}
} else {
if (i == "\n" || i == "\r\n") {
c(n);
var v = "";
h.isMaybeInsertedClosing(s, u) && (v = o.stringRepeat("}", f.maybeInsertedBrackets), h.clearMaybeInsertedClosing());
var p = u.substring(s.column, s.column + 1);
if (p === "}") {
var m = r.findMatchingBracket({row: s.row, column: s.column + 1}, "}");
if (!m)return null;
var g = this.$getIndent(r.getLine(m.row))
} else {
if (!v) {
h.clearMaybeInsertedClosing();
return
}
var g = this.$getIndent(u)
}
var y = g + r.getTabString();
return{text: "\n" + y + "\n" + g + v, selection: [1, y.length, 1, y.length]}
}
h.clearMaybeInsertedClosing()
}
}), this.add("braces", "deletion", function (e, t, n, r, i) {
var s = r.doc.getTextRange(i);
if (!i.isMultiLine() && s == "{") {
c(n);
var o = r.doc.getLine(i.start.row), u = o.substring(i.end.column, i.end.column + 1);
if (u == "}")return i.end.column++, i;
f.maybeInsertedBrackets--
}
}), this.add("parens", "insertion", function (e, t, n, r, i) {
if (i == "(") {
c(n);
var s = n.getSelectionRange(), o = r.doc.getTextRange(s);
if (o !== "" && n.getWrapBehavioursEnabled())return{text: "(" + o + ")", selection: !1};
if (h.isSaneInsertion(n, r))return h.recordAutoInsert(n, r, ")"), {text: "()", selection: [1, 1]}
} else if (i == ")") {
c(n);
var u = n.getCursorPosition(), a = r.doc.getLine(u.row), f = a.substring(u.column, u.column + 1);
if (f == ")") {
var l = r.$findOpeningBracket(")", {column: u.column + 1, row: u.row});
if (l !== null && h.isAutoInsertedClosing(u, a, i))return h.popAutoInsertedClosing(), {text: "", selection: [1, 1]}
}
}
}), this.add("parens", "deletion", function (e, t, n, r, i) {
var s = r.doc.getTextRange(i);
if (!i.isMultiLine() && s == "(") {
c(n);
var o = r.doc.getLine(i.start.row), u = o.substring(i.start.column + 1, i.start.column + 2);
if (u == ")")return i.end.column++, i
}
}), this.add("brackets", "insertion", function (e, t, n, r, i) {
if (i == "[") {
c(n);
var s = n.getSelectionRange(), o = r.doc.getTextRange(s);
if (o !== "" && n.getWrapBehavioursEnabled())return{text: "[" + o + "]", selection: !1};
if (h.isSaneInsertion(n, r))return h.recordAutoInsert(n, r, "]"), {text: "[]", selection: [1, 1]}
} else if (i == "]") {
c(n);
var u = n.getCursorPosition(), a = r.doc.getLine(u.row), f = a.substring(u.column, u.column + 1);
if (f == "]") {
var l = r.$findOpeningBracket("]", {column: u.column + 1, row: u.row});
if (l !== null && h.isAutoInsertedClosing(u, a, i))return h.popAutoInsertedClosing(), {text: "", selection: [1, 1]}
}
}
}), this.add("brackets", "deletion", function (e, t, n, r, i) {
var s = r.doc.getTextRange(i);
if (!i.isMultiLine() && s == "[") {
c(n);
var o = r.doc.getLine(i.start.row), u = o.substring(i.start.column + 1, i.start.column + 2);
if (u == "]")return i.end.column++, i
}
}), this.add("string_dquotes", "insertion", function (e, t, n, r, i) {
if (i == '"' || i == "'") {
c(n);
var s = i, o = n.getSelectionRange(), u = r.doc.getTextRange(o);
if (u !== "" && u !== "'" && u != '"' && n.getWrapBehavioursEnabled())return{text: s + u + s, selection: !1};
var a = n.getCursorPosition(), f = r.doc.getLine(a.row), l = f.substring(a.column - 1, a.column);
if (l == "\\")return null;
var p = r.getTokens(o.start.row), d = 0, v, m = -1;
for (var g = 0; g < p.length; g++) {
v = p[g], v.type == "string" ? m = -1 : m < 0 && (m = v.value.indexOf(s));
if (v.value.length + d > o.start.column)break;
d += p[g].value.length
}
if (!v || m < 0 && v.type !== "comment" && (v.type !== "string" || o.start.column !== v.value.length + d - 1 && v.value.lastIndexOf(s) === v.value.length - 1)) {
if (!h.isSaneInsertion(n, r))return;
return{text: s + s, selection: [1, 1]}
}
if (v && v.type === "string") {
var y = f.substring(a.column, a.column + 1);
if (y == s)return{text: "", selection: [1, 1]}
}
}
}), this.add("string_dquotes", "deletion", function (e, t, n, r, i) {
var s = r.doc.getTextRange(i);
if (!i.isMultiLine() && (s == '"' || s == "'")) {
c(n);
var o = r.doc.getLine(i.start.row), u = o.substring(i.start.column + 1, i.start.column + 2);
if (u == s)return i.end.column++, i
}
})
};
h.isSaneInsertion = function (e, t) {
var n = e.getCursorPosition(), r = new s(t, n.row, n.column);
if (!this.$matchTokenType(r.getCurrentToken() || "text", u)) {
var i = new s(t, n.row, n.column + 1);
if (!this.$matchTokenType(i.getCurrentToken() || "text", u))return!1
}
return r.stepForward(), r.getCurrentTokenRow() !== n.row || this.$matchTokenType(r.getCurrentToken() || "text", a)
}, h.$matchTokenType = function (e, t) {
return t.indexOf(e.type || e) > -1
}, h.recordAutoInsert = function (e, t, n) {
var r = e.getCursorPosition(), i = t.doc.getLine(r.row);
this.isAutoInsertedClosing(r, i, f.autoInsertedLineEnd[0]) || (f.autoInsertedBrackets = 0), f.autoInsertedRow = r.row, f.autoInsertedLineEnd = n + i.substr(r.column), f.autoInsertedBrackets++
}, h.recordMaybeInsert = function (e, t, n) {
var r = e.getCursorPosition(), i = t.doc.getLine(r.row);
this.isMaybeInsertedClosing(r, i) || (f.maybeInsertedBrackets = 0), f.maybeInsertedRow = r.row, f.maybeInsertedLineStart = i.substr(0, r.column) + n, f.maybeInsertedLineEnd = i.substr(r.column), f.maybeInsertedBrackets++
}, h.isAutoInsertedClosing = function (e, t, n) {
return f.autoInsertedBrackets > 0 && e.row === f.autoInsertedRow && n === f.autoInsertedLineEnd[0] && t.substr(e.column) === f.autoInsertedLineEnd
}, h.isMaybeInsertedClosing = function (e, t) {
return f.maybeInsertedBrackets > 0 && e.row === f.maybeInsertedRow && t.substr(e.column) === f.maybeInsertedLineEnd && t.substr(0, e.column) == f.maybeInsertedLineStart
}, h.popAutoInsertedClosing = function () {
f.autoInsertedLineEnd = f.autoInsertedLineEnd.substr(1), f.autoInsertedBrackets--
}, h.clearMaybeInsertedClosing = function () {
f && (f.maybeInsertedBrackets = 0, f.maybeInsertedRow = -1)
}, r.inherits(h, i), t.CstyleBehaviour = h
}), ace.define("ace/mode/folding/cstyle", ["require", "exports", "module", "ace/lib/oop", "ace/range", "ace/mode/folding/fold_mode"], function (e, t, n) {
var r = e("../../lib/oop"), i = e("../../range").Range, s = e("./fold_mode").FoldMode, o = t.FoldMode = function (e) {
e && (this.foldingStartMarker = new RegExp(this.foldingStartMarker.source.replace(/\|[^|]*?$/, "|" + e.start)), this.foldingStopMarker = new RegExp(this.foldingStopMarker.source.replace(/\|[^|]*?$/, "|" + e.end)))
};
r.inherits(o, s), function () {
this.foldingStartMarker = /(\{|\[)[^\}\]]*$|^\s*(\/\*)/, this.foldingStopMarker = /^[^\[\{]*(\}|\])|^[\s\*]*(\*\/)/, this.getFoldWidgetRange = function (e, t, n, r) {
var i = e.getLine(n), s = i.match(this.foldingStartMarker);
if (s) {
var o = s.index;
if (s[1])return this.openingBracketBlock(e, s[1], n, o);
var u = e.getCommentFoldRange(n, o + s[0].length, 1);
return u && !u.isMultiLine() && (r ? u = this.getSectionRange(e, n) : t != "all" && (u = null)), u
}
if (t === "markbegin")return;
var s = i.match(this.foldingStopMarker);
if (s) {
var o = s.index + s[0].length;
return s[1] ? this.closingBracketBlock(e, s[1], n, o) : e.getCommentFoldRange(n, o, -1)
}
}, this.getSectionRange = function (e, t) {
var n = e.getLine(t), r = n.search(/\S/), s = t, o = n.length;
t += 1;
var u = t, a = e.getLength();
while (++t < a) {
n = e.getLine(t);
var f = n.search(/\S/);
if (f === -1)continue;
if (r > f)break;
var l = this.getFoldWidgetRange(e, "all", t);
if (l) {
if (l.start.row <= s)break;
if (l.isMultiLine())t = l.end.row; else if (r == f)break
}
u = t
}
return new i(s, o, u, e.getLine(u).length)
}
}.call(o.prototype)
})<|fim▁end|> | }.call(l.prototype), t.Mode = l
}), ace.define("ace/mode/jsx_highlight_rules", ["require", "exports", "module", "ace/lib/oop", "ace/lib/lang", "ace/mode/doc_comment_highlight_rules", "ace/mode/text_highlight_rules"], function (e, t, n) {
var r = e("../lib/oop"), i = e("../lib/lang"), s = e("./doc_comment_highlight_rules").DocCommentHighlightRules, o = e("./text_highlight_rules").TextHighlightRules, u = function () {
var e = i.arrayToMap("break|do|instanceof|typeof|case|else|new|var|catch|finally|return|void|continue|for|switch|default|while|function|this|if|throw|delete|in|try|class|extends|super|import|from|into|implements|interface|static|mixin|override|abstract|final|number|int|string|boolean|variant|log|assert".split("|")), t = i.arrayToMap("null|true|false|NaN|Infinity|__FILE__|__LINE__|undefined".split("|")), n = i.arrayToMap("debugger|with|const|export|let|private|public|yield|protected|extern|native|as|operator|__fake__|__readonly__".split("|")), r = "[a-zA-Z_][a-zA-Z0-9_]*\\b"; |
<|file_name|>qingjia_calendar.py<|end_file_name|><|fim▁begin|>#!usr/bin/python
# -*- coding:utf-8 -*-
from osv import osv,fields
import time
from datetime import datetime
from dateutil import rrule
class qingjia_calendar(osv.osv):
_name='qingjia.calendar'
_columns={
'start_date':fields.datetime('start_date'),
'end_date':fields.datetime('end_date'),
'calendar_line_ids':fields.one2many('qingjia.calendar.line','qingjia_calendar_id','calendar_line_ids'),
'state':fields.selection([('arrange','arrange'),('not arrange','not arrange')],'state',readonly=True)
}
_defaults={
}
def plan_arrange(self,cr,uid,ids,context=None):
my=self.browse(cr,uid,ids[0])
line_obj=self.pool.get('qingjia.calendar.line')
holidays=[]
datas=[]
start_date=time.strptime(my.start_date,'%Y-%m-%d %H:%M:%S')
end_date=time.strptime(my.end_date,'%Y-%m-%d %H:%M:%S')
dt=datetime(start_date.tm_year,start_date.tm_mon,start_date.tm_mday)
unt=datetime(end_date.tm_year,end_date.tm_mon,end_date.tm_mday)
days=rrule.rrule(rrule.DAILY,dtstart=dt,until=unt,byweekday=[6])
ge=days._iter()
for i in range(days.count()):
date_info=ge.next()
date_list=map(str,(date_info.year,date_info.month,date_info.day))
date='-'.join(date_list)
holidays.append(date)
for day in holidays:
line_search=line_obj.search(cr,uid,[('date','=',day),('type','=','holiday'),('state','=','arrange')])
if line_search:
datas.append((4,line_search[0]))
else:
datas.append((0,0,{'date':day,'type':'holiday','state':'arrange','name':'holiday'}))
self.write(cr,uid,ids,{'calendar_line_ids':datas})
return True
qingjia_calendar()
class qingjia_calendar_line(osv.osv):
_name='qingjia.calendar.line'
_columns={
'qingjia_calendar_id':fields.many2one('qingjia.calendar','qingjia_calendar_id'),
'name':fields.char('type',size=64),
'date':fields.datetime('date'),
'type':fields.selection([('work','Work'),('holiday','Holiday')],'type',),
'state':fields.selection([('arrange','arrange'),('not arrange','not arrange')],'state'),
'is_holiday':fields.boolean('is_holiday'),
'note':fields.char('note',size=128),
}
_defaults={'type':'work'}
def onchange_type(self,cr,uid,ids,res,context=None):
if res:
print res,'res'
return {'value':{'name':res}}<|fim▁hole|><|fim▁end|> |
qingjia_calendar_line() |
<|file_name|>config.exceptionHandler.ts<|end_file_name|><|fim▁begin|>module App {
"use strict";
angular.module("knowledgeCenterApp")
.config([
"$provide", ($provide: ng.auto.IProvideService): void => {
$provide.decorator("$exceptionHandler",
["$delegate", "config", "logger", extendExceptionHandler]);
}
]);
// extend the $exceptionHandler service to also display a toast.
function extendExceptionHandler($delegate: Function, config: App.IConfigProvider, logger: Common.ILoggerService): Function {
var appErrorPrefix: string = config.appErrorPrefix;<|fim▁hole|> $delegate(exception, cause);
if (appErrorPrefix && exception.message.indexOf(appErrorPrefix) === 0) { return; }
var errorData: any = { exception: exception, cause: cause };
var msg: string = appErrorPrefix + exception.message;
logger.logError(msg, errorData, "knowledgeCenterApp", true);
};
}
}<|fim▁end|> | return (exception: Error, cause: string): void => { |
<|file_name|>borders.hpp<|end_file_name|><|fim▁begin|>#pragma once
#include "common.hpp"
<|fim▁hole|>} // namespace ccloutline<|fim▁end|> | namespace ccloutline {
border_mat borders(const arma::mat&);
|
<|file_name|>hal_dynalib_export.cpp<|end_file_name|><|fim▁begin|>/**
******************************************************************************
* @file hal_dynalib_export.c
* @author Matthew McGowan
******************************************************************************
Copyright (c) 2015 Particle Industries, Inc. All rights reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation, either<|fim▁hole|> MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, see <http://www.gnu.org/licenses/>.
******************************************************************************
*/
#define DYNALIB_EXPORT
#include "hal_dynalib.h"
#include "hal_dynalib_core.h"
#include "hal_dynalib_gpio.h"
#include "hal_dynalib_i2c.h"
#include "hal_dynalib_ota.h"
#include "hal_dynalib_peripherals.h"
#include "hal_dynalib_socket.h"
#include "hal_dynalib_spi.h"
#include "hal_dynalib_usart.h"
#include "hal_dynalib_wlan.h"
#include "hal_dynalib_concurrent.h"
#include "hal_dynalib_cellular.h"
#include "hal_dynalib_can.h"
#include "hal_dynalib_rgbled.h"
#include "hal_dynalib_dct.h"
#ifndef HAL_USB_EXCLUDE
#include "hal_dynalib_usb.h"
#endif
#ifndef HAL_BOOTLOADER_EXCLUDE
#include "hal_dynalib_bootloader.h"
#endif<|fim▁end|> | version 3 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>'use strict';
/* App Module */
var phonecatApp = angular.module('phonecatApp', [
'ngRoute',
'phonecatAnimations',
'phonecatControllers',
'phonecatFilters',
'phonecatServices',
]);
phonecatApp.config(['$routeProvider',
function($routeProvider) {
$routeProvider.
when('/phones', {
templateUrl: 'partials/phone-list.html',
controller: 'PhoneListCtrl'
}).
when('/phones/:phoneId', {
templateUrl: 'partials/phone-detail.html',
controller: 'PhoneDetailCtrl'
}).<|fim▁hole|> }]);<|fim▁end|> | otherwise({
redirectTo: '/phones'
}); |
<|file_name|>permissions.py<|end_file_name|><|fim▁begin|>def get_perm_argparser(self, args):
args = args.split(" ")
if args[0] == "nick":
self.conman.gen_send("Permission level for %s: %s" % (args[1], self.permsman.get_nick_perms(args[1])))
elif args[0] == "cmd":
if args[1].startswith("."):
args[1] = args[1][1:]
self.conman.gen_send("Permission level for %s: %s" % (args[1], self.permsman.get_cmd_perms(args[1])))
elif args[0] == "msg":
self.conman.gen_send("Message permissions for %s: %s" % (args[1], self.permsman.get_msg_perms(args[1])))
def set_perm_argparser(self, args):
args = args.split(" ")
if args[0] == "nick":
self.conman.gen_send("Setting permission level for %s: %s" % (args[1], args[2]))
self.permsman.set_nick_perms(args[1], args[2])
elif args[0] == "cmd":
if args[1].startswith("."):
args[1] = args[1][1:]<|fim▁hole|> elif args[0] == "msg":
args[2] = args[2].lower() == "true" or args[2] == "1"
self.conman.gen_send("Setting message permissions for %s: %s" % (args[1], args[2]))
self.permsman.set_msg_perms(args[1], args[2])
self._map("command", "getperm", get_perm_argparser)
self._map("command", "setperm", set_perm_argparser)<|fim▁end|> | self.conman.gen_send("Setting permission level for %s: %s" % (args[1], args[2]))
self.permsman.set_cmd_perms(args[1], args[2]) |
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2016-2021 Snowplow Analytics Ltd. All rights reserved.
//
// This program is licensed to you under the Apache License Version 2.0, and
// you may not use this file except in compliance with the Apache License
// Version 2.0. You may obtain a copy of the Apache License Version 2.0 at
// http://www.apache.org/licenses/LICENSE-2.0.
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the Apache License Version 2.0 is distributed on an "AS
// IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the Apache License Version 2.0 for the specific language
// governing permissions and limitations there under.
//
use factotum::factfile::*;
use daggy::*;
use factotum::tests::*;
#[test]
fn recursive_find_ok() {
    // Build: root -> {child1, child2}, child2 -> grandchild.
    let mut dag = Dag::<Task, ()>::new();
    let root_idx = dag.add_node(make_task("root", &vec![]));
    dag.add_child(root_idx, (), make_task("child1", &vec![]));
    let (_, child2_idx) = dag.add_child(root_idx, (), make_task("child2", &vec![]));
    let (_, expected_idx) = dag.add_child(child2_idx, (), make_task("grandchild", &vec![]));
    // Searching from the root must locate the grandchild node and index.
    match super::find_task_recursive(&dag, "grandchild", root_idx) {
        Some((found_idx, found_node)) => {
            assert_eq!(found_idx, expected_idx);
            assert_eq!(found_node.name, "grandchild");
        }
        None => panic!("couldn't find value"),
    }
}
#[test]
fn get_tasks_in_order_basic() {
    // Topology: root -> {child1, child2}, child2 -> grandchild.
    let mut dag = Dag::<Task, ()>::new();
    let parent = make_task("root", &vec![]);
    let root_idx: NodeIndex = dag.add_node(parent);
    let child1 = make_task("child1", &vec![]);
    let child2 = make_task("child2", &vec![]);
    dag.add_child(root_idx, (), child1);
    let (_, child2_idx) = dag.add_child(root_idx, (), child2);
    let grandchild = make_task("grandchild", &vec![]);
    dag.add_child(child2_idx, (), grandchild);
    // Tasks are expected grouped by depth level; within a level the order
    // is whatever the traversal yields (here child2 before child1).
    let expected = vec![vec!["root"], vec!["child2", "child1"], vec!["grandchild"]];
    let mut actual: Vec<Vec<&Task>> = vec![];
    super::get_tasks_in_order(&dag, &vec![root_idx], &mut actual);
    compare_tasks(expected, actual);
}
#[test]
fn check_valid_subtree() {
// root <-- start here
// / \
// child1 child2
// \
// grandchild
//
let mut dag = Dag::<Task, ()>::new();
let parent = make_task("root", &vec![]);
let root_idx: NodeIndex = dag.add_node(parent);
let child1 = make_task("child1", &vec![]);
let child2 = make_task("child2", &vec![]);<|fim▁hole|> let (_, child2_idx) = dag.add_child(root_idx, (), child2);
let grandchild = make_task("grandchild", &vec![]);
dag.add_child(child2_idx, (), grandchild);
assert_eq!(true, super::is_proper_sub_tree(&dag, root_idx));
}
#[test]
fn check_invalid_subtree() {
    // root            <-- start here: ok
    // /     \
    // child1  child2  <-- start here: fails
    //    \    /
    //  grandchild     (reachable from BOTH children)
    //
    let mut dag = Dag::<Task, ()>::new();
    let parent = make_task("root", &vec![]);
    let root_idx: NodeIndex = dag.add_node(parent);
    let child1 = make_task("child1", &vec![]);
    let child2 = make_task("child2", &vec![]);
    let (_, child1_idx) = dag.add_child(root_idx, (), child1);
    let (_, child2_idx) = dag.add_child(root_idx, (), child2);
    let grandchild = make_task("grandchild", &vec![]);
    let (_, grandchild_idx) = dag.add_child(child2_idx, (), grandchild);
    // The extra edge gives grandchild a second parent, so the subtree rooted
    // at child2 is not proper (grandchild is also reachable from outside it),
    // while the tree rooted at root still is.
    dag.add_edge(child1_idx, grandchild_idx, ()).ok().unwrap();
    assert_eq!(false, super::is_proper_sub_tree(&dag, child2_idx));
    assert_eq!(true, super::is_proper_sub_tree(&dag, root_idx));
}
dag.add_child(root_idx, (), child1); |
<|file_name|>AbstractDominoIterator.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2013
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
<|fim▁hole|>package org.openntf.domino.iterators;
import java.util.Iterator;
import org.openntf.domino.Base;
import org.openntf.domino.Database;
import org.openntf.domino.DocumentCollection;
import org.openntf.domino.Session;
import org.openntf.domino.View;
import org.openntf.domino.ViewEntryCollection;
import org.openntf.domino.utils.DominoUtils;
import org.openntf.domino.utils.Factory;
/**
 * Abstract base class for iterators over Domino collections.
 *
 * Stores the collection being iterated together with transient references to
 * its parent {@link Session} and {@link Database}. The database is recorded
 * by server name and file path so it can be re-resolved lazily (the session
 * and database fields are transient and thus not serialized).
 *
 * @param <T>
 *            the type of element returned by this iterator
 */
public abstract class AbstractDominoIterator<T> implements Iterator<T> {
	/** Name of the server hosting the backing database. */
	private String serverName_;
	/** File path of the backing database. */
	private String filePath_;
	/** The collection being iterated. */
	private Base<?> collection_;
	/** Lazily resolved session; transient so it is not serialized. */
	private transient Session session_;
	/** Lazily resolved database; transient so it is not serialized. */
	private transient Database database_;
	/**
	 * Instantiates a new abstract domino iterator.
	 *
	 * @param collection
	 *            the collection to iterate over
	 */
	protected AbstractDominoIterator(final Base<?> collection) {
		setCollection(collection);
	}
	/**
	 * Gets the session, obtaining one from the {@link Factory} on first use.
	 *
	 * @return the session, or null if one could not be obtained
	 */
	protected Session getSession() {
		if (session_ == null) {
			try {
				session_ = Factory.getSession();
			} catch (Throwable e) {
				DominoUtils.handleException(e);
				return null;
			}
		}
		return session_;
	}
	/**
	 * Gets the database, opening it via the stored server name and file path
	 * on first use.
	 *
	 * @return the database, or null if it could not be opened
	 */
	protected Database getDatabase() {
		if (database_ == null) {
			Session session = getSession();
			try {
				database_ = session.getDatabase(getServerName(), getFilePath());
			} catch (Throwable e) {
				DominoUtils.handleException(e);
				return null;
			}
		}
		return database_;
	}
	/**
	 * Gets the file path of the backing database.
	 *
	 * @return the file path
	 */
	protected String getFilePath() {
		return filePath_;
	}
	/**
	 * Gets the server name of the backing database.
	 *
	 * @return the server name
	 */
	protected String getServerName() {
		return serverName_;
	}
	/**
	 * Sets the database, recording its file path and server name so the
	 * database can be re-opened later by {@link #getDatabase()}.
	 *
	 * @param database
	 *            the new database
	 */
	protected void setDatabase(final Database database) {
		if (database != null) {
			try {
				setFilePath(database.getFilePath());
				setServerName(database.getServer());
			} catch (Throwable e) {
				DominoUtils.handleException(e);
			}
		}
	}
	/**
	 * Sets the file path.
	 *
	 * @param filePath
	 *            the new file path
	 */
	protected void setFilePath(final String filePath) {
		filePath_ = filePath;
	}
	/**
	 * Sets the server name.
	 *
	 * @param serverName
	 *            the new server name
	 */
	protected void setServerName(final String serverName) {
		serverName_ = serverName;
	}
	/**
	 * Gets the collection being iterated.
	 *
	 * @return the collection
	 */
	public Base<?> getCollection() {
		return collection_;
	}
	/**
	 * Sets the collection and, where the concrete collection type allows,
	 * derives the session and database from the collection's parents.
	 *
	 * @param collection
	 *            the new collection
	 */
	public void setCollection(final Base<?> collection) {
		if (collection != null) {
			if (collection instanceof DocumentCollection) {
				org.openntf.domino.Database parent = ((org.openntf.domino.DocumentCollection) collection).getParent();
				session_ = Factory.fromLotus(parent.getParent(), Session.SCHEMA, null); // FIXME NTF - this is suboptimal,
				database_ = Factory.fromLotus(parent, Database.SCHEMA, session_);
				// but we still need to
				// sort out the parent/child pattern
			} else if (collection instanceof ViewEntryCollection) {
				View vw = ((ViewEntryCollection) collection).getParent();
				database_ = vw.getParent();
				session_ = Factory.getSession(database_);
			}
			if (database_ != null) {
				setDatabase(database_);
			}
		}
		collection_ = collection;
	}
}
<|file_name|>workflow_files.py<|end_file_name|><|fim▁begin|># THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE.
# Copyright (C) NIWA & British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Workflow service files management."""
import asyncio
from collections import deque
from contextlib import suppress
from enum import Enum
from functools import partial
import glob
import json
import logging
import os
from pathlib import Path
from random import shuffle
import re
import shutil
from subprocess import Popen, PIPE, DEVNULL, TimeoutExpired
from time import sleep
from typing import (
Any, Container, Deque, Dict, Iterable, List, NamedTuple, Optional, Set,
Tuple, TYPE_CHECKING, Union
)
import aiofiles
import zmq.auth
import cylc.flow.flags
from cylc.flow import LOG
from cylc.flow.cfgspec.glbl_cfg import glbl_cfg
from cylc.flow.exceptions import (
CylcError,
PlatformError,
PlatformLookupError,
ServiceFileError,
UserInputError,
WorkflowFilesError,
handle_rmtree_err,
)
from cylc.flow.pathutil import (
expand_path,
get_cylc_run_dir,
get_workflow_run_dir,
make_localhost_symlinks,
parse_rm_dirs,
remove_dir_and_target,
get_next_rundir_number,
remove_dir_or_file,
remove_empty_parents
)
from cylc.flow.platforms import (
get_host_from_platform,
get_install_target_to_platforms_map,
get_localhost_install_target,
)
from cylc.flow.hostuserutil import (
get_user,
is_remote_host
)
from cylc.flow.remote import (
DEFAULT_RSYNC_OPTS,
_construct_ssh_cmd,
construct_ssh_cmd,
)
from cylc.flow.workflow_db_mgr import WorkflowDatabaseManager
from cylc.flow.loggingutil import CylcLogFormatter, close_log
from cylc.flow.unicode_rules import WorkflowNameValidator
from cylc.flow.util import cli_format
from cylc.flow.wallclock import get_current_time_string
if TYPE_CHECKING:
from optparse import Values
class KeyType(Enum):
    """The type of an authentication key: public or private."""
    PRIVATE = "private"
    PUBLIC = "public"
class KeyOwner(Enum):
    """The owner of an authentication key: the server or a client."""
    SERVER = "server"
    CLIENT = "client"
class KeyInfo(): # noqa: SIM119 (not really relevant here)
"""Represents a server or client key file, which can be private or public.
Attributes:
file_name: The file name of this key object.
key_type: public or private
key_owner: server or client
key_path: The absolute path, not including filename,
for this key object.
full_key_path: The absolute path, including filename,
for this key object.
"""
def __init__(self, key_type, key_owner, full_key_path=None,
workflow_srv_dir=None, install_target=None, server_held=True):
self.key_type = key_type
self.key_owner = key_owner
self.full_key_path = full_key_path
self.workflow_srv_dir = workflow_srv_dir
self.install_target = install_target
if self.full_key_path is not None:
self.key_path, self.file_name = os.path.split(self.full_key_path)
elif self.workflow_srv_dir is not None: # noqa: SIM106
# Build key filename
file_name = key_owner.value
# Add optional install target name
if (key_owner is KeyOwner.CLIENT
and key_type is KeyType.PUBLIC
and self.install_target is not None):
file_name = f"{file_name}_{self.install_target}"
if key_type == KeyType.PRIVATE:
file_extension = WorkflowFiles.Service.PRIVATE_FILE_EXTENSION
elif key_type == KeyType.PUBLIC:
file_extension = WorkflowFiles.Service.PUBLIC_FILE_EXTENSION
self.file_name = f"{file_name}{file_extension}"
# Build key path (without filename) for client public keys
if (key_owner is KeyOwner.CLIENT
and key_type is KeyType.PUBLIC and server_held):
temp = f"{key_owner.value}_{key_type.value}_keys"
self.key_path = os.path.join(
os.path.expanduser("~"),
self.workflow_srv_dir,<|fim▁hole|> and server_held is False)
or
(key_owner is KeyOwner.SERVER
and key_type is KeyType.PRIVATE)
or (key_owner is KeyOwner.CLIENT
and key_type is KeyType.PRIVATE)
or (key_owner is KeyOwner.SERVER
and key_type is KeyType.PUBLIC)):
self.key_path = os.path.expandvars(self.workflow_srv_dir)
else:
raise ValueError(
"Cannot create KeyInfo without workflow path or full path.")
# Build full key path (including file name)
self.full_key_path = os.path.join(self.key_path, self.file_name)
class WorkflowFiles:
    """Names of files and directories located in the workflow directory."""
    FLOW_FILE = 'flow.cylc'
    """The workflow configuration file."""
    FLOW_FILE_PROCESSED = 'flow-processed.cylc'
    """The workflow configuration file after processing."""
    SUITE_RC = 'suite.rc'
    """Deprecated (Cylc 7) workflow configuration file."""
    RUN_N = 'runN'
    """Symbolic link pointing at the latest numbered run directory."""
    LOG_DIR = 'log'
    """Workflow log directory."""
    SHARE_DIR = 'share'
    """Workflow share directory."""
    SHARE_CYCLE_DIR = os.path.join(SHARE_DIR, 'cycle')
    """Workflow share/cycle directory."""
    WORK_DIR = 'work'
    """Workflow work directory."""
    RUN_DIR = 'run'
    """Workflow run directory."""
    class Service:
        """The directory containing Cylc system files."""
        DIRNAME = '.service'
        """The name of this directory."""
        CONTACT = 'contact'
        """Contains settings for the running workflow.
        For details of the fields see ``ContactFileFields``.
        """
        DB = 'db'
        """The workflow database.
        Contains information about the execution and status of a workflow.
        """
        PUBLIC_FILE_EXTENSION = '.key'
        PRIVATE_FILE_EXTENSION = '.key_secret'
        """Keyword identifiers used to form the certificate names.
        Note: the public & private identifiers are set by CurveZMQ, so cannot
        be renamed, but we hard-code them since they can't be extracted easily.
        """
    class Install:
        """The directory containing the install source link."""
        DIRNAME = '_cylc-install'
        """The name of this directory."""
        SOURCE = 'source'
        """Symlink to the workflow definition (for the run dir)."""
    RESERVED_DIRNAMES = frozenset([
        LOG_DIR, SHARE_DIR, WORK_DIR, RUN_N, Service.DIRNAME, Install.DIRNAME
    ])
    """Reserved directory names that cannot be present in a source dir."""
    RESERVED_NAMES = frozenset([FLOW_FILE, SUITE_RC, *RESERVED_DIRNAMES])
    """Reserved filenames that cannot be used as run names."""
    SYMLINK_DIRS = frozenset([
        SHARE_CYCLE_DIR, SHARE_DIR, LOG_DIR, WORK_DIR, ''
    ])
    """The paths of the symlink dirs that may be set in
    global.cylc[install][symlink dirs], relative to the run dir
    ('' represents the run dir)."""
class ContactFileFields:
    """Field names present in ``WorkflowFiles.Service.CONTACT``.
    These describe properties of a running workflow.
    .. note::
        The presence of this file indicates the workflow is running as it is
        removed on shutdown. However, if a workflow is not properly shut down
        this file may be left behind.
    """
    API = 'CYLC_API'
    """The Workflow API version string."""
    HOST = 'CYLC_WORKFLOW_HOST'
    """The name of the host the scheduler process is running on."""
    NAME = 'CYLC_WORKFLOW_ID'
    """The name of the workflow."""
    OWNER = 'CYLC_WORKFLOW_OWNER'
    """The user account under which the scheduler process is running."""
    PID = 'CYLC_WORKFLOW_PID'
    """The process ID of the running workflow on ``CYLC_WORKFLOW_HOST``."""
    COMMAND = 'CYLC_WORKFLOW_COMMAND'
    """The command that was used to run the workflow on ``CYLC_WORKFLOW_HOST``.
    Note that this command may be affected by:
    * Workflow host selection (this adds the ``--host`` argument).
    * Auto restart (this reconstructs the command and changes the ``--host``
      argument).
    """
    PORT = 'CYLC_WORKFLOW_PORT'
    """The port Cylc uses to communicate with this workflow."""
    PUBLISH_PORT = 'CYLC_WORKFLOW_PUBLISH_PORT'
    """The port Cylc uses to publish data."""
    WORKFLOW_RUN_DIR_ON_WORKFLOW_HOST = (
        'CYLC_WORKFLOW_RUN_DIR_ON_WORKFLOW_HOST'
    )
    """The path to the workflow run directory as seen from ``HOST``."""
    UUID = 'CYLC_WORKFLOW_UUID'
    """Unique ID for this run of the workflow."""
    VERSION = 'CYLC_VERSION'
    """The Cylc version under which the workflow is running."""
    SCHEDULER_SSH_COMMAND = 'SCHEDULER_SSH_COMMAND'
    SCHEDULER_CYLC_PATH = 'SCHEDULER_CYLC_PATH'
    """The path containing the Cylc executable on a remote host."""
    SCHEDULER_USE_LOGIN_SHELL = 'SCHEDULER_USE_LOGIN_SHELL'
    """Remote command setting for Scheduler."""
class RemoteCleanQueueTuple(NamedTuple):
    """An in-flight remote-clean subprocess and its target platforms."""
    # The running remote-clean subprocess.
    proc: 'Popen[str]'
    # Name of the install target being cleaned.
    install_target: str
    # Platform configurations associated with this install target.
    platforms: List[Dict[str, Any]]
# Delimiter between levels of a hierarchical workflow registration name.
REG_DELIM = "/"
# Fallback title used when a workflow does not provide one.
NO_TITLE = "No title provided"
# Matches `title = ...` lines when scraping a workflow's title.
REC_TITLE = re.compile(r"^\s*title\s*=\s*(.*)\s*$")
# %-format template shown when a contact file indicates the workflow is
# already running (mapping keys: fname, workflow, host, port, pid).
CONTACT_FILE_EXISTS_MSG = r"""workflow contact file exists: %(fname)s
Workflow "%(workflow)s" is already running, listening at "%(host)s:%(port)s".
To start a new run, stop the old one first with one or more of these:
* cylc stop %(workflow)s # wait for active tasks/event handlers
* cylc stop --kill %(workflow)s # kill active tasks and wait
* cylc stop --now %(workflow)s # don't wait for active tasks
* cylc stop --now --now %(workflow)s # don't wait
* ssh -n "%(host)s" kill %(pid)s # final brute force!
"""
# Warning emitted when running a deprecated Cylc 7 suite.rc configuration.
SUITERC_DEPR_MSG = (
    f"Backward compatibility mode ON for CYLC 7 '{WorkflowFiles.SUITE_RC}'"
    " files: please address deprecation warnings and upgrade to Cylc 8 graph"
    f" syntax BEFORE renaming the file to '{WorkflowFiles.FLOW_FILE}'.\n"
)
# str.format template; `{}` is filled with the source location.
NO_FLOW_FILE_MSG = (
    f"No {WorkflowFiles.FLOW_FILE} or {WorkflowFiles.SUITE_RC} "
    "in {}"
)
# str.format template warning about an ambiguous reg argument.
REG_CLASH_MSG = (
    "The specified reg could refer to ./{0} or ~/cylc-run/{1}. "
    "This command will use ./{0}."
)
# str.format template (keys: dir_type, dest, existing) for nested-dir errors.
NESTED_DIRS_MSG = (
    "Nested {dir_type} directories not allowed - cannot install workflow"
    " in '{dest}' as '{existing}' is already a valid {dir_type} directory."
)
def _is_process_running(
    host: str,
    pid: Union[int, str],
    command: str
) -> bool:
    """Check if a workflow process is still running.
    * Returns True if the process is still running.
    * Returns False if it is not.
    * Raises CylcError if we cannot tell (e.g. due to network issues).
    Args:
        host:
            The host where you expect it to be running.
        pid:
            The process ID you expect it to be running under.
        command:
            The command you expect to be running as it would appear in `ps`
            output (e.g. `cylc play <flow> --host=localhost`).
    Raises:
        CylcError:
            If it is not possible to tell whether the process is running
            or not.
    Returns:
        True if the workflow is running else False.
    Examples:
        >>> import psutil; proc = psutil.Process()
        # check a process that is running (i.e. this one)
        >>> _is_process_running(
        ...     'localhost',
        ...     proc.pid,
        ...     cli_format(proc.cmdline()),
        ... )
        True
        # check a process that is running but with a command line that
        # doesn't match
        >>> _is_process_running('localhost', proc.pid, 'something-else')
        False
    """
    # See if the process is still running or not.
    # Query string for `cylc psutil`: ask for metrics of just this PID.
    metric = f'[["Process", {pid}]]'
    if is_remote_host(host):
        # Run the psutil helper over SSH on the remote host.
        cmd = ['psutil']
        cmd = _construct_ssh_cmd(cmd, host)
    else:
        cmd = ['cylc', 'psutil']
    proc = Popen(  # nosec
        cmd,
        stdin=PIPE,
        stdout=PIPE,
        stderr=PIPE,
        text=True
    )  # * hardcoded command
    try:
        # Terminate command after 10 seconds to prevent hanging, etc.
        out, err = proc.communicate(timeout=10, input=metric)
    except TimeoutExpired:
        raise CylcError(
            f'Cannot determine whether workflow is running on {host}.'
        )
    if proc.returncode == 2:
        # the psutil call failed to gather metrics on the process
        # because the process does not exist
        return False
    if proc.returncode:
        # the psutil call failed in some other way e.g. network issues
        LOG.debug(
            f'$ {cli_format(cmd)} # returned {proc.returncode}\n{err}'
        )
        raise CylcError(
            f'Cannot determine whether workflow is running on {host}.'
            f'\n{command}'
        )
    process = json.loads(out)[0]
    # Same PID but a different command line means the PID was recycled by
    # the OS, i.e. the original workflow process is gone.
    return cli_format(process['cmdline']) == command
def detect_old_contact_file(reg: str, contact_data=None) -> None:
    """Check if the workflow process is still running.
    As a side-effect this should detect and rectify the situation
    where an old contact file is still present from a previous run. This can be
    caused by the uncontrolled teardown of a running Scheduler (e.g. a power
    off).
    * If an old contact file does not exist, do nothing.
    * If one does exist but the workflow process is definitely not alive,
      remove it.
    * If one exists and the workflow process is still alive, raise
      ServiceFileError.
    Args:
        reg: workflow name
        contact_data: previously loaded contact file data, if available
            (avoids re-reading the file).
    Raises:
        CylcError:
            * If it is not possible to tell for sure if the workflow is running
              or not.
            * If the workflow is not, however, the contact file cannot be
              removed.
        ServiceFileError(CylcError):
            If old contact file exists and the workflow process still alive.
    """
    # An old workflow of the same name may be running if a contact file exists
    # and can be loaded.
    if not contact_data:
        try:
            contact_data = load_contact_file(reg)
        except (IOError, ValueError, ServiceFileError):
            # Contact file does not exist or corrupted, workflow should be dead
            return
    try:
        old_host: str = contact_data[ContactFileFields.HOST]
        old_port: str = contact_data[ContactFileFields.PORT]
        old_pid: str = contact_data[ContactFileFields.PID]
        old_cmd: str = contact_data[ContactFileFields.COMMAND]
    except KeyError as exc:
        # this shouldn't happen
        # but if it does re-raise the error as something more informative
        raise Exception(f'Found contact file with incomplete data:\n{exc}.')
    # check if the workflow process is running ...
    # NOTE: can raise CylcError
    process_is_running = _is_process_running(old_host, old_pid, old_cmd)
    fname = get_contact_file(reg)
    if process_is_running:
        # ... the process is running, raise an exception
        raise ServiceFileError(
            CONTACT_FILE_EXISTS_MSG % {
                "host": old_host,
                "port": old_port,
                # BUG FIX: the template's `kill %(pid)s` hint needs the
                # process ID; previously the command line was passed here.
                "pid": old_pid,
                "fname": fname,
                "workflow": reg,
            }
        )
    else:
        # ... the process isn't running so the contact file is out of date
        # remove it
        try:
            os.unlink(fname)
            return
        except OSError as exc:
            raise CylcError(
                f'Failed to remove old contact file: "{fname}"\n{exc}'
            )
def dump_contact_file(reg, data):
    """Write the contact file for *reg* from a key=value mapping.
    Two fsyncs are performed: one for the file content and one for the
    directory (file metadata). This ensures that when the contact file is
    written to a shared filesystem (e.g. via NFS) it is immediately visible
    to processes on other hosts once this function returns.
    """
    entries = [f"{key}={value}\n" for key, value in sorted(data.items())]
    with open(get_contact_file(reg), "wb") as handle:
        handle.write("".join(entries).encode())
        # 1st fsync: flush the contact file's content to disk.
        os.fsync(handle.fileno())
    # 2nd fsync: flush the directory entry (file metadata) to disk.
    dir_fileno = os.open(get_workflow_srv_dir(reg), os.O_DIRECTORY)
    os.fsync(dir_fileno)
    os.close(dir_fileno)
def get_contact_file(reg):
    """Return the path of the contact file for workflow *reg*."""
    srv_dir = get_workflow_srv_dir(reg)
    return os.path.join(srv_dir, WorkflowFiles.Service.CONTACT)
def get_flow_file(reg: str) -> Path:
    """Return the path of a workflow's flow.cylc file."""
    return check_flow_file(get_workflow_run_dir(reg))
def get_workflow_source_dir(
    run_dir: Union[Path, str]
) -> Union[Tuple[str, Path], Tuple[None, None]]:
    """Get the source directory path of the workflow in directory provided.
    The source symlink is looked for first in the run dir itself, then in
    its parent directory.
    Args:
        run_dir: directory to check for an installed flow inside.
    Returns (source_dir, symlink) where the latter is the symlink to the
    source dir that exists in the run dir, or (None, None) if neither
    location holds a readable symlink.
    """
    for base in (Path(run_dir), Path(run_dir).parent):
        symlink = Path(
            base,
            WorkflowFiles.Install.DIRNAME,
            WorkflowFiles.Install.SOURCE
        )
        try:
            # os.readlink raises OSError if the symlink does not exist.
            return os.readlink(symlink), symlink
        except OSError:
            continue
    return None, None
def get_workflow_srv_dir(reg):
    """Return the service directory of a workflow.
    $CYLC_WORKFLOW_RUN_DIR is reused only when the environment identifies
    the same workflow and the same user; otherwise the run dir is computed
    from *reg*.
    """
    env_run_dir = os.getenv("CYLC_WORKFLOW_RUN_DIR")
    if (
        env_run_dir
        and os.getenv("CYLC_WORKFLOW_ID") == reg
        and os.getenv("CYLC_WORKFLOW_OWNER") == get_user()
    ):
        return os.path.join(env_run_dir, WorkflowFiles.Service.DIRNAME)
    return os.path.join(
        get_workflow_run_dir(reg), WorkflowFiles.Service.DIRNAME
    )
def load_contact_file(reg: str) -> Dict[str, str]:
    """Load the workflow contact file and return it as a key=value dict.
    Raises:
        ServiceFileError: if the contact file is missing or empty.
    """
    file_content = _load_local_item(
        WorkflowFiles.Service.CONTACT, get_workflow_srv_dir(reg)
    )
    if not file_content:
        raise ServiceFileError("Couldn't load contact file")
    data: Dict[str, str] = {}
    for line in file_content.splitlines():
        key, value = (item.strip() for item in line.split("=", 1))
        # BACK COMPAT: contact pre "suite" to "workflow" conversion.
        # from:
        #     Cylc 8
        # remove at:
        #     Cylc 9
        data[key.replace('SUITE', 'WORKFLOW')] = value
    return data
async def load_contact_file_async(reg, run_dir=None):
    """Asynchronously load the contact file and return a key=value dict.
    Args:
        reg: workflow name.
        run_dir: the workflow run directory, if already known; otherwise
            it is derived from *reg*.
    Raises:
        ServiceFileError: if the contact file cannot be read.
    """
    if run_dir:
        path = Path(
            run_dir,
            WorkflowFiles.Service.DIRNAME,
            WorkflowFiles.Service.CONTACT
        )
    else:
        path = Path(
            get_workflow_srv_dir(reg),
            WorkflowFiles.Service.CONTACT
        )
    data = {}
    try:
        async with aiofiles.open(path, mode='r') as cont:
            async for line in cont:
                key, value = (item.strip() for item in line.split("=", 1))
                # BACK COMPAT: contact pre "suite" to "workflow" conversion.
                # from:
                #     Cylc 8
                # remove at:
                #     Cylc 9
                data[key.replace('SUITE', 'WORKFLOW')] = value
    except IOError:
        raise ServiceFileError("Couldn't load contact file")
    return data
def register(
    workflow_name: str, source: Optional[str] = None
) -> str:
    """Set up workflow.
    This completes some of the set up completed by cylc install.
    Called only if running a workflow that has not been installed.
    Validates workflow name.
    Validates run directory structure.
    Creates symlinks for localhost symlink dirs.
    Symlinks flow.cylc -> suite.rc.
    Creates the .service directory.
    Args:
        workflow_name: workflow name.
        source: directory location of flow.cylc file, default $PWD.
    Return:
        The installed workflow name (which may be computed here).
    Raise:
        WorkflowFilesError:
            - No flow.cylc or suite.rc file found in source location.
            - Illegal name (can look like a relative path, but not absolute).
            - Nested workflow run directories.
    """
    validate_workflow_name(workflow_name)
    if source is None:
        source = os.getcwd()
    elif os.path.basename(source) == WorkflowFiles.FLOW_FILE:
        source = os.path.dirname(source)
    # flow.cylc must exist so we can detect accidentally reversed args.
    source = os.path.abspath(source)
    check_flow_file(source)
    run_dir = get_workflow_run_dir(workflow_name)
    if not is_installed(run_dir):
        symlinks_created = make_localhost_symlinks(run_dir, workflow_name)
        for src, dst in (symlinks_created or {}).items():
            LOG.info(f"Symlink created from {src} to {dst}")
    # Create service dir if necessary.
    os.makedirs(get_workflow_srv_dir(workflow_name), exist_ok=True)
    return workflow_name
def is_installed(rund: Union[Path, str]) -> bool:
    """Check to see if the path sent contains an installed flow.
    Looks for a valid _cylc-install directory in the two possible locations
    relative to the run directory: the dir itself and its parent.
    Args:
        rund: run directory path to check
    Returns:
        bool: True if rund belongs to an installed workflow
    """
    rund = Path(rund)
    return any(
        Path(base, WorkflowFiles.Install.DIRNAME).is_dir()
        for base in (rund, rund.parent)
    )
async def get_contained_workflows(
    path: Path,
    scan_depth: Optional[int] = None
) -> List[str]:
    """Return the sorted names of any workflows in a directory.
    Args:
        path: Absolute path to the dir.
        scan_depth: How many levels deep to look inside the dir.
    """
    from cylc.flow.network.scan import scan
    names = [
        flow['name']
        async for flow in scan(scan_dir=path, max_depth=scan_depth)
    ]
    names.sort()
    return names
def _clean_check(opts: 'Values', reg: str, run_dir: Path) -> None:
    """Check whether a workflow can be cleaned.
    Args:
        opts: CLI options object for cylc clean.
        reg: Workflow name.
        run_dir: Path to the workflow run dir on the filesystem.
    Raises:
        FileNotFoundError: if there is nothing at run_dir to clean.
        ServiceFileError: if the workflow appears to still be running.
    """
    validate_workflow_name(reg)
    reg = os.path.normpath(reg)
    # Thing to clean must be a dir or broken symlink:
    if not (run_dir.is_dir() or run_dir.is_symlink()):
        raise FileNotFoundError(f"No directory to clean at {run_dir}")
    try:
        detect_old_contact_file(reg)
    except ServiceFileError as exc:
        raise ServiceFileError(f"Cannot remove running workflow.\n\n{exc}")
def init_clean(reg: str, opts: 'Values') -> None:
    """Initiate the process of removing a stopped workflow from the local
    scheduler filesystem and remote hosts.
    Args:
        reg: Workflow name.
        opts: CLI options object for cylc clean.
    """
    local_run_dir = Path(get_workflow_run_dir(reg))
    try:
        _clean_check(opts, reg, local_run_dir)
    except FileNotFoundError as exc:
        # Nothing on disk to clean; not an error.
        LOG.info(exc)
        return
    # Resolve a `runN` reg to the concrete numbered run directory.
    local_run_dir, reg_path = infer_latest_run(
        local_run_dir, implicit_runN=False
    )
    reg = str(reg_path)
    # Parse --rm option to make sure it's valid
    rm_dirs = parse_rm_dirs(opts.rm_dirs) if opts.rm_dirs else None
    # Check dir does not contain other workflows:
    scan_depth = glbl_cfg().get(['install', 'max depth']) + 1
    # NOTE(review): asyncio.get_event_loop() is deprecated for this use since
    # Python 3.10 — consider asyncio.run(); confirm no loop is running here.
    contained_workflows = asyncio.get_event_loop().run_until_complete(
        get_contained_workflows(local_run_dir, scan_depth)
    )  # Note: increased scan depth for safety
    if len(contained_workflows) == 1:
        # Clean the contained workflow followed by the parent dir
        init_clean(contained_workflows[0], opts)
        if opts.rm_dirs:
            return  # Do not delete parent dir if --rm dirs specified
    elif len(contained_workflows) > 1:
        msg = (
            f"{local_run_dir} contains the following workflows:"
            f"{WorkflowFilesError.bullet}"
            f"{WorkflowFilesError.bullet.join(contained_workflows)}"
        )
        if not opts.force:
            raise WorkflowFilesError(f"Cannot clean because {msg}")
        if opts.remote_only:
            msg = f"Not performing remote clean because {msg}"
        LOG.warning(msg)
    if (not opts.local_only) and (len(contained_workflows) == 0):
        platform_names = None
        try:
            # Install targets recorded in the workflow DB tell us where
            # remote cleaning is needed.
            platform_names = get_platforms_from_db(local_run_dir)
        except FileNotFoundError:
            if opts.remote_only:
                raise ServiceFileError(
                    "No workflow database - cannot perform remote clean"
                )
            LOG.info("No workflow database - will only clean locally")
        except ServiceFileError as exc:
            raise ServiceFileError(f"Cannot clean - {exc}")
        if platform_names and platform_names != {'localhost'}:
            remote_clean(
                reg, platform_names, opts.rm_dirs, opts.remote_timeout
            )
    if not opts.remote_only:
        # Must be after remote clean
        clean(reg, local_run_dir, rm_dirs)
def clean(reg: str, run_dir: Path, rm_dirs: Optional[Set[str]] = None) -> None:
    """Remove a stopped workflow from the local filesystem only.
    Deletes the workflow run directory and any symlink dirs, or just the
    specified sub dirs if rm_dirs is specified.
    Note: if the run dir has already been manually deleted, it will not be
    possible to clean any symlink dirs.
    Args:
        reg: Workflow name.
        run_dir: Absolute path of the workflow's run dir.
        rm_dirs: Set of sub dirs to remove instead of the whole run dir.
    """
    symlink_dirs = get_symlink_dirs(reg, run_dir)
    if rm_dirs is not None:
        # Targeted clean
        for pattern in rm_dirs:
            _clean_using_glob(run_dir, pattern, symlink_dirs)
    else:
        # Wholesale clean
        LOG.debug(f"Cleaning {run_dir}")
        for symlink in symlink_dirs:
            # Remove <symlink_dir>/cylc-run/<reg>/<symlink>
            remove_dir_and_target(run_dir / symlink)
        if '' not in symlink_dirs:
            # if run dir isn't a symlink dir and hasn't been deleted yet
            remove_dir_and_target(run_dir)
    # Tidy up if necessary
    # Remove any empty parents of run dir up to ~/cylc-run/
    remove_empty_parents(run_dir, reg)
    for symlink, target in symlink_dirs.items():
        # Remove empty parents of symlink target up to <symlink_dir>/cylc-run/
        remove_empty_parents(target, Path(reg, symlink))
    # Remove `runN` symlink if it's now broken
    runN = run_dir.parent / WorkflowFiles.RUN_N
    if (
        runN.is_symlink() and
        not run_dir.exists() and
        # only if runN still points at the dir we just removed
        os.readlink(str(runN)) == run_dir.name
    ):
        runN.unlink()
def get_symlink_dirs(reg: str, run_dir: Union[Path, str]) -> Dict[str, Path]:
    """Return the standard symlink dirs and their targets if they exist in
    the workflow run dir.
    Note: does not check the global config, only the existing run dir filetree.
    Raises WorkflowFilesError if a symlink points to an unexpected place.
    """
    found: Dict[str, Path] = {}
    # Iterate deepest-first (sub dirs before the run dir itself).
    for name in sorted(WorkflowFiles.SYMLINK_DIRS, reverse=True):
        link = Path(run_dir, name)
        if not link.is_symlink():
            continue
        target = link.resolve()
        if target.exists() and not target.is_dir():
            raise WorkflowFilesError(
                f'Invalid symlink at {link}.\n'
                f'Link target is not a directory: {target}')
        expected_end = str(Path('cylc-run', reg, name))
        if not str(target).endswith(expected_end):
            raise WorkflowFilesError(
                f'Invalid symlink at {link}\n'
                f'The target should end with "{expected_end}"'
            )
        found[name] = target
    return found
def glob_in_run_dir(
    run_dir: Union[Path, str], pattern: str, symlink_dirs: Container[Path]
) -> List[Path]:
    """Execute a (recursive) glob search in the given run directory.

    Returns list of any absolute paths that match the pattern. However:
    * Does not follow symlinks (apart from the specified symlink dirs).
    * Also does not return matching subpaths of matching directories (because
      that would be redundant).

    Args:
        run_dir: Absolute path of the workflow run dir.
        pattern: The glob pattern.
        symlink_dirs: Absolute paths to the workflow's symlink dirs.
    """
    # Note: use os.path.join, not pathlib, to preserve trailing slash if
    # present in pattern
    pattern = os.path.join(glob.escape(str(run_dir)), pattern)
    # Note: don't use pathlib.Path.glob() because when you give it an exact
    # filename instead of pattern, it doesn't return broken symlinks
    matches = sorted(Path(i) for i in glob.iglob(pattern, recursive=True))
    # sort guarantees parents come before their children
    if len(matches) == 1 and not os.path.lexists(matches[0]):
        # https://bugs.python.org/issue35201
        return []
    results: List[Path] = []
    subpath_excludes: Set[Path] = set()
    for path in matches:
        # Walk each match's ancestry (shallowest first) to decide whether the
        # match sits inside a non-standard symlink, or is already covered by
        # an earlier (parent) match.
        for rel_ancestor in reversed(path.relative_to(run_dir).parents):
            ancestor = run_dir / rel_ancestor
            if ancestor in subpath_excludes:
                break
            if ancestor.is_symlink() and ancestor not in symlink_dirs:
                # Do not follow non-standard symlinks
                subpath_excludes.add(ancestor)
                break
            if not symlink_dirs and (ancestor in results):
                # We can be sure all subpaths of this ancestor are redundant
                subpath_excludes.add(ancestor)
                break
            if ancestor == path.parent: # noqa: SIM102
                # Final iteration over ancestors
                if ancestor in matches and path not in symlink_dirs:
                    # Redundant (but don't exclude subpaths in case any of the
                    # subpaths are std symlink dirs)
                    break
        else: # No break: path survived every ancestor check; keep it.
            results.append(path)
    return results
def _clean_using_glob(
    run_dir: Path, pattern: str, symlink_dirs: Iterable[str]
) -> None:
    """Delete the files/dirs in the run dir that match the pattern.

    Does not follow symlinks (apart from the standard symlink dirs).

    Args:
        run_dir: Absolute path of workflow run dir.
        pattern: The glob pattern.
        symlink_dirs: Paths of the workflow's symlink dirs relative to
            the run dir.
    """
    # Deepest symlink dirs first so children are removed before parents.
    symlink_paths = tuple(
        sorted((run_dir / rel for rel in symlink_dirs), reverse=True)
    )
    matches = glob_in_run_dir(run_dir, pattern, symlink_paths)
    if not matches:
        LOG.info(f"No files matching '{pattern}' in {run_dir}")
        return
    # Clean any matching symlink dirs first (link and target together).
    for link in symlink_paths:
        if link not in matches:
            continue
        remove_dir_and_target(link)
        if link == run_dir:
            # The run dir itself was removed; nothing left to clean.
            return
        matches.remove(link)
    # Everything remaining is an ordinary file/dir inside the run dir.
    for match in matches:
        remove_dir_or_file(match)
def remote_clean(
    reg: str,
    platform_names: Iterable[str],
    rm_dirs: Optional[List[str]] = None,
    timeout: str = '120'
) -> None:
    """Run subprocesses to clean workflows on remote install targets
    (skip localhost), given a set of platform names to look up.

    Args:
        reg: Workflow name.
        platform_names: List of platform names to look up in the global
            config, in order to determine the install targets to clean on.
        rm_dirs: Sub dirs to remove instead of the whole run dir.
        timeout: Number of seconds to wait before cancelling.

    Raises:
        PlatformLookupError: if the platform names cannot be resolved.
        CylcError: if cleaning failed on any install target.
    """
    try:
        install_targets_map = (
            get_install_target_to_platforms_map(platform_names))
    except PlatformLookupError as exc:
        raise PlatformLookupError(
            "Cannot clean on remote platforms as the workflow database is "
            f"out of date/inconsistent with the global config - {exc}")
    queue: Deque[RemoteCleanQueueTuple] = deque()
    remote_clean_cmd = partial(
        _remote_clean_cmd, reg=reg, rm_dirs=rm_dirs, timeout=timeout
    )
    # Launch one `cylc clean --local-only` subprocess per install target.
    for target, platforms in install_targets_map.items():
        if target == get_localhost_install_target():
            continue
        # Randomise platform order (avoids always hitting the same host).
        shuffle(platforms)
        LOG.info(
            f"Cleaning on install target: {platforms[0]['install target']}"
        )
        # Issue ssh command:
        queue.append(
            RemoteCleanQueueTuple(
                remote_clean_cmd(platform=platforms[0]), target, platforms
            )
        )
    failed_targets: Dict[str, PlatformError] = {}
    # Handle subproc pool results almost concurrently:
    while queue:
        item = queue.popleft()
        ret_code = item.proc.poll()
        if ret_code is None:  # proc still running; re-queue and poll later
            queue.append(item)
            continue
        out, err = item.proc.communicate()
        if out:
            LOG.info(f"[{item.install_target}]\n{out}")
        if ret_code:
            # Non-zero exit: record the failure for this platform.
            this_platform = item.platforms.pop(0)
            excp = PlatformError(
                PlatformError.MSG_TIDY,
                this_platform['name'],
                cmd=item.proc.args,
                ret_code=ret_code,
                out=out,
                err=err,
            )
            if ret_code == 255 and item.platforms:
                # SSH error; try again using the next platform for this
                # install target
                LOG.debug(excp)
                queue.append(
                    item._replace(
                        proc=remote_clean_cmd(platform=item.platforms[0])
                    )
                )
            else:  # Exhausted list of platforms
                failed_targets[item.install_target] = excp
        elif err:
            # Only show stderr from remote host in debug mode if ret code 0
            # because stderr often contains useless stuff like ssh login
            # messages
            LOG.debug(f"[{item.install_target}]\n{err}")
        # Brief pause between polls to avoid busy-waiting.
        sleep(0.2)
    if failed_targets:
        for target, excp in failed_targets.items():
            LOG.error(
                f"Could not clean on install target: {target}\n{excp}"
            )
        raise CylcError("Remote clean failed")
def _remote_clean_cmd(
    reg: str,
    platform: Dict[str, Any],
    rm_dirs: Optional[List[str]],
    timeout: str
) -> 'Popen[str]':
    """Remove a stopped workflow on a remote host.

    Runs "cylc clean --local-only" over ssh and returns the still-running
    subprocess; the caller is responsible for polling it.

    Args:
        reg: Workflow name.
        platform: Config for the platform on which to remove the workflow.
        rm_dirs: Sub dirs to remove instead of the whole run dir.
        timeout: Number of seconds to wait before cancelling the command.
    """
    LOG.debug(
        f'Cleaning on install target: {platform["install target"]} '
        f'(using platform: {platform["name"]})'
    )
    cmd = ['clean', '--local-only', reg]
    if rm_dirs is not None:
        for rm_dir in rm_dirs:
            cmd += ['--rm', rm_dir]
    cmd = construct_ssh_cmd(
        cmd, platform,
        get_host_from_platform(platform),
        timeout=timeout, set_verbosity=True
    )
    LOG.debug(" ".join(cmd))
    # Command constructed by internal interface, hence nosec.
    return Popen(  # nosec
        cmd,
        stdin=DEVNULL,
        stdout=PIPE,
        stderr=PIPE,
        text=True,
    )
def remove_keys_on_server(keys):
    """Remove server-held authentication keys.

    WARNING, DESTRUCTIVE: deletes any existing key files, then removes the
    client public key folder wholesale.

    Args:
        keys: Mapping of key names to key objects (each exposing
            ``full_key_path``; the client public key also has ``key_path``).
    """
    for key in keys.values():
        if os.path.exists(key.full_key_path):
            os.remove(key.full_key_path)
    # Remove client public key folder
    pub_key_dir = keys["client_public_key"].key_path
    if os.path.exists(pub_key_dir):
        shutil.rmtree(pub_key_dir, onerror=handle_rmtree_err)
def create_server_keys(keys, workflow_srv_dir):
    """Create or renew authentication keys for workflow 'reg' in the .service
    directory.

    Generates a pair of ZMQ authentication keys.

    Args:
        keys: Mapping of key names to key objects (with key_path /
            full_key_path attributes).
        workflow_srv_dir: Path of the workflow's .service directory.
    """
    # ZMQ keys generated in .service directory.
    # .service/client_public_keys will store client public keys generated on
    # platform and sent back.
    # ZMQ keys need to be created with stricter file permissions, changing
    # umask default denials.
    os.makedirs(keys["client_public_key"].key_path, exist_ok=True)
    old_umask = os.umask(0o177)  # u=rw only set as default for file creation
    _server_public_full_key_path, _server_private_full_key_path = (
        zmq.auth.create_certificates(
            workflow_srv_dir,
            KeyOwner.SERVER.value))
    # cylc scan requires host to behave as a client, so copy public server
    # key into client public key folder
    # NOTE(review): the server *private* key is copied to the client private
    # key path here — presumably so the host can act as a client with the
    # server's keypair; confirm this is intended.
    server_pub_in_client_folder = keys["client_public_key"].full_key_path
    client_host_private_key = keys["client_private_key"].full_key_path
    shutil.copyfile(_server_private_full_key_path, client_host_private_key)
    shutil.copyfile(_server_public_full_key_path, server_pub_in_client_folder)
    # Return file permissions to default settings.
    os.umask(old_umask)
def get_workflow_title(reg):
    """Return the workflow title without a full file parse.

    Limitations:
    * 1st line of title only.
    * Assume title is not in an include-file.
    """
    title = NO_TITLE
    with open(get_flow_file(reg), 'r') as handle:
        for line in handle:
            stripped = line.lstrip()
            if stripped.startswith("[meta]"):
                # The title lives inside the [meta] section; keep scanning.
                continue
            if stripped.startswith("["):
                # Any other section starts: stop scanning.
                break
            match = REC_TITLE.match(line)
            if match:
                # Strip surrounding quotes from the captured title.
                title = match.groups()[0].strip('"\'')
    return title
def _load_local_item(item, path):
    """Return the content of the file ``path/item``, or None if unreadable."""
    file_path = os.path.join(path, item)
    try:
        with open(file_path) as handle:
            return handle.read()
    except IOError:
        # Missing/unreadable file is not an error for callers.
        return None
def get_platforms_from_db(run_dir):
    """Load the set of names of platforms (that jobs ran on) from the
    workflow database.

    Args:
        run_dir (str): The workflow run directory.

    Returns:
        Set of platform names.
    """
    workflow_db_mgr = WorkflowDatabaseManager(
        os.path.join(run_dir, WorkflowFiles.Service.DIRNAME))
    workflow_db_mgr.check_workflow_db_compatibility()
    # BUG FIX: open the DAO *before* the try block — in the original,
    # `pri_dao` was assigned inside `try`, so if `get_pri_dao()` raised,
    # the `finally` clause crashed on the unbound name.
    pri_dao = workflow_db_mgr.get_pri_dao()
    try:
        return pri_dao.select_task_job_platforms()
    finally:
        # Always release the DB connection.
        pri_dao.close()
def parse_reg(reg: str, src: bool = False, warn_depr=True) -> Tuple[str, Path]:
    """Centralised parsing of the workflow argument, to be used by most
    cylc commands (script modules).

    Infers the latest numbered run if a specific one is not given (e.g.
    foo -> foo/run3, foo/runN -> foo/run3).

    "Offline" commands (e.g. cylc validate) can usually be used on
    workflow sources so will need src = True.

    "Online" commands (e.g. cylc stop) are usually only used on workflows in
    the cylc-run dir so will need src = False.

    Args:
        reg: The workflow arg. Can be one of:
            - relative path to the run dir from ~/cylc-run, i.e. the "name"
              of the workflow;
            - absolute path to a run dir, source dir or workflow file (only
              if src is True);
            - '.' for the current directory (only if src is True).
        src: Whether the workflow arg can be a workflow source (i.e. an
            absolute path (which might not be in ~/cylc-run) and/or a
            flow.cylc file (or any file really), or '.' for cwd).
        warn_depr: Whether to warn if a Cylc 7 suite.rc workflow is detected.

    Returns:
        reg: The normalised workflow arg.
        path: If src is True, the absolute path to the workflow file
            (flow.cylc or suite.rc). Otherwise, the absolute path to the
            workflow run dir.
    """
    if not src:
        validate_workflow_name(reg)
    cur_dir_only = reg.startswith(f'{os.curdir}{os.sep}')  # starts with './'
    # Note: reg is rebound from str to Path from here on.
    reg: Path = Path(expand_path(reg))
    if src:
        reg, abs_path = _parse_src_reg(reg, cur_dir_only)
    else:
        abs_path = Path(get_workflow_run_dir(reg))
        if abs_path.is_file():
            raise WorkflowFilesError(
                "Workflow name must refer to a directory, "
                f"but '{reg}' is a file."
            )
        abs_path, reg = infer_latest_run(abs_path)
    detect_both_flow_and_suite(abs_path)
    check_deprecation(abs_path, warn=warn_depr)
    return (str(reg), abs_path)
def check_deprecation(path, warn=True):
    """Warn and turn on back-compat flag if Cylc 7 suite.rc detected.

    Path can point to config file or parent directory (i.e. workflow name).
    """
    is_cylc7 = (
        path.resolve().name == WorkflowFiles.SUITE_RC
        or (path / WorkflowFiles.SUITE_RC).is_file()
    )
    if is_cylc7:
        # Flip the global back-compat flag for the rest of the process.
        cylc.flow.flags.cylc7_back_compat = True
        if warn:
            LOG.warning(SUITERC_DEPR_MSG)
def _parse_src_reg(reg: Path, cur_dir_only: bool = False) -> Tuple[Path, Path]:
    """Helper function for parse_reg() when src=True.

    Args:
        reg: Reg.
        cur_dir_only: Whether the pre-normalised reg began with './'
            i.e. whether we should only look in the current directory.

    Returns:
        Tuple of (reg, absolute path of flow file or run dir).
    """
    if reg.is_absolute():
        abs_path = reg
        with suppress(ValueError):
            # ValueError if abs_path not relative to ~/cylc-run
            abs_path, reg = infer_latest_run(abs_path)
    else:
        # Relative reg: it may name a path under the current working
        # directory AND/OR a run dir under ~/cylc-run; try both.
        run_dir_path = Path(get_workflow_run_dir(reg))
        cwd = Path.cwd()
        reg = Path(os.path.normpath(cwd / reg))
        abs_path = reg
        with suppress(ValueError):
            # ValueError if abs_path not relative to ~/cylc-run
            abs_path, reg = infer_latest_run(abs_path)
        try:
            run_dir_path, run_dir_reg = infer_latest_run(run_dir_path)
        except ValueError:
            # run_dir_path not relative to ~/cylc-run
            pass
        else:
            if (
                not cur_dir_only and
                abs_path.resolve() != run_dir_path.resolve()
            ):
                # Ambiguous: reg matches both a cwd path and a run dir.
                # Prefer the cwd path, warning about the clash where both
                # resolve to flow files.
                if abs_path.is_file():
                    if run_dir_path.is_file():
                        LOG.warning(REG_CLASH_MSG.format(
                            abs_path.relative_to(cwd),
                            run_dir_path.relative_to(get_cylc_run_dir())
                        ))
                    return (reg.parent, abs_path)
                if run_dir_path.is_file():
                    return (run_dir_reg.parent, run_dir_path)
                try:
                    run_dir_path = check_flow_file(run_dir_path)
                except WorkflowFilesError:
                    # No flow file in the run dir; fall back to the cwd path.
                    try:
                        abs_path = check_flow_file(abs_path)
                    except WorkflowFilesError:
                        raise WorkflowFilesError(NO_FLOW_FILE_MSG.format(
                            f"./{abs_path.relative_to(cwd)} or {run_dir_path}"
                        ))
                else:
                    # Flow file found in the run dir; check the cwd too.
                    try:
                        abs_path = check_flow_file(abs_path)
                    except WorkflowFilesError:
                        return (run_dir_reg, run_dir_path)
                    LOG.warning(REG_CLASH_MSG.format(
                        abs_path.relative_to(cwd),
                        run_dir_path.relative_to(get_cylc_run_dir())
                    ))
    if abs_path.is_file():
        # Reg pointed at a workflow file; its parent dir is the workflow.
        reg = reg.parent
    else:
        abs_path = check_flow_file(abs_path)
    return (reg, abs_path)
def validate_workflow_name(
    name: str, check_reserved_names: bool = False
) -> None:
    """Check workflow name/ID is valid and not an absolute path.

    Args:
        name: Workflow name or ID.
        check_reserved_names: If True, also check that the name does not
            contain reserved dir names.

    Raises:
        WorkflowFilesError: if the name is not valid.
    """
    is_valid, message = WorkflowNameValidator.validate(name)
    if not is_valid:
        raise WorkflowFilesError(
            f"invalid workflow name '{name}' - {message}"
        )
    if os.path.isabs(name):
        raise WorkflowFilesError(
            f"workflow name cannot be an absolute path: {name}"
        )
    name = os.path.normpath(name)
    if name.startswith(os.curdir):
        # e.g. '../foo' normalises to a path escaping cylc-run.
        raise WorkflowFilesError(
            "Workflow name cannot be a path that points to the cylc-run "
            "directory or above"
        )
    if check_reserved_names:
        check_reserved_dir_names(name)
def check_reserved_dir_names(name: Union[Path, str]) -> None:
    """Check workflow/run name does not contain reserved dir names."""
    template = (
        "Workflow/run name cannot contain a directory named '{}' "
        "(that filename is reserved)"
    )
    for part in Path(name).parts:
        if part in WorkflowFiles.RESERVED_NAMES:
            raise WorkflowFilesError(template.format(part))
        if re.match(r'^run\d+$', part):
            # Numbered-run dirs are generated by Cylc, not users.
            raise WorkflowFilesError(template.format('run<number>'))
def infer_latest_run(
    path: Path, implicit_runN: bool = True
) -> Tuple[Path, Path]:
    """Infer the numbered run dir if the workflow has a runN symlink.

    Args:
        path: Absolute path to the workflow dir, run dir or runN dir.
        implicit_runN: If True, add runN on the end of the path if the path
            doesn't include it.

    Returns:
        path: Absolute path of the numbered run dir if applicable, otherwise
            the input arg path.
        reg: The workflow name (including the numbered run if applicable).

    Raises:
        WorkflowFilesError: if the runN symlink is not valid.
        ValueError: if the path is not in the cylc-run directory.
    """
    cylc_run_dir = get_cylc_run_dir()
    try:
        reg = path.relative_to(cylc_run_dir)
    except ValueError:
        raise ValueError(f"{path} is not in the cylc-run directory")
    if path.name == WorkflowFiles.RUN_N:
        runN_path = path
    elif implicit_runN:
        runN_path = path / WorkflowFiles.RUN_N
        if not os.path.lexists(runN_path):
            # No runN symlink: the given path is already the run dir.
            return (path, reg)
    else:
        return (path, reg)
    if not runN_path.is_symlink() or not runN_path.is_dir():
        raise WorkflowFilesError(
            f"runN directory at {runN_path} is a broken or invalid symlink"
        )
    numbered_run = os.readlink(str(runN_path))
    if not re.match(r'run\d+$', numbered_run):
        # Note: the link should be relative. This means it won't work for
        # cylc 8.0b1 workflows where it was absolute (won't fix).
        raise WorkflowFilesError(
            f"runN symlink at {runN_path} points to invalid location: "
            f"{numbered_run}"
        )
    path = runN_path.parent / numbered_run
    reg = path.relative_to(cylc_run_dir)
    return (path, reg)
def check_nested_dirs(
    run_dir: Path,
    install_dir: Optional[Path] = None
) -> None:
    """Disallow nested dirs:

    - Nested installed run dirs
    - Nested installed workflow dirs

    Args:
        run_dir: Absolute workflow run directory path.
        install_dir: Absolute workflow install directory path
            (contains _cylc-install). If None, will not check for nested
            install dirs.

    Raises:
        WorkflowFilesError: if the dir is nested inside a run dir, or
            install dirs are nested.
    """
    if install_dir is not None:
        install_dir = Path(os.path.normpath(install_dir))
    # Check parents:
    for parent_dir in run_dir.parents:
        # Stop searching at ~/cylc-run
        if parent_dir == Path(get_cylc_run_dir()):
            break
        # check for run directories:
        if is_valid_run_dir(parent_dir):
            raise WorkflowFilesError(
                NESTED_DIRS_MSG.format(
                    dir_type='run',
                    dest=run_dir,
                    existing=get_cylc_run_abs_path(parent_dir)
                )
            )
        # Check for install directories:
        if (
            install_dir
            and parent_dir in install_dir.parents
            and (parent_dir / WorkflowFiles.Install.DIRNAME).is_dir()
        ):
            raise WorkflowFilesError(
                NESTED_DIRS_MSG.format(
                    dir_type='install',
                    dest=run_dir,
                    existing=get_cylc_run_abs_path(parent_dir)
                )
            )
    if install_dir:
        # Search child tree for install directories:
        # one glob per depth level, up to the configured max scan depth.
        for depth in range(glbl_cfg().get(['install', 'max depth'])):
            search_pattern = f'*/{"*/" * depth}{WorkflowFiles.Install.DIRNAME}'
            for result in install_dir.glob(search_pattern):
                raise WorkflowFilesError(
                    NESTED_DIRS_MSG.format(
                        dir_type='install',
                        dest=run_dir,
                        existing=get_cylc_run_abs_path(result.parent)
                    )
                )
def is_valid_run_dir(path):
    """Return True if path is a valid, existing run directory, else False.

    Args:
        path (str): if this is a relative path, it is taken to be relative to
            the cylc-run directory.
    """
    # A run dir is identified by the presence of its service directory.
    service_dir = os.path.join(
        get_cylc_run_abs_path(path), WorkflowFiles.Service.DIRNAME
    )
    return os.path.isdir(service_dir)
def get_cylc_run_abs_path(path: Union[Path, str]) -> Union[Path, str]:
    """Return the absolute path under the cylc-run directory for the specified
    relative path.

    If the specified path is already absolute, just return it.
    The path need not exist.
    """
    if not os.path.isabs(path):
        return get_workflow_run_dir(path)
    return path
def _get_logger(rund, log_name):
    """Get log and create and open if necessary.

    Args:
        rund: The workflow run directory.
        log_name: Name of the logger to fetch/create.
    """
    logger = logging.getLogger(log_name)
    # BUG FIX: the original tested `logger.getEffectiveLevel != logging.INFO`,
    # comparing the bound method object itself to an int — always True.
    # The method must be *called* to get the level.
    if logger.getEffectiveLevel() != logging.INFO:
        logger.setLevel(logging.INFO)
    if not logger.hasHandlers():
        # First use of this logger: attach a file handler in the run dir.
        _open_install_log(rund, logger)
    return logger
def _open_install_log(rund, logger):
    """Open Cylc log handlers for install/reinstall."""
    timestamp = get_current_time_string(
        override_use_utc=True, use_basic_format=True,
        display_sub_seconds=False
    )
    rund = Path(rund).expanduser()
    # Strip a leading 'cylc-' from the logger name,
    # e.g. 'cylc-install' -> 'install'.
    name = logger.name
    log_type = name[len('cylc-'):] if name.startswith('cylc-') else name
    log_path = Path(
        rund,
        WorkflowFiles.LOG_DIR,
        'install',
        f"{timestamp}-{log_type}.log")
    log_path.parent.mkdir(exist_ok=True, parents=True)
    handler = logging.FileHandler(log_path)
    handler.setFormatter(CylcLogFormatter())
    logger.addHandler(handler)
def get_rsync_rund_cmd(src, dst, reinstall=False, dry_run=False):
    """Create and return the rsync command used for cylc install/re-install.

    Args:
        src (str): file path location of source directory.
        dst (str): file path location of destination directory.
        reinstall (bool): indicate reinstall (--delete option added).
        dry_run (bool): indicate dry-run; rsync will not take place but
            will report the output of a real run.

    Return:
        list: command to use for rsync.
    """
    src_path = Path(src)
    dst_path = Path(dst)
    rsync_cmd = ["rsync"] + DEFAULT_RSYNC_OPTS
    if dry_run:
        rsync_cmd.append("--dry-run")
    if reinstall:
        rsync_cmd.append('--delete')
    # Never copy VCS metadata or Cylc-managed dirs into the run dir.
    candidate_excludes = [
        '.git',
        '.svn',
        '.cylcignore',
        'rose-suite.conf',
        'opt/rose-suite-cylc-install.conf',
        WorkflowFiles.LOG_DIR,
        WorkflowFiles.WORK_DIR,
        WorkflowFiles.SHARE_DIR,
        WorkflowFiles.Install.DIRNAME,
        WorkflowFiles.Service.DIRNAME
    ]
    rsync_cmd.extend(
        f"--exclude={exclude}"
        for exclude in candidate_excludes
        if (src_path / exclude).exists() or (dst_path / exclude).exists()
    )
    if (src_path / '.cylcignore').exists():
        # Honour user-specified exclusions too.
        rsync_cmd.append("--exclude-from=.cylcignore")
    rsync_cmd.append(f"{src}/")
    rsync_cmd.append(f"{dst}/")
    return rsync_cmd
def reinstall_workflow(named_run, rundir, source, dry_run=False):
    """Reinstall workflow.

    Args:
        named_run (str): name of the run e.g. my-flow/run1.
        rundir (path): run directory.
        source (path): source directory.
        dry_run (bool): if True, will not execute the file transfer but
            report what would be changed.
    """
    validate_source_dir(source, named_run)
    check_nested_dirs(rundir)
    reinstall_log = _get_logger(rundir, 'cylc-reinstall')
    reinstall_log.info(
        f"Reinstalling \"{named_run}\", from \"{source}\" to \"{rundir}\""
    )
    rsync_cmd = get_rsync_rund_cmd(
        source, rundir, reinstall=True, dry_run=dry_run)
    # Command is constructed via an internal interface, hence nosec.
    proc = Popen(rsync_cmd, stdout=PIPE, stderr=PIPE, text=True)  # nosec
    stdout, stderr = proc.communicate()
    reinstall_log.info(
        f"Copying files from {source} to {rundir}"
        f'\n{stdout}'
    )
    if proc.returncode != 0:
        # Log but do not abort: partial transfers are reported to the user.
        reinstall_log.warning(
            f"An error occurred when copying files from {source} to {rundir}")
        reinstall_log.warning(f" Error: {stderr}")
    check_flow_file(rundir)
    reinstall_log.info(f'REINSTALLED {named_run} from {source}')
    print(f'REINSTALLED {named_run} from {source}')
    close_log(reinstall_log)
def install_workflow(
    workflow_name: Optional[str] = None,
    source: Optional[Union[Path, str]] = None,
    run_name: Optional[str] = None,
    no_run_name: bool = False,
    cli_symlink_dirs: Optional[Dict[str, Dict[str, Any]]] = None
) -> Tuple[Path, Path, str]:
    """Install a workflow, or renew its installation.

    Install workflow into new run directory.
    Create symlink to workflow source location, creating any symlinks for
    run, work, log, share, share/cycle directories.

    Args:
        workflow_name: workflow name, default basename($PWD).
        source: directory location of flow.cylc file, default $PWD.
        run_name: name of the run, overrides run1, run2, run 3 etc...
            If specified, cylc install will not create runN symlink.
        no_run_name: Flag as True to install workflow into
            ~/cylc-run/<workflow_name>
        cli_symlink_dirs: Symlink dirs, if entered on the cli.

    Return:
        source: source directory.
        rundir: directory the workflow has been installed into.
        workflow_name: installed workflow name (which may be computed here).

    Raise:
        WorkflowFilesError:
            No flow.cylc file found in source location.
            Illegal name (can look like a relative path, but not absolute).
            Another workflow already has this name (unless --redirect).
            Trying to install a workflow that is nested inside of another.
    """
    if not source:
        source = Path.cwd()
    elif Path(source).name == WorkflowFiles.FLOW_FILE:
        # A path to the flow file was given; use its parent dir as source.
        source = Path(source).parent
    source = Path(expand_path(source))
    if not workflow_name:
        workflow_name = source.name
    validate_workflow_name(workflow_name, check_reserved_names=True)
    if run_name is not None:
        if len(Path(run_name).parts) != 1:
            raise WorkflowFilesError(
                f'Run name cannot be a path. (You used {run_name})'
            )
        check_reserved_dir_names(run_name)
    validate_source_dir(source, workflow_name)
    run_path_base = Path(get_workflow_run_dir(workflow_name))
    relink, run_num, rundir = get_run_dir_info(
        run_path_base, run_name, no_run_name)
    # Refuse installs too deep to be found by `cylc scan`.
    max_scan_depth = glbl_cfg().get(['install', 'max depth'])
    workflow_id = rundir.relative_to(get_cylc_run_dir())
    if len(workflow_id.parts) > max_scan_depth:
        raise WorkflowFilesError(
            f"Cannot install: workflow ID '{workflow_id}' would exceed "
            f"global.cylc[install]max depth = {max_scan_depth}"
        )
    check_nested_dirs(rundir, run_path_base)
    if rundir.exists():
        raise WorkflowFilesError(
            f'"{rundir}" exists.\n'
            " To install a new run use `cylc install --run-name`,"
            " or to reinstall use `cylc reinstall`."
        )
    symlinks_created = {}
    # Compose the run's display name, e.g. 'my-flow/run1'.
    named_run = workflow_name
    if run_name:
        named_run = os.path.join(named_run, run_name)
    elif run_num:
        named_run = os.path.join(named_run, f'run{run_num}')
    symlinks_created = make_localhost_symlinks(
        rundir, named_run, symlink_conf=cli_symlink_dirs)
    install_log = _get_logger(rundir, 'cylc-install')
    if symlinks_created:
        for src, dst in symlinks_created.items():
            install_log.info(f"Symlink created from {src} to {dst}")
    try:
        rundir.mkdir(exist_ok=True, parents=True)
    except FileExistsError:
        # This occurs when the file exists but is _not_ a directory.
        raise WorkflowFilesError(
            f"Cannot install as there is an existing file at {rundir}."
        )
    if relink:
        link_runN(rundir)
    create_workflow_srv_dir(rundir)
    rsync_cmd = get_rsync_rund_cmd(source, rundir)
    proc = Popen(rsync_cmd, stdout=PIPE, stderr=PIPE, text=True)  # nosec
    # * command is constructed via internal interface
    stdout, stderr = proc.communicate()
    install_log.info(
        f"Copying files from {source} to {rundir}"
        f"\n{stdout}"
    )
    if proc.returncode != 0:
        install_log.warning(
            f"An error occurred when copying files from {source} to {rundir}")
        install_log.warning(f" Warning: {stderr}")
    cylc_install = Path(rundir.parent, WorkflowFiles.Install.DIRNAME)
    check_deprecation(check_flow_file(rundir))
    if no_run_name:
        # No runN level: _cylc-install lives in the run dir itself.
        cylc_install = Path(rundir, WorkflowFiles.Install.DIRNAME)
    source_link = cylc_install.joinpath(WorkflowFiles.Install.SOURCE)
    # check source link matches the source symlink from workflow dir.
    cylc_install.mkdir(parents=True, exist_ok=True)
    if not source_link.exists():
        if source_link.is_symlink():
            # Condition represents a broken symlink.
            raise WorkflowFilesError(
                f'Symlink broken: {source_link} -> {source_link.resolve()}.'
            )
        install_log.info(f"Creating symlink from {source_link}")
        source_link.symlink_to(source.resolve())
    else:
        # Reinstalls must come from the same source as before.
        if source_link.resolve() != source.resolve():
            raise WorkflowFilesError(
                f"Failed to install from {source.resolve()}: "
                f"previous installations were from {source_link.resolve()}"
            )
        install_log.info(
            f'Symlink from "{source_link}" to "{source}" in place.')
    install_log.info(f'INSTALLED {named_run} from {source}')
    print(f'INSTALLED {named_run} from {source}')
    close_log(install_log)
    return source, rundir, workflow_name
def get_run_dir_info(
    run_path_base: Path, run_name: Optional[str], no_run_name: bool
) -> Tuple[bool, Optional[int], Path]:
    """Get (numbered, named or unnamed) run directory info for current install.

    Args:
        run_path_base: The workflow directory absolute path.
        run_name: Name of the run.
        no_run_name: Flag as True to indicate no run name - workflow installed
            into ~/cylc-run/<run_path_base>.

    Returns:
        relink: True if runN symlink needs updating.
        run_num: Run number of the current install, if using numbered runs.
        rundir: Run directory absolute path.
    """
    if no_run_name:
        # Unnamed: install directly into the base dir; no runN handling.
        return False, None, run_path_base
    if run_name:
        # Named run: must not coexist with numbered runs.
        if (run_path_base.exists() and
                detect_flow_exists(run_path_base, True)):
            raise WorkflowFilesError(
                f"--run-name option not allowed as '{run_path_base}' contains "
                "installed numbered runs.")
        return False, None, run_path_base.joinpath(run_name)
    # Numbered run: allocate the next number and refresh the runN link.
    run_num = get_next_rundir_number(run_path_base)
    rundir = Path(run_path_base, f'run{run_num}')
    if run_path_base.exists() and detect_flow_exists(run_path_base, False):
        raise WorkflowFilesError(
            f"Path: \"{run_path_base}\" contains an installed"
            " workflow. Use --run-name to create a new run.")
    unlink_runN(run_path_base)
    return True, run_num, rundir
def detect_both_flow_and_suite(path: Path) -> None:
    """Detect whether both suite.rc and flow.cylc are present in a directory.

    Permits flow.cylc to be a symlink (to suite.rc within the directory).

    Raises:
        WorkflowFilesError: If both flow.cylc and suite.rc are in directory
            (in a forbidden arrangement).
    """
    msg = (f"Both {WorkflowFiles.FLOW_FILE} and {WorkflowFiles.SUITE_RC} "
           f"files are present in {path}. Please remove one and"
           " try again. For more information visit: https://cylc.github.io/"
           "cylc-doc/latest/html/7-to-8/summary.html#backward-compatibility")
    if path.resolve().name == WorkflowFiles.SUITE_RC:
        # Given the suite.rc itself: look for a sibling flow.cylc.
        flow_cylc = path.parent / WorkflowFiles.FLOW_FILE
    elif (path / WorkflowFiles.SUITE_RC).is_file():
        flow_cylc = path / WorkflowFiles.FLOW_FILE
    else:
        flow_cylc = None
    if (
        flow_cylc is not None
        and flow_cylc.is_file()
        and is_forbidden(flow_cylc)
    ):
        raise WorkflowFilesError(msg)
def is_forbidden(flow_file: Path) -> bool:
    """Returns True for a forbidden file structure scenario.

    Forbidden criteria:
        A symlink elsewhere on the file system while suite.rc also exists
        in the directory.
        flow.cylc and suite.rc in same directory but no symlink.

    Args:
        flow_file: Absolute Path to the flow.cylc file.
    """
    suite_rc = flow_file.parent.resolve() / WorkflowFiles.SUITE_RC
    if not flow_file.is_symlink():
        # Plain file: forbidden only if suite.rc coexists in the dir.
        return flow_file.parent.joinpath(WorkflowFiles.SUITE_RC).exists()
    if flow_file.resolve() == suite_rc:
        # Symlink points within the dir to suite.rc (permitted).
        return False
    # Symlink points elsewhere: forbidden if suite.rc also exists here.
    return suite_rc.exists()
def detect_flow_exists(
    run_path_base: Union[Path, str], numbered: bool
) -> bool:
    """Return True if an installed flow already exists.

    Args:
        run_path_base: Absolute path of workflow directory,
            i.e ~/cylc-run/<workflow_name>
        numbered: If True, will detect if numbered runs exist. If False, will
            detect if non-numbered runs exist, i.e. runs installed
            by --run-name.
    """
    # Dirs that never count as runs themselves.
    skip = {WorkflowFiles.Install.DIRNAME, WorkflowFiles.RUN_N}
    return any(
        entry.is_dir()
        and entry.name not in skip
        and Path(entry, WorkflowFiles.FLOW_FILE).exists()
        and bool(re.search(r'^run\d+$', entry.name)) == numbered
        for entry in Path(run_path_base).iterdir()
    )
def check_flow_file(path: Union[Path, str]) -> Path:
    """Check the path for a suite.rc or flow.cylc file.

    Args:
        path: Absolute path to check for a flow.cylc and/or suite.rc file.

    Returns:
        The path of the flow file if present (flow.cylc takes precedence).

    Raises:
        WorkflowFilesError:
            - if no flow file in path sent
            - both suite.rc and flow.cylc in path sent.
    """
    expanded = Path(expand_path(path))
    flow_file_path = expanded / WorkflowFiles.FLOW_FILE
    if flow_file_path.is_file():
        # Guard against flow.cylc and suite.rc coexisting.
        detect_both_flow_and_suite(Path(path))
        return flow_file_path
    suite_rc_path = expanded / WorkflowFiles.SUITE_RC
    if suite_rc_path.is_file():
        return suite_rc_path
    raise WorkflowFilesError(NO_FLOW_FILE_MSG.format(path))
def create_workflow_srv_dir(rundir: Path) -> None:
    """Create the workflow service directory under the run dir."""
    (rundir / WorkflowFiles.Service.DIRNAME).mkdir(
        exist_ok=True, parents=True)
def validate_source_dir(source, workflow_name):
    """Ensure the source directory is valid:

    - has flow file
    - does not contain reserved dir names
    - is not inside ~/cylc-run.

    Args:
        source (path): Path to source directory.
        workflow_name (str): Workflow name (used in error messages).

    Raises:
        WorkflowFilesError:
            If log, share, work or _cylc-install directories exist in the
            source directory, or the source is inside the cylc-run dir.
    """
    # Ensure source dir does not contain log, share, work, _cylc-install
    for dir_ in WorkflowFiles.RESERVED_DIRNAMES:
        if Path(source, dir_).exists():
            raise WorkflowFilesError(
                f"{workflow_name} installation failed. "
                f"- {dir_} exists in source directory.")
    cylc_run_dir = Path(get_cylc_run_dir())
    # BUG FIX: the original used a substring test (`in`) on the path strings,
    # which wrongly rejected any source path merely *containing* the
    # cylc-run dir path as text (e.g. '/data<cylc-run-path>-backup').
    # Use a real path-prefix comparison instead.
    source_abs = os.path.abspath(os.path.realpath(source))
    cylc_run_abs = os.path.abspath(os.path.realpath(cylc_run_dir))
    if (
        source_abs == cylc_run_abs
        or source_abs.startswith(cylc_run_abs + os.sep)
    ):
        raise WorkflowFilesError(
            f"{workflow_name} installation failed. Source directory "
            f"should not be in {cylc_run_dir}.")
    check_flow_file(source)
def parse_cli_sym_dirs(symlink_dirs: str) -> Dict[str, Dict[str, Any]]:
    """Convert command line entered symlink dirs to a dictionary.

    Args:
        symlink_dirs: As entered by user on cli,
            e.g. "log=$DIR, share=$DIR2".

    Raises:
        WorkflowFilesError: If directory to be symlinked is not in permitted
            dirs: run, log, share, work, share/cycle

    Returns:
        dict: In the same form as would be returned by global config,
            e.g. {'localhost': {'log': '$DIR', 'share': '$DIR2'}}.
    """
    # Same nested-dict shape as returned by the global config.
    symdict: Dict[str, Dict[str, Any]] = {'localhost': {'run': None}}
    if symlink_dirs == "":
        return symdict
    permitted = set(WorkflowFiles.SYMLINK_DIRS.union(
        {WorkflowFiles.RUN_DIR})
    )
    permitted.remove('')
    for pair in symlink_dirs.strip(',').split(','):
        try:
            key, val = pair.split("=")
            key = key.strip()
        except ValueError:
            # Not a single key=value pair.
            raise UserInputError(
                'There is an error in --symlink-dirs option:'
                f' {pair}. Try entering option in the form '
                '--symlink-dirs=\'log=$DIR, share=$DIR2, ...\''
            )
        if key not in permitted:
            dirs = ', '.join(permitted)
            raise UserInputError(
                f"{key} not a valid entry for --symlink-dirs. "
                f"Configurable symlink dirs are: {dirs}"
            )
        # Empty values mean "no symlink" (None), matching global config.
        symdict['localhost'][key] = val.strip() or None
    return symdict
def unlink_runN(path: Union[Path, str]) -> bool:
"""Remove symlink runN if it exists.
Args:
path: Absolute path to workflow dir containing runN.
"""
try:
Path(expand_path(path, WorkflowFiles.RUN_N)).unlink()
except OSError:
return False
return True
def link_runN(latest_run: Union[Path, str]):
"""Create symlink runN, pointing at the latest run"""
latest_run = Path(latest_run)
run_n = Path(latest_run.parent, WorkflowFiles.RUN_N)
with suppress(OSError):
run_n.symlink_to(latest_run.name)
def search_install_source_dirs(workflow_name: str) -> Path:
"""Return the path of a workflow source dir if it is present in the
'global.cylc[install]source dirs' search path."""
search_path: List[str] = glbl_cfg().get(['install', 'source dirs'])
if not search_path:
raise WorkflowFilesError(
"Cannot find workflow as 'global.cylc[install]source dirs' "
"does not contain any paths")
for path in search_path:
try:
flow_file = check_flow_file(Path(path, workflow_name))
return flow_file.parent
except WorkflowFilesError:
continue
raise WorkflowFilesError(
f"Could not find workflow '{workflow_name}' in: "
f"{', '.join(search_path)}")<|fim▁end|> | temp)
elif (
(key_owner is KeyOwner.CLIENT
and key_type is KeyType.PUBLIC |
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
{
'name': 'Purchase Price List Item',
'version': '7.0.1.0.0',
'category': 'Purchase',
'sequence': 19,
'summary': 'Purchase Price List Item',
'description': """
Improve purchase price managment
================================
* In Purchase List Item, the price is fixed based on price_surchage if base is 'fixed on UOP'<|fim▁hole|> * If 'fixed on UOP', if product UOP change, the price list price will be change automtically.
* Add field 'Qty on Hand', and 'Stock Values' for product
* Add field 'Qty on Hand', 'Stock Values', UOP in product list view
""",
'author': 'Elico Corp',
'website': 'https://www.elico-corp.com',
'images' : [],
'depends': ['purchase'],
'data': [
'purchase_view.xml',
],
'test': [],
'demo': [],
'installable': True,
'auto_install': False,
'application': False,
}<|fim▁end|> | |
<|file_name|>chronopost.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
In order to use this lib, you have to call the method 'get_shipping_label'
with the right arguments. To know which keys to send to this method read these lists :
- required fields
- fields
"""
from datetime import datetime
import re
from suds.client import Client, WebFault
from .label_helper import AbstractLabel
from .exception_helper import (
InvalidSequence,
InvalidWeight,
InvalidSize,
InvalidType,
InvalidMissingField,
InvalidZipCode,
InvalidCountry,
InvalidDate,
InvalidCode,
InvalidValue,
InvalidValueNotInList,
)
WEBSERVICE_URL = 'https://ws.chronopost.fr/shipping-cxf/ShippingServiceWS?wsdl'
ESD_MODEL = {
"retrievalDateTime": {'max_size': 17},
"closingDateTime": {'max_size': 17},
"specificInstructions": {'max_size': 255},
"height": {'required': True},
"width": {'required': True},
"length": {'required': True},
"shipperCarriesCode": {'max_size': 38},
"shipperBuildingFloor": {'max_size': 32},
"shipperServiceDirection": {'max_size': 32},
}
HEADER_MODEL = {
"accountNumber": {'required': True, 'max_size': 8},
"subAccount": {'max_size': 3},
}
ADDRESS_MODEL = {
"civility": {'in': ['E', 'L', 'M']},
"name": {'required': True, 'max_size': 100},
"name2": {'required': True, 'max_size': 100},
"street": {'required': True, 'max_size': 38},
"street2": {'max_size': 38},
"zip": {'required': True, 'max_size': 9},
"city": {'required': True, 'max_size': 50},
"country_code": {'required': True, 'max_size': 2},
"phone": {'max_size': 17},
"mobile": {'max_size': 17},
"email": {'max_size': 80},
"alert": {}, #FIXME
}
REF_MODEL = {
"shipperRef": {'required': True, 'max_size': 35},
"recipientRef": {'required': True},
"customerSkybillNumber": {'max_size': 38},
}
SKYBILL_MODEL = {
"productCode": {'required': True},
"shipDate": {'max_size': 38},
"shipHour": {'required': True, 'max_size': 9},
"weight": {'required': True, 'type': float},
"weightUnit": {'required': True},
"insuredValue": {'type': int},
"insuredCurrency": {'max_size': 17},
"codValue": {'type': int},
"codCurrency": {'max_size': 80},
"customsValue": {'type': int},
"customsCurrency": {'max_size': 80},
"service": {'max_size': 1},
"objectType": {'max_size': 80},<|fim▁hole|> "content2": {'max_size': 80},
"content3": {'max_size': 80},
"content4": {'max_size': 80},
"content5": {'max_size': 80},
}
def is_digit(s):
return re.search("[^0-9]", s) is None
class Chronopost(AbstractLabel):
_client = None
def __init__(self):
self._client = Client(WEBSERVICE_URL)
def _send_request(self, request, *args):
""" Wrapper for API requests
:param request: callback for API request
:param **kwargs: params forwarded to the callback
"""
res = {}
try:
res['value'] = request(*args)
res['success'] = True
except WebFault as e:
res['success'] = False
res['errors'] = [e[0]]
except Exception as e:
# if authentification error
#if isinstance(e[0], tuple) and e[0][0] == 401:
#raise e[0][0]
raise e
return res
def _prepare_skybillparams(self, mode):
skybillparams_obj = self._client.factory.create('skybillParamsValue')
valid_values = ['PDF', 'PPR', 'SPD', 'THE', 'ZPL', 'XML']
if mode in valid_values:
skybillparams_obj['mode'] = mode
else:
raise InvalidValueNotInList(
"The printing mode must be in %s" % valid_values)
return skybillparams_obj
def _check_password(self, password):
if is_digit(password) is False:
raise InvalidType(
"Only digit chars are authorised for 'account' '%s'"
% account)
if len(str(password)) != 6:
raise InvalidSize(
"The password have to contain 6 characters")
return password
def _prepare_skybill(self, info):
self.check_model(info, SKYBILL_MODEL, 'skybill')
skybill_obj = self._client.factory.create('skybillValue')
#for key in info.keys():
# skybill_obj[key] = info[key]
skybill_obj = info.copy()
skybill_obj['evtCode'] = 'DC'
return skybill_obj
def _prepare_ref(self, info):
self.check_model(info, REF_MODEL, 'ref')
ref_obj = self._client.factory.create('refValue')
#for key in info.keys():
# ref_obj[key] = info[key]
ref_obj = info.copy()
return ref_obj
def _prepare_esd(self, info):
self.check_model(info, ESD_MODEL, 'esd')
esd_obj = self._client.factory.create('esdValue')
#esd_obj['retrievalDateTime'] = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
#esd_obj['closingDateTime'] = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
esd_obj = info.copy()
return esd_obj
def _prepare_customer_address(self, address):
customer_model = ADDRESS_MODEL.copy()
customer_model['civility'] = {'in': ['E', 'L', 'M'], 'required': True}
customer_model['print_as_sender'] = {'in': ['Y', 'N']}
print "****", address, "***"
self.check_model(address, customer_model, 'address')
elements = {
'customerCivility': 'civility',
'customerName': 'name',
'customerName2': 'name2',
'customerAdress1': 'street',
'customerAdress2': 'street2',
'customerZipCode': 'zip',
'customerCity': 'city',
'customerCountry': 'country_code',
'customerCountryName': 'country_name',
'customerContactName': 'contact_name',
'customerEmail': 'email',
'customerPhone': 'phone',
'customerMobilePhone': 'mobile',
'customerPreAlert': 'alert',
'printAsSender': 'print_as_sender'
}
customer = self._client.factory.create('customerValue')
return customer, elements
def _prepare_shipper_address(self, address):
shipper_model = ADDRESS_MODEL.copy()
shipper_model['civility'] = {'in': ['E', 'L', 'M'], 'required': True}
self.check_model(address, shipper_model, 'address')
elements = {
'shipperCivility': 'civility',
'shipperName': 'name',
'shipperName2': 'name2',
'shipperAdress1': 'street',
'shipperAdress2': 'street2',
'shipperZipCode': 'zip',
'shipperCity': 'city',
'shipperCountry': 'country_code',
'shipperCountryName': 'country_name',
'shipperContactName': False,
'shipperEmail': 'email',
'shipperPhone': 'phone',
'shipperMobilePhone': 'mobile',
'shipperPreAlert': 'alert',
}
shipper = self._client.factory.create('shipperValue')
return shipper, elements
def _prepare_recipient_address(self, address):
print "address", address
self.check_model(address, ADDRESS_MODEL, 'address')
elements = {
'recipientName': 'name',
'recipientName2': 'name2',
'recipientAdress1': 'street',
'recipientAdress2': 'street2',
'recipientZipCode': 'zip',
'recipientCity': 'city',
'recipientCountry': 'country_code',
'recipientCountryName': 'country_name',
'recipientContactName': 'contact_name',
'recipientEmail': 'email',
'recipientPhone': 'phone',
'recipientMobilePhone': 'mobile',
'recipientPreAlert': 'alert',
}
recipient = self._client.factory.create('recipientValue')
return recipient, elements
def _prepare_address(self, values, info_type):
if info_type == 'recipient':
obj, elements = self._prepare_recipient_address(values)
if info_type == 'shipper':
obj, elements = self._prepare_shipper_address(values)
if info_type == 'customer':
obj, elements = self._prepare_customer_address(values)
if obj and elements and values:
for elm, elm_v in elements.items():
obj[elm] = ''
if elm_v in values:
obj[elm] = values[elm_v]
return obj
def _check_account(self, account):
if is_digit(account) is False:
raise InvalidType(
"Only digit chars are authorised for 'account' '%s'"
% account)
return account
def _prepare_header(self, vals):
self.check_model(vals, HEADER_MODEL, 'header')
self._check_account(vals['accountNumber'])
header = self._client.factory.create('headerValue')
header['idEmit'] = 'CHRFR'
header['accountNumber'] = vals['accountNumber']
if vals.get('subAccount', False):
self._check_account(vals['subAccount'])
header['subAccount'] = vals['subAccount']
return header
def get_shipping_label(self, recipient, shipper, header, ref, skybill,
password, esd=None, mode=False, customer = None):
"""
Call Chronopost 'shipping' web service and return the label in binary.
Params TODO
"""
if not customer:
customer = shipper.copy()
header_obj = self._prepare_header(header.copy())
recipient_obj = self._prepare_address(recipient.copy(), 'recipient')
shipper_obj = self._prepare_address(shipper.copy(), 'shipper')
customer_obj = self._prepare_address(customer.copy(), 'customer')
if esd:
esd_obj = self._prepare_esd(esd.copy())
else:
esd_obj = self._client.factory.create('esdValue')
ref_obj = self._prepare_ref(ref.copy())
skybill_obj = self._prepare_skybill(skybill.copy())
password = self._check_password(password)
if mode:
skybillparams_obj = self._prepare_skybillparams(mode)
else:
skybillparams_obj = self._client.factory.create('skybillParamsValue')
#test = self._client.service.shipping(esd, head, shiping, customer, recipient, ref, sky, bill, '255562')
request = self._client.service.shipping
response = self._send_request(request, esd_obj, header_obj, shipper_obj,
customer_obj, recipient_obj, ref_obj,
skybill_obj, skybillparams_obj, password)
return response<|fim▁end|> | "content1": {'max_size': 80}, |
<|file_name|>51_N-Queens.py<|end_file_name|><|fim▁begin|>class Solution(object):
def solveNQueens(self, n):
"""
:type n: int
:rtype: List[List[str]]
"""
def search(cur):
if cur == n:
add_answer()
else:
for i in range(n):
ok = True
rows[cur] = i
for j in range(cur):
if not is_valied(cur, j):
ok = False
break
if ok:
search(cur + 1)
def is_valied(pre_row, cur_row):
if rows[pre_row] == rows[cur_row] or \
pre_row - rows[pre_row] == cur_row - rows[cur_row] or \
pre_row + rows[pre_row] == cur_row + rows[cur_row]:
return False
else:
return True
def add_answer():
ans = []
for num in rows:
res_str = ""
for i in range(n):
if i == num:
res_str += "Q"
else:<|fim▁hole|>
result = []
rows = [0] * n
search(0)
return result
print Solution().solveNQueens(4)<|fim▁end|> | res_str += "."
ans.append(res_str)
result.append(ans) |
<|file_name|>transform_resource_count.go<|end_file_name|><|fim▁begin|>package terraform
import (
"github.com/hashicorp/terraform/addrs"
"github.com/hashicorp/terraform/configs/configschema"
"github.com/hashicorp/terraform/dag"
)
// ResourceCountTransformer is a GraphTransformer that expands the count
// out for a specific resource.
//
// This assumes that the count is already interpolated.
type ResourceCountTransformer struct {
Concrete ConcreteResourceInstanceNodeFunc
Schema *configschema.Block
// Count is either the number of indexed instances to create, or -1 to
// indicate that count is not set at all and thus a no-key instance should
// be created.
Count int
Addr addrs.AbsResource
}
func (t *ResourceCountTransformer) Transform(g *Graph) error {
if t.Count < 0 {
// Negative count indicates that count is not set at all.
addr := t.Addr.Instance(addrs.NoKey)
abstract := NewNodeAbstractResourceInstance(addr)
abstract.Schema = t.Schema
var node dag.Vertex = abstract
if f := t.Concrete; f != nil {
node = f(abstract)
}
g.Add(node)
return nil
}
// For each count, build and add the node
for i := 0; i < t.Count; i++ {
key := addrs.IntKey(i)
addr := t.Addr.Instance(key)
abstract := NewNodeAbstractResourceInstance(addr)<|fim▁hole|> abstract.Schema = t.Schema
var node dag.Vertex = abstract
if f := t.Concrete; f != nil {
node = f(abstract)
}
g.Add(node)
}
return nil
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.<|fim▁hole|>from .client import FeedItemSetLinkServiceClient
__all__ = ("FeedItemSetLinkServiceClient",)<|fim▁end|> | # |
<|file_name|>sleep_schedule.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
"""Pauses the execution of the bot every day for some time
Simulates the user going to sleep every day for some time, the sleep time
and the duration is changed every day by a random offset defined in the
config file
Example Config:
"sleep_schedule": [
{
"time": "12:00",
"duration": "5:30",
"time_random_offset": "00:30",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
},
{
"time": "17:45",
"duration": "3:00",
"time_random_offset": "01:00",
"duration_random_offset": "00:30",
"wake_up_at_location": ""
}
]
time: (HH:MM) local time that the bot should sleep
duration: (HH:MM) the duration of sleep
time_random_offset: (HH:MM) random offset of time that the sleep will start
for this example the possible start time is 11:30-12:30
duration_random_offset: (HH:MM) random offset of duration of sleep
for this example the possible duration is 5:00-6:00
wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wake up
*Note that an empty string ("") will not change the location*. """
LOG_INTERVAL_SECONDS = 600
SCHEDULING_MARGIN = timedelta(minutes=10) # Skip if next sleep is RESCHEDULING_MARGIN from now
def __init__(self, bot, config):
self.bot = bot
self._process_config(config)
self._schedule_next_sleep()
self._calculate_current_sleep()
def work(self):
if self._should_sleep_now():
self._sleep()
wake_up_at_location = self._wake_up_at_location
self._schedule_next_sleep()
if wake_up_at_location:
if hasattr(self.bot, 'api'): # Check if api is already initialized
self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
else:
self.bot.wake_location = wake_up_at_location
if hasattr(self.bot, 'api'): self.bot.login() # Same here
def _process_config(self, config):<|fim▁hole|> for entry in config:
prepared = {}
prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
# Using datetime for easier stripping of timedeltas
raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
time_random_offset = int(
timedelta(
hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
duration_random_offset = int(
timedelta(
hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
if raw_wake_up_at_location:
try:
wake_up_at_location = raw_wake_up_at_location.split(',',2)
lat=float(wake_up_at_location[0])
lng=float(wake_up_at_location[1])
if len(wake_up_at_location) == 3:
alt=float(wake_up_at_location[2])
else:
alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
except ValueError:
raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
prepared['wake_up_at_location'] = [lat, lng, alt]
prepared['duration'] = duration
prepared['time_random_offset'] = time_random_offset
prepared['duration_random_offset'] = duration_random_offset
self.entries.append(prepared)
def _schedule_next_sleep(self):
self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
self.bot.event_manager.emit(
'next_sleep',
sender=self,
formatted="Next sleep at {time}",
data={
'time': str(self._next_sleep)
}
)
def _calculate_current_sleep(self):
self._current_sleep = self._next_sleep - timedelta(days=1)
current_duration = self._next_duration
self._current_end = self._current_sleep + timedelta(seconds = current_duration)
def _should_sleep_now(self):
if datetime.now() >= self._next_sleep:
return True
if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
self._next_duration = (self._current_end - datetime.now()).total_seconds()
return True
return False
def _get_next_sleep_schedule(self):
now = datetime.now() + self.SCHEDULING_MARGIN
times = []
for index in range(len(self.entries)):
next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
# If sleep time is passed add one day
if next_time <= now:
next_time += timedelta(days=1)
times.append(next_time)
diffs = {}
for index in range(len(self.entries)):
diff = (times[index]-now).total_seconds()
if diff >= 0: diffs[index] = diff
closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
next_time = times[closest]
next_duration = self._get_next_duration(self.entries[closest])
location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
return next_time, next_duration, location
def _get_next_duration(self, entry):
duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
return duration
def _get_random_offset(self, max_offset):
offset = uniform(-max_offset, max_offset)
return int(offset)
def _sleep(self):
sleep_to_go = self._next_duration
sleep_m, sleep_s = divmod(sleep_to_go, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
now = datetime.now()
wake = str(now + timedelta(seconds=sleep_to_go))
self.bot.event_manager.emit(
'bot_sleep',
sender=self,
formatted="Sleeping for {time_hms}, wake at {wake}",
data={
'time_hms': sleep_hms,
'wake': wake
}
)
while sleep_to_go > 0:
if sleep_to_go < self.LOG_INTERVAL_SECONDS:
sleep(sleep_to_go)
sleep_to_go = 0
else:
sleep(self.LOG_INTERVAL_SECONDS)
sleep_to_go -= self.LOG_INTERVAL_SECONDS<|fim▁end|> | self.entries = [] |
<|file_name|>server.js<|end_file_name|><|fim▁begin|>var fs = require("fs");
var express = require("express"),
optimist = require("optimist"),
gitstatic = require("../");
var argv = optimist.usage("Usage: $0")
.options("h", {
alias: "help",
describe: "display this help text"
})
.options("repository", {
default: ".git",
describe: "path to bare git repository"
})
.options("port", {
default: 3000,
describe: "http port"
})
.check(function(argv) {
if (argv.help) throw "";
try { var stats = fs.statSync(argv.repository); } catch (e) { throw "Error: " + e.message; }
if (!stats.isDirectory()) throw "Error: invalid --repository directory.";
})<|fim▁hole|> .argv;
var server = express();
server.get(/^\/.*/, gitstatic.route()
.repository(argv.repository));
server.listen(argv.port);<|fim▁end|> | |
<|file_name|>quickopen.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { SpectronApplication } from '../../spectron/application';
export class QuickOpen {
static QUICK_OPEN_HIDDEN = 'div.quick-open-widget[aria-hidden="true"]';
static QUICK_OPEN = 'div.quick-open-widget[aria-hidden="false"]';
static QUICK_OPEN_INPUT = `${QuickOpen.QUICK_OPEN} .quick-open-input input`;
static QUICK_OPEN_FOCUSED_ELEMENT = `${QuickOpen.QUICK_OPEN} .quick-open-tree .monaco-tree-row.focused .monaco-highlighted-label`;
static QUICK_OPEN_ENTRY_SELECTOR = 'div[aria-label="Quick Picker"] .monaco-tree-rows.show-twisties .monaco-tree-row .quick-open-entry';
constructor(readonly spectron: SpectronApplication) { }
async openQuickOpen(value: string): Promise<void> {
await this.spectron.runCommand('workbench.action.quickOpen');
await this.waitForQuickOpenOpened();
if (value) {
await this.spectron.client.setValue(QuickOpen.QUICK_OPEN_INPUT, value);
}
}
async closeQuickOpen(): Promise<void> {
await this.spectron.runCommand('workbench.action.closeQuickOpen');
await this.waitForQuickOpenClosed();
}
async openFile(fileName: string): Promise<void> {
await this.openQuickOpen(fileName);
await this.waitForQuickOpenElements(names => names.some(n => n === fileName));
await this.spectron.client.keys(['Enter', 'NULL']);
await this.spectron.workbench.waitForActiveTab(fileName);
await this.spectron.workbench.waitForEditorFocus(fileName);
}
async runCommand(commandText: string): Promise<void> {
await this.openQuickOpen(`> ${commandText}`);
// wait for best choice to be focused
await this.spectron.client.waitForTextContent(QuickOpen.QUICK_OPEN_FOCUSED_ELEMENT, commandText);
// wait and click on best choice
await this.spectron.client.waitAndClick(QuickOpen.QUICK_OPEN_FOCUSED_ELEMENT);
}
async waitForQuickOpenOpened(): Promise<void> {
await this.spectron.client.waitForActiveElement(QuickOpen.QUICK_OPEN_INPUT);
// we gotta wait 50 milliseconds due to https://github.com/Microsoft/vscode/blob/master/src/vs/platform/list/browser/listService.ts#L59
await new Promise(c => setTimeout(c, 50));
}
private async waitForQuickOpenClosed(): Promise<void> {
await this.spectron.client.waitForElement(QuickOpen.QUICK_OPEN_HIDDEN);
}
async submit(text: string): Promise<void> {
await this.spectron.client.setValue(QuickOpen.QUICK_OPEN_INPUT, text);
await this.spectron.client.keys(['Enter', 'NULL']);
await this.waitForQuickOpenClosed();
}
async selectQuickOpenElement(index: number): Promise<void> {
await this.waitForQuickOpenOpened();
for (let from = 0; from < index; from++) {
await this.spectron.client.keys(['ArrowDown', 'NULL']);
}
await this.spectron.client.keys(['Enter', 'NULL']);
await this.waitForQuickOpenClosed();
}
async waitForQuickOpenElements(accept: (names: string[]) => boolean): Promise<void> {
await this.spectron.client.waitFor(() => this.getQuickOpenElements(), accept);<|fim▁hole|> private async getQuickOpenElements(): Promise<string[]> {
const result = await this.spectron.webclient.selectorExecute(QuickOpen.QUICK_OPEN_ENTRY_SELECTOR,
div => (Array.isArray(div) ? div : [div]).map(element => {
const name = element.querySelector('.label-name') as HTMLElement;
return name.textContent;
})
);
return Array.isArray(result) ? result : [];
}
}<|fim▁end|> | }
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>function htmlEncode(value){
return $('<div/>').text(value).html();
}<|fim▁hole|> $scope.snippet = function(item){
var elem = $("#"+item);
var contents = elem.html().trim();
elem.html(htmlEncode(contents));
$('pre code').each(function(i, block) {
hljs.highlightBlock(block);
});
};
$scope.loadMenu = function(){
$('#side-menu').metisMenu();
};
});<|fim▁end|> | var app = angular.module("oasassets",[]).controller("snippetsController",function($scope){
|
<|file_name|>voxel_utils.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility function for voxels."""
import gin
import gin.tf
import tensorflow as tf
from tf3d.layers import sparse_voxel_net_utils
from tf3d.utils import shape_utils
compute_pooled_voxel_indices = sparse_voxel_net_utils.compute_pooled_voxel_indices
pool_features_given_indices = sparse_voxel_net_utils.pool_features_given_indices
def crop_and_pad_voxels(voxels, start_coordinates, end_coordinates):
"""Crops a voxel region and pads past the boundaries with zeros.
This accepts start and end coordinates past the limits of the voxel grid,
and uses it to calculate how much top/left/right/bottom padding to add.
Args:
voxels: A tf.float32 tensor of shape [x, y, z, f] to crop
start_coordinates: A list of len 4 with the [x, y, z, f] starting location
of our crop. This can be negative, which indicates left/top padding.
end_coordinates: A list of len 4 with the [x, y, z, f] ending location of
our crop. This can be beyond the size of the voxel tensor, which indicates
padding.
Returns:
cropped_and_padded_voxels: A voxel grid with shape
[end_coordinates[0] - start_coordinates[0],
end_coordinates[1] - start_coordinates[1],
end_coordinates[2] - start_coordinates[2],
end_coordinates[3] - start_coordinates[3]]
Raises:
ValueError: If requested crop and pad is outside the bounds of what the
function supports.
"""
if len(start_coordinates) != 4:
raise ValueError('start_coordinates should be of length 4')
if len(end_coordinates) != 4:
raise ValueError('end_coordinates should be of length 4')
if any([coord <= 0 for coord in end_coordinates]):
raise ValueError('Requested end coordinates should be > 0')
start_coordinates = tf.convert_to_tensor(start_coordinates, tf.int32)
end_coordinates = tf.convert_to_tensor(end_coordinates, tf.int32)
# Clip the coordinates to within the voxel grid
clipped_start_coordinates = tf.maximum(0, start_coordinates)
clipped_end_coordinates = tf.minimum(voxels.shape, end_coordinates)
cropped_voxels = tf.slice(voxels,
begin=clipped_start_coordinates,
size=(clipped_end_coordinates -
clipped_start_coordinates))
top_and_left_padding = tf.maximum(0, -start_coordinates)
bottom_and_right_padding = tf.maximum(0, end_coordinates - voxels.shape)
padding = tf.stack([top_and_left_padding, bottom_and_right_padding], axis=1)
return tf.pad(cropped_voxels, padding)
def pointcloud_to_voxel_grid(points,
features,
grid_cell_size,
start_location,
end_location,
segment_func=tf.math.unsorted_segment_mean):
"""Converts a pointcloud into a voxel grid.
Args:
points: A tf.float32 tensor of size [N, 3].
features: A tf.float32 tensor of size [N, F].
grid_cell_size: A tf.float32 tensor of size [3].
start_location: A tf.float32 tensor of size [3].
end_location: A tf.float32 tensor of size [3].
segment_func: A tensorflow function that operates on segments. Expect one
of tf.math.unsorted_segment_{min/max/mean/prod/sum}. Defaults to
tf.math.unsorted_segment_mean
Returns:
voxel_features: A tf.float32 tensor of
size [grid_x_len, grid_y_len, grid_z_len, F].
segment_ids: A tf.int32 tensor of IDs for each point indicating
which (flattened) voxel cell its data was mapped to.
point_indices: A tf.int32 tensor of size [num_points, 3] containing the
location of each point in the 3d voxel grid.
"""
grid_cell_size = tf.convert_to_tensor(grid_cell_size, dtype=tf.float32)
start_location = tf.convert_to_tensor(start_location, dtype=tf.float32)
end_location = tf.convert_to_tensor(end_location, dtype=tf.float32)
point_indices = tf.cast(
(points - tf.expand_dims(start_location, axis=0)) /
tf.expand_dims(grid_cell_size, axis=0),
dtype=tf.int32)
grid_size = tf.cast(
tf.math.ceil((end_location - start_location) / grid_cell_size),
dtype=tf.int32)
# Note: all points outside the grid are added to the edges
# Cap index at grid_size - 1 (so a 10x10x10 grid's max cell is (9,9,9))
point_indices = tf.minimum(point_indices, tf.expand_dims(grid_size - 1,
axis=0))
# Don't allow any points below index (0, 0, 0)
point_indices = tf.maximum(point_indices, 0)
segment_ids = tf.reduce_sum(
point_indices * tf.stack(
[grid_size[1] * grid_size[2], grid_size[2], 1], axis=0),
axis=1)
voxel_features = segment_func(
data=features,
segment_ids=segment_ids,
num_segments=(grid_size[0] * grid_size[1] * grid_size[2]))
return (tf.reshape(voxel_features,
[grid_size[0],
grid_size[1],
grid_size[2],
features.get_shape().as_list()[1]]),
segment_ids,
point_indices)
def voxels_to_points(voxels, segment_ids):
"""Convert voxels back to points given their segment id.
Args:
voxels: A tf.float32 tensor representing a voxel grid. Expect shape
[x, y, z, f].<|fim▁hole|> in the original pointcloud we want to project voxel features back to.
Returns:
point_features: A tf.float32 tensor of shape [N, f] where each point
now has the features in the associated voxel cell.
"""
flattened_voxels = tf.reshape(voxels, shape=(-1, voxels.shape[-1]))
return tf.gather(flattened_voxels, segment_ids)
def _points_offset_in_voxels_unbatched(points, grid_cell_size):
"""Converts points into offsets in voxel grid for a single batch.
The values range from -0.5 to 0.5
Args:
points: A tf.float32 tensor of size [N, 3].
grid_cell_size: The size of the grid cells in x, y, z dimensions in the
voxel grid. It should be either a tf.float32 tensor, a numpy array or a
list of size [3].
Returns:
voxel_xyz_offsets: A tf.float32 tensor of size [N, 3].
"""
min_points = tf.reduce_min(points, axis=0)
points_index = tf.math.floordiv(points - min_points, grid_cell_size)
points_offset = points - min_points - (points_index * grid_cell_size)
return (points_offset / grid_cell_size) - 0.5
def points_offset_in_voxels(points, grid_cell_size):
"""Converts points into offsets in voxel grid.
Args:
points: A tf.float32 tensor of size [batch_size, N, 3].
grid_cell_size: The size of the grid cells in x, y, z dimensions in the
voxel grid. It should be either a tf.float32 tensor, a numpy array or a
list of size [3].
Returns:
voxel_xyz_offsets: A tf.float32 tensor of size [batch_size, N, 3].
"""
batch_size = points.get_shape().as_list()[0]
def fn(i):
return _points_offset_in_voxels_unbatched(
points=points[i, :, :], grid_cell_size=grid_cell_size)
return tf.map_fn(fn=fn, elems=tf.range(batch_size), dtype=tf.float32)
def _points_to_voxel_indices(points, grid_cell_size):
  """Converts points into corresponding voxel indices.

  Each point is mapped into a voxel grid with cell size `grid_cell_size`,
  producing an (x, y, z) voxel index plus a single-number index with a
  one-to-one mapping to the (x, y, z) index.

  Args:
    points: A tf.float32 tensor of size [N, 3].
    grid_cell_size: The size of the grid cells in x, y, z dimensions in the
      voxel grid. It should be either a tf.float32 tensor, a numpy array or a
      list of size [3].

  Returns:
    voxel_xyz_indices: A tf.int32 tensor of size [N, 3] containing the x, y, z
      index of the voxel corresponding to each given point.
    voxel_single_number_indices: A tf.int32 tensor of size [N] containing the
      single number index of the voxel corresponding to each given point.
    voxel_start_location: A tf.float32 tensor of size [3] containing the start
      location of the voxels.
  """
  start_location = tf.reduce_min(points, axis=0)
  xyz_indices = tf.cast(
      tf.math.floordiv(points - start_location, grid_cell_size),
      dtype=tf.int32)
  # A pooling size of (1, 1, 1) keeps the grid resolution unchanged; the
  # helper only flattens (x, y, z) into a single-number index.
  xyz_indices, flat_indices = compute_pooled_voxel_indices(
      voxel_xyz_indices=xyz_indices, pooling_size=(1, 1, 1))
  return xyz_indices, flat_indices, start_location
def pointcloud_to_sparse_voxel_grid_unbatched(points, features, grid_cell_size,
                                              segment_func):
  """Converts a single (unbatched) pointcloud into a sparse voxel grid.

  A sparse voxel grid is represented by keeping in memory only the voxels
  that contain points. Assuming N' voxels are occupied, the grid is given by:
    (a) voxel_features, a [N', F] or [N', G, F] tensor with the feature
        vector for each voxel.
    (b) voxel_indices, a [N', 3] tensor with the x, y, z index of each voxel.
  The batched variant `pointcloud_to_sparse_voxel_grid` calls this function
  once per batch example inside a while loop.

  Args:
    points: A tf.float32 tensor of size [N, 3].
    features: A tf.float32 tensor of size [N, F].
    grid_cell_size: The size of the grid cells in x, y, z dimensions in the
      voxel grid. It should be either a tf.float32 tensor, a numpy array or a
      list of size [3].
    segment_func: A tensorflow function that operates on segments. Examples
      are one of tf.math.unsorted_segment_{min/max/mean/prod/sum}.

  Returns:
    voxel_features: A tf.float32 tensor of size [N', F] or [N', G, F] where G
      is the number of points sampled per voxel.
    voxel_indices: A tf.int32 tensor of size [N', 3].
    segment_ids: A size [N] tf.int32 tensor of IDs for each point indicating
      which (flattened) voxel cell its data was mapped to.
    voxel_start_location: A tf.float32 tensor of size [3] containing the start
      location of the voxels.

  Raises:
    ValueError: If pooling method is unknown.
  """
  cell_size = tf.convert_to_tensor(grid_cell_size, dtype=tf.float32)
  xyz_indices, flat_indices, start_location = _points_to_voxel_indices(
      points=points, grid_cell_size=cell_size)
  pooled_features, segment_ids, num_segments = pool_features_given_indices(
      features=features,
      indices=flat_indices,
      segment_func=segment_func)
  # Every point in a voxel carries the same xyz index, so a segment-max just
  # picks that index once per occupied voxel.
  pooled_xyz_indices = tf.math.unsorted_segment_max(
      data=xyz_indices,
      segment_ids=segment_ids,
      num_segments=num_segments)
  return pooled_features, pooled_xyz_indices, segment_ids, start_location
def _pad_or_clip_voxels(voxel_features, voxel_indices, num_valid_voxels,
                        segment_ids, voxels_pad_or_clip_size):
  """Pads or clips voxel tensors to a fixed number of voxels.

  When `voxels_pad_or_clip_size` is falsy, all inputs pass through unchanged.
  Otherwise the voxel dimension is padded or clipped to that size and segment
  ids that would point past the clipped range are zeroed out.
  """
  if not voxels_pad_or_clip_size:
    return voxel_features, voxel_indices, num_valid_voxels, segment_ids
  num_valid_voxels = tf.minimum(num_valid_voxels, voxels_pad_or_clip_size)
  num_channels = voxel_features.get_shape().as_list()[-1]
  feature_rank = len(voxel_features.shape.as_list())
  if feature_rank == 2:
    target_shape = [voxels_pad_or_clip_size, num_channels]
  elif feature_rank == 3:
    samples_per_voxel = voxel_features.get_shape().as_list()[1]
    if samples_per_voxel is None:
      # Static sample count unknown; fall back to the dynamic shape.
      samples_per_voxel = tf.shape(voxel_features)[1]
    target_shape = [voxels_pad_or_clip_size, samples_per_voxel, num_channels]
  else:
    raise ValueError('voxel_features should be either rank 2 or 3.')
  voxel_features = shape_utils.pad_or_clip_nd(
      tensor=voxel_features, output_shape=target_shape)
  voxel_indices = shape_utils.pad_or_clip_nd(
      tensor=voxel_indices, output_shape=[voxels_pad_or_clip_size, 3])
  # Zero out segment ids referring to voxels removed by clipping.
  keep_mask = tf.cast(tf.less(segment_ids, num_valid_voxels), dtype=tf.int32)
  segment_ids *= keep_mask
  return voxel_features, voxel_indices, num_valid_voxels, segment_ids
def pointcloud_to_sparse_voxel_grid(points, features, num_valid_points,
                                    grid_cell_size, voxels_pad_or_clip_size,
                                    segment_func):
  """Converts a batch of pointclouds into sparse voxel grids.

  Calls `pointcloud_to_sparse_voxel_grid_unbatched` above once per batch
  example via tf.map_fn.

  Args:
    points: A tf.float32 tensor of size [batch_size, N, 3].
    features: A tf.float32 tensor of size [batch_size, N, F].
    num_valid_points: A tf.int32 tensor of size [num_batches] containing the
      number of valid points in each batch example.
    grid_cell_size: A tf.float32 tensor of size [3].
    voxels_pad_or_clip_size: Number of target voxels to pad or clip to. If
      None, it will not perform the padding.
    segment_func: A tensorflow function that operates on segments. Examples
      are one of tf.math.unsorted_segment_{min/max/mean/prod/sum}.

  Returns:
    voxel_features: A tf.float32 tensor of size [batch_size, N', F]
      or [batch_size, N', G, F] where G is the number of points sampled per
      voxel.
    voxel_indices: A tf.int32 tensor of size [batch_size, N', 3].
    num_valid_voxels: A tf.int32 tensor of size [batch_size].
    segment_ids: A size [batch_size, N] tf.int32 tensor of IDs for each point
      indicating which (flattened) voxel cell its data was mapped to.
    voxel_start_location: A size [batch_size, 3] tf.float32 tensor of voxel
      start locations.

  Raises:
    ValueError: If pooling method is unknown.
  """
  batch_size = points.get_shape().as_list()[0]
  if batch_size is None:
    # Fall back to the dynamic batch size when it is not known statically.
    batch_size = tf.shape(points)[0]
  num_points = tf.shape(points)[1]

  def _voxelize_one(i):
    """Voxelizes the i-th example and pads the outputs to static sizes."""
    valid_count = num_valid_points[i]
    (features_i, indices_i, segment_ids_i, start_location_i) = (
        pointcloud_to_sparse_voxel_grid_unbatched(
            points=points[i, :valid_count, :],
            features=features[i, :valid_count, :],
            grid_cell_size=grid_cell_size,
            segment_func=segment_func))
    valid_voxels_i = tf.shape(features_i)[0]
    (features_i, indices_i, valid_voxels_i,
     segment_ids_i) = _pad_or_clip_voxels(
         voxel_features=features_i,
         voxel_indices=indices_i,
         num_valid_voxels=valid_voxels_i,
         segment_ids=segment_ids_i,
         voxels_pad_or_clip_size=voxels_pad_or_clip_size)
    # Restore the full point dimension so every example yields N segment ids.
    segment_ids_i = tf.pad(
        segment_ids_i, paddings=[[0, num_points - valid_count]])
    return (features_i, indices_i, valid_voxels_i, segment_ids_i,
            start_location_i)

  return tf.map_fn(
      fn=_voxelize_one,
      elems=tf.range(batch_size),
      dtype=(tf.float32, tf.int32, tf.int32, tf.int32, tf.float32))
def sparse_voxel_grid_to_pointcloud(voxel_features, segment_ids,
                                    num_valid_voxels, num_valid_points):
  """Convert voxel features back to points given their segment ids.

  Args:
    voxel_features: A tf.float32 tensor of size [batch_size, N', F].
    segment_ids: A size [batch_size, N] tf.int32 tensor of IDs for each point
      indicating which (flattened) voxel cell its data was mapped to.
    num_valid_voxels: A tf.int32 tensor of size [batch_size] containing the
      number of valid voxels in each batch example.
    num_valid_points: A tf.int32 tensor of size [batch_size] containing the
      number of valid points in each batch example.

  Returns:
    point_features: A tf.float32 tensor of size [batch_size, N, F].

  Raises:
    ValueError: If batch_size is unknown at graph construction time.
  """
  batch_size = voxel_features.shape[0]
  if batch_size is None:
    raise ValueError('batch_size is unknown at graph construction time.')
  num_points = tf.shape(segment_ids)[1]

  def _gather_one(i):
    """Gathers per-point features for the i-th example, padded back to N."""
    valid_voxels_i = num_valid_voxels[i]
    valid_points_i = num_valid_points[i]
    gathered = tf.gather(voxel_features[i, :valid_voxels_i, :],
                         segment_ids[i, :valid_points_i])
    # Pad only the point dimension; leave any trailing feature dims alone.
    gathered_rank = len(gathered.get_shape().as_list())
    paddings = ([[0, num_points - valid_points_i]] +
                [[0, 0]] * (gathered_rank - 1))
    return tf.pad(gathered, paddings=paddings)

  return tf.map_fn(fn=_gather_one, elems=tf.range(batch_size),
                   dtype=tf.float32)
@gin.configurable
def per_voxel_point_sample_segment_func(data, segment_ids, num_segments,
                                        num_samples_per_voxel):
  """Samples features from the points within each voxel.

  Args:
    data: A tf.float32 tensor of size [N, F].
    segment_ids: A tf.int32 tensor of size [N].
    num_segments: Number of segments.
    num_samples_per_voxel: Number of features to sample per voxel. If the
      voxel has less number of points in it, the point features will be
      padded by 0.

  Returns:
    A tf.float32 tensor of size [num_segments, num_samples_per_voxel, F].

  Raises:
    ValueError: If the number of feature channels is not known statically.
  """
  num_channels = data.get_shape().as_list()[1]
  if num_channels is None:
    raise ValueError('num_channels is None.')
  n = tf.shape(segment_ids)[0]
  def _body_fn(i, indices_range, indices):
    """Computes the indices of the i-th point feature in each segment."""
    # indices_range holds 1-based point indices (0 means "already taken"),
    # so segment-max picks one not-yet-sampled point per segment.
    indices_i = tf.math.unsorted_segment_max(
        data=indices_range, segment_ids=segment_ids, num_segments=num_segments)
    indices_i_positive_mask = tf.greater(indices_i, 0)
    indices_i_positive = tf.boolean_mask(indices_i, indices_i_positive_mask)
    # Mark the selected points (converted back to 0-based positions) so they
    # are excluded from the remaining iterations.
    boolean_mask = tf.scatter_nd(
        indices=tf.cast(
            tf.expand_dims(indices_i_positive - 1, axis=1), dtype=tf.int64),
        updates=tf.ones_like(indices_i_positive, dtype=tf.int32),
        shape=(n,))
    indices_range *= (1 - boolean_mask)
    # Segments with no remaining points contribute index 0 (the pad row).
    indices_i *= tf.cast(indices_i_positive_mask, dtype=tf.int32)
    # Place this iteration's picks in column i of the output index matrix.
    indices_i = tf.pad(
        tf.expand_dims(indices_i, axis=1),
        paddings=[[0, 0], [i, num_samples_per_voxel - i - 1]])
    indices += indices_i
    i = i + 1
    return i, indices_range, indices
  cond = lambda i, indices_range, indices: i < num_samples_per_voxel
  # Loop state: iteration counter, 1-based candidate indices, accumulated
  # [num_segments, num_samples_per_voxel] index matrix.
  (_, _, indices) = tf.while_loop(
      cond=cond,
      body=_body_fn,
      loop_vars=(tf.constant(0, dtype=tf.int32), tf.range(n) + 1,
                 tf.zeros([num_segments, num_samples_per_voxel],
                          dtype=tf.int32)))
  # Prepend a zero row so index 0 gathers zero features (padding for voxels
  # with fewer than num_samples_per_voxel points); real points shift to 1..N.
  data = tf.pad(data, paddings=[[1, 0], [0, 0]])
  voxel_features = tf.gather(data, tf.reshape(indices, [-1]))
  return tf.reshape(voxel_features,
                    [num_segments, num_samples_per_voxel, num_channels])
def compute_pointcloud_weights_based_on_voxel_density(points, grid_cell_size):
  """Computes pointcloud weights based on voxel density.

  Args:
    points: A tf.float32 tensor of size [num_points, 3].
    grid_cell_size: The size of the grid cells in x, y, z dimensions in the
      voxel grid. It should be either a tf.float32 tensor, a numpy array or a
      list of size [3].

  Returns:
    A tf.float32 tensor of size [num_points, 1] containing weights that are
    inversely proportional to the density of the points in voxels.
  """
  num_points = tf.shape(points)[0]
  # Per-point feature of 1.0: segment-summing these per voxel yields the
  # number of points in each voxel.
  features = tf.ones([num_points, 1], dtype=tf.float32)
  voxel_features, _, segment_ids, _ = (
      pointcloud_to_sparse_voxel_grid_unbatched(
          points=points,
          features=features,
          grid_cell_size=grid_cell_size,
          segment_func=tf.math.unsorted_segment_sum))
  num_voxels = tf.shape(voxel_features)[0]
  # Scatter each voxel's point count back onto its member points; the helper
  # is batched, so wrap inputs in a batch of one and squeeze afterwards.
  point_features = sparse_voxel_grid_to_pointcloud(
      voxel_features=tf.expand_dims(voxel_features, axis=0),
      segment_ids=tf.expand_dims(segment_ids, axis=0),
      num_valid_voxels=tf.expand_dims(num_voxels, axis=0),
      num_valid_points=tf.expand_dims(num_points, axis=0))
  inverse_point_densities = 1.0 / tf.squeeze(point_features, axis=0)
  total_inverse_density = tf.reduce_sum(inverse_point_densities)
  # Normalize so the weights sum to num_points (average weight of 1.0).
  return (inverse_point_densities * tf.cast(num_points, dtype=tf.float32) /
          total_inverse_density)
# NOTE: The following trailing lines are a non-code extraction artifact
# (website boilerplate) and have been commented out to keep this module
# syntactically valid:
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.