file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
key_value_table_demo.py | import json
import pprint
from a2qt import QtWidgets
from a2widget.key_value_table import KeyValueTable
from a2widget.a2text_field import A2CodeField
_DEMO_DATA = {
'Name': 'Some Body',
'Surname': 'Body',
'Street. Nr': 'Thingstreet 8',
'Street': 'Thingstreet',
'Nr': '8',
'PLZ': '12354',
'City': 'Frankfurt am Main',
'Phone+': '+1232222222',
'Phone': '2222222',
'Country': 'Germany',
}
class Demo(QtWidgets.QMainWindow):
def __init__(self):
super(Demo, self).__init__()
w = QtWidgets.QWidget(self)
self.setCentralWidget(w)
lyt = QtWidgets.QVBoxLayout(w)
self.key_value_table = KeyValueTable(self)
self.key_value_table.changed.connect(self.table_to_code)
lyt.addWidget(self.key_value_table)
btn = QtWidgets.QPushButton('GET DATA')
btn.clicked.connect(self.get_data)
lyt.addWidget(btn)
self.text_field = A2CodeField(self)
self.text_field.text_changed.connect(self.code_to_table)
lyt.addWidget(self.text_field)
btn = QtWidgets.QPushButton('SET DATA')
btn.clicked.connect(self.set_data)
lyt.addWidget(btn)
self.text_field.setText(json.dumps(_DEMO_DATA, indent=2))
self.set_data()
def table_to_code(self):
data = self.key_value_table.get_data()
self.text_field.setText(json.dumps(data, indent=2))
def code_to_table(self):
data = json.loads(self.text_field.text())
self.key_value_table.set_silent(data)
def get_data(self):
data = self.key_value_table.get_data()
print(data)
pprint.pprint(data, sort_dicts=False)
def set_data(self):
data = json.loads(self.text_field.text())
self.key_value_table.set_data(data)
def show():
app = QtWidgets.QApplication([])
win = Demo()
win.show()
app.exec()
if __name__ == '__main__':
| show() | conditional_block | |
qscintilla.py | #############################################################################
##
## Copyright (c) 2011 Riverbank Computing Limited <info@riverbankcomputing.com>
##
## This file is part of PyQt.
##
## This file may be used under the terms of the GNU General Public
## License versions 2.0 or 3.0 as published by the Free Software
## Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
## included in the packaging of this file. Alternatively you may (at
## your option) use any later version of the GNU General Public
## License if such license has been publicly approved by Riverbank
## Computing Limited (or its successors, if any) and the KDE Free Qt
## Foundation. In addition, as a special exception, Riverbank gives you
## certain additional rights. These rights are described in the Riverbank
## GPL Exception version 1.1, which can be found in the file
## GPL_EXCEPTION.txt in this package.
## | ## Please review the following information to ensure GNU General
## Public Licensing requirements will be met:
## http://trolltech.com/products/qt/licenses/licensing/opensource/. If
## you are unsure which license is appropriate for your use, please
## review the following information:
## http://trolltech.com/products/qt/licenses/licensing/licensingoverview
## or contact the sales department at sales@riverbankcomputing.com.
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
#############################################################################
# If pluginType is MODULE, the plugin loader will call moduleInformation. The
# variable MODULE is inserted into the local namespace by the plugin loader.
pluginType = MODULE
# moduleInformation() must return a tuple (module, widget_list). If "module"
# is "A" and any widget from this module is used, the code generator will write
# "import A". If "module" is "A[.B].C", the code generator will write
# "from A[.B] import C". Each entry in "widget_list" must be unique.
def moduleInformation():
return "PyQt4.Qsci", ("QsciScintilla", ) | random_line_split | |
qscintilla.py | #############################################################################
##
## Copyright (c) 2011 Riverbank Computing Limited <info@riverbankcomputing.com>
##
## This file is part of PyQt.
##
## This file may be used under the terms of the GNU General Public
## License versions 2.0 or 3.0 as published by the Free Software
## Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
## included in the packaging of this file. Alternatively you may (at
## your option) use any later version of the GNU General Public
## License if such license has been publicly approved by Riverbank
## Computing Limited (or its successors, if any) and the KDE Free Qt
## Foundation. In addition, as a special exception, Riverbank gives you
## certain additional rights. These rights are described in the Riverbank
## GPL Exception version 1.1, which can be found in the file
## GPL_EXCEPTION.txt in this package.
##
## Please review the following information to ensure GNU General
## Public Licensing requirements will be met:
## http://trolltech.com/products/qt/licenses/licensing/opensource/. If
## you are unsure which license is appropriate for your use, please
## review the following information:
## http://trolltech.com/products/qt/licenses/licensing/licensingoverview
## or contact the sales department at sales@riverbankcomputing.com.
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
#############################################################################
# If pluginType is MODULE, the plugin loader will call moduleInformation. The
# variable MODULE is inserted into the local namespace by the plugin loader.
pluginType = MODULE
# moduleInformation() must return a tuple (module, widget_list). If "module"
# is "A" and any widget from this module is used, the code generator will write
# "import A". If "module" is "A[.B].C", the code generator will write
# "from A[.B] import C". Each entry in "widget_list" must be unique.
def moduleInformation():
| return "PyQt4.Qsci", ("QsciScintilla", ) | identifier_body | |
qscintilla.py | #############################################################################
##
## Copyright (c) 2011 Riverbank Computing Limited <info@riverbankcomputing.com>
##
## This file is part of PyQt.
##
## This file may be used under the terms of the GNU General Public
## License versions 2.0 or 3.0 as published by the Free Software
## Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
## included in the packaging of this file. Alternatively you may (at
## your option) use any later version of the GNU General Public
## License if such license has been publicly approved by Riverbank
## Computing Limited (or its successors, if any) and the KDE Free Qt
## Foundation. In addition, as a special exception, Riverbank gives you
## certain additional rights. These rights are described in the Riverbank
## GPL Exception version 1.1, which can be found in the file
## GPL_EXCEPTION.txt in this package.
##
## Please review the following information to ensure GNU General
## Public Licensing requirements will be met:
## http://trolltech.com/products/qt/licenses/licensing/opensource/. If
## you are unsure which license is appropriate for your use, please
## review the following information:
## http://trolltech.com/products/qt/licenses/licensing/licensingoverview
## or contact the sales department at sales@riverbankcomputing.com.
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
#############################################################################
# If pluginType is MODULE, the plugin loader will call moduleInformation. The
# variable MODULE is inserted into the local namespace by the plugin loader.
pluginType = MODULE
# moduleInformation() must return a tuple (module, widget_list). If "module"
# is "A" and any widget from this module is used, the code generator will write
# "import A". If "module" is "A[.B].C", the code generator will write
# "from A[.B] import C". Each entry in "widget_list" must be unique.
def | ():
return "PyQt4.Qsci", ("QsciScintilla", )
| moduleInformation | identifier_name |
make-sim-options.py | #!/usr/bin/python
#This script create simulation and reconstruction options
import os
import sys
import re
if len(sys.argv)<4:
print "Usage: make-sim-options.py <decay_file> <output_prefix> <event_number>"
exit(1)
HOME_DIR = os.environ['HOME']
JPSIKKROOT_DIR = os.environ['JPSIKKROOT']
SHARE_DIR = os.path.join(JPSIKKROOT_DIR, "share")
TEMPLATE_DIR = os.path.join(JPSIKKROOT_DIR, "share/template")
TEMPLATE_SIM_FILE = os.path.joint(TEMPLATE_DIR, "simulation.cfg")
print HOMEDIR, JPSIKKROOT_DIR, TE | DECAY_FILE = os.path.abspath(os.path.join(SHARE_DIR,sys.argv[1]))
PREFIX = sys.argv[2]
RTRAW_FILE = os.path.abspath(PREFIX+".rtraw")
DST_FILE = os.path.abspath(PREFIX+".dst")
ROOT_FILE = os.path.abspath(PREFIX+".root") | random_line_split | |
make-sim-options.py | #!/usr/bin/python
#This script create simulation and reconstruction options
import os
import sys
import re
if len(sys.argv)<4:
|
HOME_DIR = os.environ['HOME']
JPSIKKROOT_DIR = os.environ['JPSIKKROOT']
SHARE_DIR = os.path.join(JPSIKKROOT_DIR, "share")
TEMPLATE_DIR = os.path.join(JPSIKKROOT_DIR, "share/template")
TEMPLATE_SIM_FILE = os.path.joint(TEMPLATE_DIR, "simulation.cfg")
print HOMEDIR, JPSIKKROOT_DIR, TE
DECAY_FILE = os.path.abspath(os.path.join(SHARE_DIR,sys.argv[1]))
PREFIX = sys.argv[2]
RTRAW_FILE = os.path.abspath(PREFIX+".rtraw")
DST_FILE = os.path.abspath(PREFIX+".dst")
ROOT_FILE = os.path.abspath(PREFIX+".root")
| print "Usage: make-sim-options.py <decay_file> <output_prefix> <event_number>"
exit(1) | conditional_block |
test_text.py | # This file is part of Checkbox.
#
# Copyright 2012 Canonical Ltd.
# Written by:
# Zygmunt Krynicki <zygmunt.krynicki@canonical.com>
# Daniel Manrique <roadmr@ubuntu.com>
#
# Checkbox is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Checkbox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Checkbox. If not, see <http://www.gnu.org/licenses/>.
"""
plainbox.impl.exporter.test_text
================================
Test definitions for plainbox.impl.exporter.text module
"""
from io import BytesIO
from unittest import TestCase
from plainbox.impl.exporter.text import TextSessionStateExporter
class TextSessionStateExporterTests(TestCase):
def test_default_dump(self):
exporter = TextSessionStateExporter()
# Text exporter expects this data format
data = {'result_map': {'job_name': {'outcome': 'fail'}}}
stream = BytesIO()
exporter.dump(data, stream)
expected_bytes = "job_name: fail\n".encode('UTF-8') | self.assertEqual(stream.getvalue(), expected_bytes) | random_line_split | |
test_text.py | # This file is part of Checkbox.
#
# Copyright 2012 Canonical Ltd.
# Written by:
# Zygmunt Krynicki <zygmunt.krynicki@canonical.com>
# Daniel Manrique <roadmr@ubuntu.com>
#
# Checkbox is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Checkbox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Checkbox. If not, see <http://www.gnu.org/licenses/>.
"""
plainbox.impl.exporter.test_text
================================
Test definitions for plainbox.impl.exporter.text module
"""
from io import BytesIO
from unittest import TestCase
from plainbox.impl.exporter.text import TextSessionStateExporter
class TextSessionStateExporterTests(TestCase):
def test_default_dump(self):
| exporter = TextSessionStateExporter()
# Text exporter expects this data format
data = {'result_map': {'job_name': {'outcome': 'fail'}}}
stream = BytesIO()
exporter.dump(data, stream)
expected_bytes = "job_name: fail\n".encode('UTF-8')
self.assertEqual(stream.getvalue(), expected_bytes) | identifier_body | |
test_text.py | # This file is part of Checkbox.
#
# Copyright 2012 Canonical Ltd.
# Written by:
# Zygmunt Krynicki <zygmunt.krynicki@canonical.com>
# Daniel Manrique <roadmr@ubuntu.com>
#
# Checkbox is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Checkbox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Checkbox. If not, see <http://www.gnu.org/licenses/>.
"""
plainbox.impl.exporter.test_text
================================
Test definitions for plainbox.impl.exporter.text module
"""
from io import BytesIO
from unittest import TestCase
from plainbox.impl.exporter.text import TextSessionStateExporter
class | (TestCase):
def test_default_dump(self):
exporter = TextSessionStateExporter()
# Text exporter expects this data format
data = {'result_map': {'job_name': {'outcome': 'fail'}}}
stream = BytesIO()
exporter.dump(data, stream)
expected_bytes = "job_name: fail\n".encode('UTF-8')
self.assertEqual(stream.getvalue(), expected_bytes)
| TextSessionStateExporterTests | identifier_name |
lib.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::path::Path;
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use stack_config::StackConfig;
use tracing::{event, trace, Level};
use edenfs_error::EdenFsError;
#[derive(Serialize, Deserialize, StackConfig, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Core {
#[stack(default)]
eden_directory: Option<String>,
}
#[derive(Serialize, Deserialize, StackConfig, Debug)]
pub struct EdenFsConfig {
#[stack(nested)]
core: Core,
#[stack(merge = "merge_table")]
#[serde(flatten)]
other: toml::value::Table,
}
fn merge_table(lhs: &mut toml::value::Table, rhs: toml::value::Table) {
for (key, value) in rhs.into_iter() {
if let Some(lhs_value) = lhs.get_mut(&key) {
// Key exists
if let (Some(lhs_table), true) = (lhs_value.as_table_mut(), value.is_table()) {
// Both value are table, we merge them
// SAFETY: unwrap here is guaranteed by `value.is_table()`. This
// is awkward because try_into will consume the value, making
// the else-clause not able to use it later.
merge_table(lhs_table, value.try_into::<toml::value::Table>().unwrap());
} else {
// Otherwise, either the values are not table type, or they have
// different types. In both case we prefer rhs value.
*lhs_value = value;
}
} else {
// Key does not exist in lhs
lhs.insert(key, value);
}
}
}
fn load_path(loader: &mut EdenFsConfigLoader, path: &Path) -> Result<()> {
let content = String::from_utf8(std::fs::read(&path)?)?;
trace!(?content, ?path, "Loading config");
loader.load(toml::from_str(&content)?);
Ok(())
}
fn load_system(loader: &mut EdenFsConfigLoader, etc_dir: &Path) -> Result<()> {
load_path(loader, &etc_dir.join("edenfs.rc"))
}
fn load_system_rcs(loader: &mut EdenFsConfigLoader, etc_dir: &Path) -> Result<()> {
let rcs_dir = etc_dir.join("config.d");
let entries = std::fs::read_dir(&rcs_dir)
.with_context(|| format!("Unable to read configuration from {:?}", rcs_dir))?;
for rc in entries {
let rc = match rc {
Ok(rc) => rc,
Err(e) => {
event!(
Level::INFO,
"Unable to read configuration, skipped: {:?}",
e
);
continue;
}
};
let name = rc.file_name();
let name = if let Some(name) = name.to_str() {
name
} else {
continue;
};
if name.starts_with('.') || !name.ends_with(".toml") {
continue;
}
if let Err(e) = load_path(loader, &rc.path()) {
event!(
Level::DEBUG,
"Not able to load '{}': {:?}",
rc.path().display(),
e
);
}
}
Ok(())
}
fn load_user(loader: &mut EdenFsConfigLoader, home_dir: &Path) -> Result<()> {
let home_rc = home_dir.join(".edenrc");
load_path(loader, &home_rc)
}
pub fn load_config(
etc_eden_dir: &Path,
home_dir: Option<&Path>,
) -> Result<EdenFsConfig, EdenFsError> {
let mut loader = EdenFsConfig::loader();
if let Err(e) = load_system(&mut loader, &etc_eden_dir) {
event!(
Level::INFO,
etc_eden_dir = ?etc_eden_dir,
"Unable to load system configuration, skipped: {:?}",
e
);
} else {
event!(Level::DEBUG, "System configuration loaded");
}
if let Err(e) = load_system_rcs(&mut loader, &etc_eden_dir) | else {
event!(Level::DEBUG, "System RC configurations loaded");
}
if let Some(home) = home_dir {
if let Err(e) = load_user(&mut loader, &home) {
event!(Level::INFO, home = ?home, "Unable to load user configuration, skipped: {:?}", e);
} else {
event!(Level::DEBUG, "User configuration loaded");
}
} else {
event!(
Level::INFO,
"Unable to find home dir. User configuration is not loaded."
);
}
Ok(loader.build().map_err(EdenFsError::ConfigurationError)?)
}
| {
event!(
Level::INFO,
etc_eden_dir = ?etc_eden_dir,
"Unable to load system RC configurations, skipped: {:?}",
e
);
} | conditional_block |
lib.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::path::Path;
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use stack_config::StackConfig;
use tracing::{event, trace, Level};
use edenfs_error::EdenFsError;
#[derive(Serialize, Deserialize, StackConfig, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Core {
#[stack(default)]
eden_directory: Option<String>,
}
#[derive(Serialize, Deserialize, StackConfig, Debug)]
pub struct EdenFsConfig {
#[stack(nested)]
core: Core,
#[stack(merge = "merge_table")]
#[serde(flatten)]
other: toml::value::Table,
}
fn merge_table(lhs: &mut toml::value::Table, rhs: toml::value::Table) {
for (key, value) in rhs.into_iter() {
if let Some(lhs_value) = lhs.get_mut(&key) {
// Key exists
if let (Some(lhs_table), true) = (lhs_value.as_table_mut(), value.is_table()) {
// Both value are table, we merge them
// SAFETY: unwrap here is guaranteed by `value.is_table()`. This
// is awkward because try_into will consume the value, making
// the else-clause not able to use it later.
merge_table(lhs_table, value.try_into::<toml::value::Table>().unwrap());
} else {
// Otherwise, either the values are not table type, or they have
// different types. In both case we prefer rhs value.
*lhs_value = value;
}
} else {
// Key does not exist in lhs
lhs.insert(key, value);
}
}
}
fn load_path(loader: &mut EdenFsConfigLoader, path: &Path) -> Result<()> {
let content = String::from_utf8(std::fs::read(&path)?)?;
trace!(?content, ?path, "Loading config");
loader.load(toml::from_str(&content)?);
Ok(())
}
fn load_system(loader: &mut EdenFsConfigLoader, etc_dir: &Path) -> Result<()> {
load_path(loader, &etc_dir.join("edenfs.rc"))
}
fn | (loader: &mut EdenFsConfigLoader, etc_dir: &Path) -> Result<()> {
let rcs_dir = etc_dir.join("config.d");
let entries = std::fs::read_dir(&rcs_dir)
.with_context(|| format!("Unable to read configuration from {:?}", rcs_dir))?;
for rc in entries {
let rc = match rc {
Ok(rc) => rc,
Err(e) => {
event!(
Level::INFO,
"Unable to read configuration, skipped: {:?}",
e
);
continue;
}
};
let name = rc.file_name();
let name = if let Some(name) = name.to_str() {
name
} else {
continue;
};
if name.starts_with('.') || !name.ends_with(".toml") {
continue;
}
if let Err(e) = load_path(loader, &rc.path()) {
event!(
Level::DEBUG,
"Not able to load '{}': {:?}",
rc.path().display(),
e
);
}
}
Ok(())
}
fn load_user(loader: &mut EdenFsConfigLoader, home_dir: &Path) -> Result<()> {
let home_rc = home_dir.join(".edenrc");
load_path(loader, &home_rc)
}
pub fn load_config(
etc_eden_dir: &Path,
home_dir: Option<&Path>,
) -> Result<EdenFsConfig, EdenFsError> {
let mut loader = EdenFsConfig::loader();
if let Err(e) = load_system(&mut loader, &etc_eden_dir) {
event!(
Level::INFO,
etc_eden_dir = ?etc_eden_dir,
"Unable to load system configuration, skipped: {:?}",
e
);
} else {
event!(Level::DEBUG, "System configuration loaded");
}
if let Err(e) = load_system_rcs(&mut loader, &etc_eden_dir) {
event!(
Level::INFO,
etc_eden_dir = ?etc_eden_dir,
"Unable to load system RC configurations, skipped: {:?}",
e
);
} else {
event!(Level::DEBUG, "System RC configurations loaded");
}
if let Some(home) = home_dir {
if let Err(e) = load_user(&mut loader, &home) {
event!(Level::INFO, home = ?home, "Unable to load user configuration, skipped: {:?}", e);
} else {
event!(Level::DEBUG, "User configuration loaded");
}
} else {
event!(
Level::INFO,
"Unable to find home dir. User configuration is not loaded."
);
}
Ok(loader.build().map_err(EdenFsError::ConfigurationError)?)
}
| load_system_rcs | identifier_name |
lib.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::path::Path;
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use stack_config::StackConfig;
use tracing::{event, trace, Level};
use edenfs_error::EdenFsError;
#[derive(Serialize, Deserialize, StackConfig, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Core {
#[stack(default)]
eden_directory: Option<String>,
}
#[derive(Serialize, Deserialize, StackConfig, Debug)]
pub struct EdenFsConfig {
#[stack(nested)]
core: Core,
#[stack(merge = "merge_table")]
#[serde(flatten)]
other: toml::value::Table,
}
fn merge_table(lhs: &mut toml::value::Table, rhs: toml::value::Table) {
for (key, value) in rhs.into_iter() {
if let Some(lhs_value) = lhs.get_mut(&key) {
// Key exists
if let (Some(lhs_table), true) = (lhs_value.as_table_mut(), value.is_table()) {
// Both value are table, we merge them
// SAFETY: unwrap here is guaranteed by `value.is_table()`. This
// is awkward because try_into will consume the value, making
// the else-clause not able to use it later.
merge_table(lhs_table, value.try_into::<toml::value::Table>().unwrap());
} else {
// Otherwise, either the values are not table type, or they have
// different types. In both case we prefer rhs value.
*lhs_value = value;
}
} else {
// Key does not exist in lhs
lhs.insert(key, value);
}
}
}
fn load_path(loader: &mut EdenFsConfigLoader, path: &Path) -> Result<()> {
let content = String::from_utf8(std::fs::read(&path)?)?;
trace!(?content, ?path, "Loading config");
loader.load(toml::from_str(&content)?);
Ok(())
}
fn load_system(loader: &mut EdenFsConfigLoader, etc_dir: &Path) -> Result<()> {
load_path(loader, &etc_dir.join("edenfs.rc"))
}
fn load_system_rcs(loader: &mut EdenFsConfigLoader, etc_dir: &Path) -> Result<()> {
let rcs_dir = etc_dir.join("config.d");
let entries = std::fs::read_dir(&rcs_dir)
.with_context(|| format!("Unable to read configuration from {:?}", rcs_dir))?;
for rc in entries {
let rc = match rc {
Ok(rc) => rc,
Err(e) => { | continue;
}
};
let name = rc.file_name();
let name = if let Some(name) = name.to_str() {
name
} else {
continue;
};
if name.starts_with('.') || !name.ends_with(".toml") {
continue;
}
if let Err(e) = load_path(loader, &rc.path()) {
event!(
Level::DEBUG,
"Not able to load '{}': {:?}",
rc.path().display(),
e
);
}
}
Ok(())
}
fn load_user(loader: &mut EdenFsConfigLoader, home_dir: &Path) -> Result<()> {
let home_rc = home_dir.join(".edenrc");
load_path(loader, &home_rc)
}
pub fn load_config(
etc_eden_dir: &Path,
home_dir: Option<&Path>,
) -> Result<EdenFsConfig, EdenFsError> {
let mut loader = EdenFsConfig::loader();
if let Err(e) = load_system(&mut loader, &etc_eden_dir) {
event!(
Level::INFO,
etc_eden_dir = ?etc_eden_dir,
"Unable to load system configuration, skipped: {:?}",
e
);
} else {
event!(Level::DEBUG, "System configuration loaded");
}
if let Err(e) = load_system_rcs(&mut loader, &etc_eden_dir) {
event!(
Level::INFO,
etc_eden_dir = ?etc_eden_dir,
"Unable to load system RC configurations, skipped: {:?}",
e
);
} else {
event!(Level::DEBUG, "System RC configurations loaded");
}
if let Some(home) = home_dir {
if let Err(e) = load_user(&mut loader, &home) {
event!(Level::INFO, home = ?home, "Unable to load user configuration, skipped: {:?}", e);
} else {
event!(Level::DEBUG, "User configuration loaded");
}
} else {
event!(
Level::INFO,
"Unable to find home dir. User configuration is not loaded."
);
}
Ok(loader.build().map_err(EdenFsError::ConfigurationError)?)
} | event!(
Level::INFO,
"Unable to read configuration, skipped: {:?}",
e
); | random_line_split |
utils.js | "use strict";
// copied from http://www.broofa.com/Tools/Math.uuid.js
var CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'.split('');
exports.uuid = function () {
var chars = CHARS, uuid = new Array(36), rnd=0, r;
for (var i = 0; i < 36; i++) {
if (i==8 || i==13 || i==18 || i==23) {
uuid[i] = '-';
}
else if (i==14) {
uuid[i] = '4';
}
else {
if (rnd <= 0x02) rnd = 0x2000000 + (Math.random()*0x1000000)|0;
r = rnd & 0xf;
rnd = rnd >> 4;
uuid[i] = chars[(i == 19) ? (r & 0x3) | 0x8 : r];
}
}
return uuid.join('');
};
exports.in_array = function (item, array) {
return (array.indexOf(item) != -1);
};
exports.sort_keys = function (obj) {
return Object.keys(obj).sort();
};
exports.uniq = function (arr) {
var out = [];
var o = 0;
for (var i=0,l=arr.length; i < l; i++) {
if (out.length === 0) {
out.push(arr[i]);
}
else if (out[o] != arr[i]) {
out.push(arr[i]);
o++;
}
}
return out;
}
exports.ISODate = function (d) {
function pad(n) {return n<10 ? '0'+n : n}
return d.getUTCFullYear()+'-'
+ pad(d.getUTCMonth()+1)+'-'
+ pad(d.getUTCDate())+'T'
+ pad(d.getUTCHours())+':'
+ pad(d.getUTCMinutes())+':'
+ pad(d.getUTCSeconds())+'Z'
}
var _daynames = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
var _monnames = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
function _pad (num, n, p) {
var s = '' + num;
p = p || '0';
while (s.length < n) s = p + s;
return s;
}
exports.pad = _pad;
exports.date_to_str = function (d) {
return _daynames[d.getDay()] + ', ' + _pad(d.getDate(),2) + ' ' +
_monnames[d.getMonth()] + ' ' + d.getFullYear() + ' ' +
_pad(d.getHours(),2) + ':' + _pad(d.getMinutes(),2) + ':' + _pad(d.getSeconds(),2) +
' ' + d.toString().match(/\sGMT([+-]\d+)/)[1];
}
exports.decode_qp = function (line) {
line = line.replace(/\r\n/g,"\n").replace(/[ \t]+\r?\n/g,"\n");
if (! /=/.test(line)) {
// this may be a pointless optimisation...
return new Buffer(line);
}
line = line.replace(/=\n/mg, '');
var buf = new Buffer(line.length);
var pos = 0;
for (var i=0,l=line.length; i < l; i++) {
if (line[i] === '=' &&
/=[0-9a-fA-F]{2}/.test(line[i] + line[i+1] + line[i+2])) {
i++;
buf[pos] = parseInt(line[i] + line[i+1], 16);
i++;
}
else {
buf[pos] = line.charCodeAt(i);
}
pos++;
}
return buf.slice(0, pos);
}
function _char_to_qp (ch) {
return "=" + _pad(ch.charCodeAt(0).toString(16).toUpperCase(), 2);
}
// Shameless attempt to copy from Perl's MIME::QuotedPrint::Perl code.
exports.encode_qp = function (str) {
str = str.replace(/([^\ \t\n!"#\$%&'()*+,\-.\/0-9:;<>?\@A-Z\[\\\]^_`a-z{|}~])/g, function (orig, p1) {
return _char_to_qp(p1);
}).replace(/([ \t]+)$/gm, function (orig, p1) {
return p1.split('').map(_char_to_qp).join('');
});
// Now shorten lines to 76 chars, but don't break =XX encodes.
// Method: iterate over to char 73.
// If char 74, 75 or 76 is = we need to break before the =.
// Otherwise break at 76.
var cur_length = 0;
var out = '';
for (var i=0; i<str.length; i++) {
if (str[i] === '\n') {
out += '\n';
cur_length = 0;
continue;
}
cur_length++;
if (cur_length <= 73) {
out += str[i];
}
else if (cur_length > 73 && cur_length < 76) {
if (str[i] === '=') {
out += '=\n=';
cur_length = 1;
}
else {
out += str[i];
}
}
else {
// Otherwise got to char 76
// Don't insert '=\n' if end of string or next char is already \n:
if ((i === (str.length - 1)) || (str[i+1] === '\n')) {
out += str[i];
}
else {
out += '=\n' + str[i];
cur_length = 1;
}
}
}
return out;
}
var versions = process.version.split('.'),
version = Number(versions[0].substring(1)),
subversion = Number(versions[1]);
exports.existsSync = require((version > 0 || subversion >= 8) ? 'fs' : 'path').existsSync;
exports.indexOfLF = function (buf, maxlength) {
for (var i=0; i<buf.length; i++) {
if (maxlength && (i === maxlength)) break; | } | if (buf[i] === 0x0a) return i;
}
return -1; | random_line_split |
utils.js | "use strict";
// copied from http://www.broofa.com/Tools/Math.uuid.js
var CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'.split('');
exports.uuid = function () {
var chars = CHARS, uuid = new Array(36), rnd=0, r;
for (var i = 0; i < 36; i++) {
if (i==8 || i==13 || i==18 || i==23) {
uuid[i] = '-';
}
else if (i==14) {
uuid[i] = '4';
}
else {
if (rnd <= 0x02) rnd = 0x2000000 + (Math.random()*0x1000000)|0;
r = rnd & 0xf;
rnd = rnd >> 4;
uuid[i] = chars[(i == 19) ? (r & 0x3) | 0x8 : r];
}
}
return uuid.join('');
};
exports.in_array = function (item, array) {
return (array.indexOf(item) != -1);
};
exports.sort_keys = function (obj) {
return Object.keys(obj).sort();
};
exports.uniq = function (arr) {
var out = [];
var o = 0;
for (var i=0,l=arr.length; i < l; i++) {
if (out.length === 0) {
out.push(arr[i]);
}
else if (out[o] != arr[i]) {
out.push(arr[i]);
o++;
}
}
return out;
}
exports.ISODate = function (d) {
function pad(n) {return n<10 ? '0'+n : n}
return d.getUTCFullYear()+'-'
+ pad(d.getUTCMonth()+1)+'-'
+ pad(d.getUTCDate())+'T'
+ pad(d.getUTCHours())+':'
+ pad(d.getUTCMinutes())+':'
+ pad(d.getUTCSeconds())+'Z'
}
var _daynames = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
var _monnames = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
function | (num, n, p) {
var s = '' + num;
p = p || '0';
while (s.length < n) s = p + s;
return s;
}
exports.pad = _pad;
exports.date_to_str = function (d) {
return _daynames[d.getDay()] + ', ' + _pad(d.getDate(),2) + ' ' +
_monnames[d.getMonth()] + ' ' + d.getFullYear() + ' ' +
_pad(d.getHours(),2) + ':' + _pad(d.getMinutes(),2) + ':' + _pad(d.getSeconds(),2) +
' ' + d.toString().match(/\sGMT([+-]\d+)/)[1];
}
exports.decode_qp = function (line) {
line = line.replace(/\r\n/g,"\n").replace(/[ \t]+\r?\n/g,"\n");
if (! /=/.test(line)) {
// this may be a pointless optimisation...
return new Buffer(line);
}
line = line.replace(/=\n/mg, '');
var buf = new Buffer(line.length);
var pos = 0;
for (var i=0,l=line.length; i < l; i++) {
if (line[i] === '=' &&
/=[0-9a-fA-F]{2}/.test(line[i] + line[i+1] + line[i+2])) {
i++;
buf[pos] = parseInt(line[i] + line[i+1], 16);
i++;
}
else {
buf[pos] = line.charCodeAt(i);
}
pos++;
}
return buf.slice(0, pos);
}
function _char_to_qp (ch) {
return "=" + _pad(ch.charCodeAt(0).toString(16).toUpperCase(), 2);
}
// Shameless attempt to copy from Perl's MIME::QuotedPrint::Perl code.
exports.encode_qp = function (str) {
str = str.replace(/([^\ \t\n!"#\$%&'()*+,\-.\/0-9:;<>?\@A-Z\[\\\]^_`a-z{|}~])/g, function (orig, p1) {
return _char_to_qp(p1);
}).replace(/([ \t]+)$/gm, function (orig, p1) {
return p1.split('').map(_char_to_qp).join('');
});
// Now shorten lines to 76 chars, but don't break =XX encodes.
// Method: iterate over to char 73.
// If char 74, 75 or 76 is = we need to break before the =.
// Otherwise break at 76.
var cur_length = 0;
var out = '';
for (var i=0; i<str.length; i++) {
if (str[i] === '\n') {
out += '\n';
cur_length = 0;
continue;
}
cur_length++;
if (cur_length <= 73) {
out += str[i];
}
else if (cur_length > 73 && cur_length < 76) {
if (str[i] === '=') {
out += '=\n=';
cur_length = 1;
}
else {
out += str[i];
}
}
else {
// Otherwise got to char 76
// Don't insert '=\n' if end of string or next char is already \n:
if ((i === (str.length - 1)) || (str[i+1] === '\n')) {
out += str[i];
}
else {
out += '=\n' + str[i];
cur_length = 1;
}
}
}
return out;
}
var versions = process.version.split('.'),
version = Number(versions[0].substring(1)),
subversion = Number(versions[1]);
exports.existsSync = require((version > 0 || subversion >= 8) ? 'fs' : 'path').existsSync;
exports.indexOfLF = function (buf, maxlength) {
for (var i=0; i<buf.length; i++) {
if (maxlength && (i === maxlength)) break;
if (buf[i] === 0x0a) return i;
}
return -1;
}
| _pad | identifier_name |
utils.js | "use strict";
// copied from http://www.broofa.com/Tools/Math.uuid.js
var CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'.split('');
exports.uuid = function () {
var chars = CHARS, uuid = new Array(36), rnd=0, r;
for (var i = 0; i < 36; i++) {
if (i==8 || i==13 || i==18 || i==23) {
uuid[i] = '-';
}
else if (i==14) {
uuid[i] = '4';
}
else {
if (rnd <= 0x02) rnd = 0x2000000 + (Math.random()*0x1000000)|0;
r = rnd & 0xf;
rnd = rnd >> 4;
uuid[i] = chars[(i == 19) ? (r & 0x3) | 0x8 : r];
}
}
return uuid.join('');
};
exports.in_array = function (item, array) {
return (array.indexOf(item) != -1);
};
exports.sort_keys = function (obj) {
return Object.keys(obj).sort();
};
exports.uniq = function (arr) {
var out = [];
var o = 0;
for (var i=0,l=arr.length; i < l; i++) {
if (out.length === 0) {
out.push(arr[i]);
}
else if (out[o] != arr[i]) {
out.push(arr[i]);
o++;
}
}
return out;
}
exports.ISODate = function (d) {
function pad(n) |
return d.getUTCFullYear()+'-'
+ pad(d.getUTCMonth()+1)+'-'
+ pad(d.getUTCDate())+'T'
+ pad(d.getUTCHours())+':'
+ pad(d.getUTCMinutes())+':'
+ pad(d.getUTCSeconds())+'Z'
}
var _daynames = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
var _monnames = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
function _pad (num, n, p) {
var s = '' + num;
p = p || '0';
while (s.length < n) s = p + s;
return s;
}
exports.pad = _pad;
exports.date_to_str = function (d) {
return _daynames[d.getDay()] + ', ' + _pad(d.getDate(),2) + ' ' +
_monnames[d.getMonth()] + ' ' + d.getFullYear() + ' ' +
_pad(d.getHours(),2) + ':' + _pad(d.getMinutes(),2) + ':' + _pad(d.getSeconds(),2) +
' ' + d.toString().match(/\sGMT([+-]\d+)/)[1];
}
exports.decode_qp = function (line) {
line = line.replace(/\r\n/g,"\n").replace(/[ \t]+\r?\n/g,"\n");
if (! /=/.test(line)) {
// this may be a pointless optimisation...
return new Buffer(line);
}
line = line.replace(/=\n/mg, '');
var buf = new Buffer(line.length);
var pos = 0;
for (var i=0,l=line.length; i < l; i++) {
if (line[i] === '=' &&
/=[0-9a-fA-F]{2}/.test(line[i] + line[i+1] + line[i+2])) {
i++;
buf[pos] = parseInt(line[i] + line[i+1], 16);
i++;
}
else {
buf[pos] = line.charCodeAt(i);
}
pos++;
}
return buf.slice(0, pos);
}
function _char_to_qp (ch) {
return "=" + _pad(ch.charCodeAt(0).toString(16).toUpperCase(), 2);
}
// Shameless attempt to copy from Perl's MIME::QuotedPrint::Perl code.
exports.encode_qp = function (str) {
str = str.replace(/([^\ \t\n!"#\$%&'()*+,\-.\/0-9:;<>?\@A-Z\[\\\]^_`a-z{|}~])/g, function (orig, p1) {
return _char_to_qp(p1);
}).replace(/([ \t]+)$/gm, function (orig, p1) {
return p1.split('').map(_char_to_qp).join('');
});
// Now shorten lines to 76 chars, but don't break =XX encodes.
// Method: iterate over to char 73.
// If char 74, 75 or 76 is = we need to break before the =.
// Otherwise break at 76.
var cur_length = 0;
var out = '';
for (var i=0; i<str.length; i++) {
if (str[i] === '\n') {
out += '\n';
cur_length = 0;
continue;
}
cur_length++;
if (cur_length <= 73) {
out += str[i];
}
else if (cur_length > 73 && cur_length < 76) {
if (str[i] === '=') {
out += '=\n=';
cur_length = 1;
}
else {
out += str[i];
}
}
else {
// Otherwise got to char 76
// Don't insert '=\n' if end of string or next char is already \n:
if ((i === (str.length - 1)) || (str[i+1] === '\n')) {
out += str[i];
}
else {
out += '=\n' + str[i];
cur_length = 1;
}
}
}
return out;
}
var versions = process.version.split('.'),
version = Number(versions[0].substring(1)),
subversion = Number(versions[1]);
exports.existsSync = require((version > 0 || subversion >= 8) ? 'fs' : 'path').existsSync;
exports.indexOfLF = function (buf, maxlength) {
for (var i=0; i<buf.length; i++) {
if (maxlength && (i === maxlength)) break;
if (buf[i] === 0x0a) return i;
}
return -1;
}
| {return n<10 ? '0'+n : n} | identifier_body |
pollutionController.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from bs4 import BeautifulSoup
from urllib.request import urlopen
def | ():
html = urlopen("http://www.aqhi.gov.hk/en/aqhi/past-24-hours-aqhi45fd.html?stationid=80")
soup = BeautifulSoup(html, "lxml")
return soup
def getLatestAQHI(dataTable):
aqhiTable = dataTable.findAll('tr')[1].findAll('td')
aqhi = {}
aqhi['dateTime'] = aqhiTable[0].text
aqhi['index'] = aqhiTable[1].text
return aqhi
def getRawAQICN():
source = urlopen("http://aqicn.org/?city=HongKong/Central/Western&widgetscript&lang=en&size=xsmall&id=56d839cf2ad376.29520771")
source = source.read().decode('utf-8')
return source
def getLatestAQICN(source):
aqi = source.split("Air Pollution.")[1]
aqi = aqi.split("title")[1]
aqi = aqi.split("</div>")[0]
aqi = aqi.split(">")[1]
aqits = source.split("Updated on ")[1].strip()
aqits = aqits.split("<")[0]
aqhiData = {}
aqhiData['index'] = aqi
aqhiData['dateTime'] = aqits
return aqhiData
def getPollutionData():
soupAQHI = getSoupAQHI()
dataTableAQHI = soupAQHI.find('table', {'id' : 'dd_stnh24_table'})
aqhi = getLatestAQHI(dataTableAQHI)
rawAQICN = getRawAQICN()
aqicn = getLatestAQICN(rawAQICN)
data = {}
data['AQHI'] = aqhi['index']
data['AQHITS'] = aqhi['dateTime']
data['AQICN'] = aqicn['index']
data['AQICNTS'] = aqicn['dateTime']
return data
def testModule():
data = getPollutionData()
print(data['AQHI'] + " " + data['AQHITS'] + " " + data['AQICN'] + " " + data['AQICNTS']) | getSoupAQHI | identifier_name |
pollutionController.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from bs4 import BeautifulSoup
from urllib.request import urlopen
def getSoupAQHI():
html = urlopen("http://www.aqhi.gov.hk/en/aqhi/past-24-hours-aqhi45fd.html?stationid=80")
soup = BeautifulSoup(html, "lxml")
return soup
def getLatestAQHI(dataTable):
aqhiTable = dataTable.findAll('tr')[1].findAll('td')
aqhi = {}
aqhi['dateTime'] = aqhiTable[0].text
aqhi['index'] = aqhiTable[1].text
return aqhi
def getRawAQICN():
source = urlopen("http://aqicn.org/?city=HongKong/Central/Western&widgetscript&lang=en&size=xsmall&id=56d839cf2ad376.29520771")
source = source.read().decode('utf-8')
return source
def getLatestAQICN(source):
|
def getPollutionData():
soupAQHI = getSoupAQHI()
dataTableAQHI = soupAQHI.find('table', {'id' : 'dd_stnh24_table'})
aqhi = getLatestAQHI(dataTableAQHI)
rawAQICN = getRawAQICN()
aqicn = getLatestAQICN(rawAQICN)
data = {}
data['AQHI'] = aqhi['index']
data['AQHITS'] = aqhi['dateTime']
data['AQICN'] = aqicn['index']
data['AQICNTS'] = aqicn['dateTime']
return data
def testModule():
data = getPollutionData()
print(data['AQHI'] + " " + data['AQHITS'] + " " + data['AQICN'] + " " + data['AQICNTS']) | aqi = source.split("Air Pollution.")[1]
aqi = aqi.split("title")[1]
aqi = aqi.split("</div>")[0]
aqi = aqi.split(">")[1]
aqits = source.split("Updated on ")[1].strip()
aqits = aqits.split("<")[0]
aqhiData = {}
aqhiData['index'] = aqi
aqhiData['dateTime'] = aqits
return aqhiData | identifier_body |
pollutionController.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from bs4 import BeautifulSoup
from urllib.request import urlopen
def getSoupAQHI():
html = urlopen("http://www.aqhi.gov.hk/en/aqhi/past-24-hours-aqhi45fd.html?stationid=80")
soup = BeautifulSoup(html, "lxml")
return soup
def getLatestAQHI(dataTable):
aqhiTable = dataTable.findAll('tr')[1].findAll('td')
aqhi = {}
aqhi['dateTime'] = aqhiTable[0].text
aqhi['index'] = aqhiTable[1].text
return aqhi
def getRawAQICN():
source = urlopen("http://aqicn.org/?city=HongKong/Central/Western&widgetscript&lang=en&size=xsmall&id=56d839cf2ad376.29520771")
source = source.read().decode('utf-8')
return source
def getLatestAQICN(source):
aqi = source.split("Air Pollution.")[1]
aqi = aqi.split("title")[1]
aqi = aqi.split("</div>")[0]
aqi = aqi.split(">")[1] | aqits = source.split("Updated on ")[1].strip()
aqits = aqits.split("<")[0]
aqhiData = {}
aqhiData['index'] = aqi
aqhiData['dateTime'] = aqits
return aqhiData
def getPollutionData():
soupAQHI = getSoupAQHI()
dataTableAQHI = soupAQHI.find('table', {'id' : 'dd_stnh24_table'})
aqhi = getLatestAQHI(dataTableAQHI)
rawAQICN = getRawAQICN()
aqicn = getLatestAQICN(rawAQICN)
data = {}
data['AQHI'] = aqhi['index']
data['AQHITS'] = aqhi['dateTime']
data['AQICN'] = aqicn['index']
data['AQICNTS'] = aqicn['dateTime']
return data
def testModule():
data = getPollutionData()
print(data['AQHI'] + " " + data['AQHITS'] + " " + data['AQICN'] + " " + data['AQICNTS']) | random_line_split | |
nodes.py | # GeneaCrystal Copyright (C) 2012-2013
# Christian Jaeckel, <christian.doe@gmail.com>
# Frederic Kerber, <fkerber@gmail.com>
# Pascal Lessel, <maverickthe6@gmail.com>
# Michael Mauderer, <mail@michaelmauderer.de>
#
# GeneaCrystal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GeneaCrystal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GeneaCrystal. If not, see <http://www.gnu.org/licenses/>.
import libavg as avg
import pymunk
from geneacrystal import util, physic
from geneacrystal.alphaKeyboard import AlphaKeyboard
from geneacrystal.highscore import Highscore
class ItemImageNode(avg.DivNode):
def __init__(self, href, size, *args, **kwargs):
avg.DivNode.__init__(self, *args, **kwargs)
self.pivot = 0, 0
self.opacity = 1
self.sensitive = False
imageNode = avg.ImageNode(parent=self,
opacity=1,
href=href,
size=size,
)
imageNode.pos = util.vectorMult(size, -0.5)
self.image = imageNode
if __debug__:
self.elementoutlinecolor = "FFFFFF"
@property
def size(self):
return self.image.size
@size.setter
def size(self, value):
self.image.size = value
util.centerNodeOnPosition(self.image, (0,0))
def setEffect(self, node):
self.image.setEffect(node)
def setEventHandler(self, *args, **kwargs):
return self.image.setEventHandler(*args, **kwargs)
class TouchPointNode(avg.CircleNode):
def delete(self):
self.unlink(True)
def __init__(self, space, theme=None, owner=None, *args, **kwargs):
avg.CircleNode.__init__(self, *args, **kwargs)
if theme is None:
from geneacrystal import themes
self._theme = themes.DefaultTheme
self.owner = owner
self._body = physic.TouchPointBody(self)
self._body.position = tuple(self.pos)
self.filltexhref = self._theme.getStaticImage("TouchPointNode")
#self.fillcolor = "00FF00"
self.strokewidth = 0
self.shape = pymunk.Circle(self._body, self.r, (0, 0))
self.shape.elasticity = 1
self.shape.collision_type = physic.TouchPointCollisionType
space.add(self._body, self.shape)
if __debug__:
print "Created ", self
def __str__(self, *args, **kwargs):
formatString = "TouchPointNode(pos={tp.pos}, owner={tp.owner})"
return formatString.format(tp=self)
class ShieldNode(avg.LineNode):
def __init__(self, space, owner=None, *args, **kwargs):
avg.LineNode.__init__(self, *args, **kwargs)
self._body = physic.ShieldBody(self)
self.owner = owner
self._body.position = tuple(self.pos1)
from geneacrystal import themes
self.texhref = themes.DefaultTheme.getStaticImage("Wall")
self.fillopacity = 0
self.opacity = 1
space.add(self._body, self._body.shape)
self._body.sleep()
def update(self, pos1, pos2):
self.pos1 = pos1
self.pos2 = pos2
self._body.position = tuple(self.pos1)
self._body.shape.b = util.transformVector((pos2.x - pos1.x, pos2.y - pos1.y))
def delete(self):
pass
class HighscoreEntryNode(avg.DivNode):
def __init__(self, mode, score, allScores, callback=None, theme=None, *args, **kwargs):
avg.DivNode.__init__(self, *args, **kwargs)
if theme is None:
from geneacrystal import themes
theme = themes.DefaultTheme
bgPath = theme.getStaticImage("keySymbol")
backPath = theme.getStaticImage("backspaceSymbol")
enterPath = theme.getStaticImage("enterSymbol")
shiftPath = theme.getStaticImage("shiftSymbol")
emptyPath = theme.getStaticImage("spaceSymbol")
highscore = Highscore(mode)
myScores = []
myScores.extend(allScores)
myScores.extend(highscore.scores)
myScores.sort(reverse=True, key=lambda val: int(val))
if len(myScores) < util.MAX_HIGHSCORE_LENGTH or score > int(myScores[9]) or score == int(myScores[9]) and not score in highscore.scores:
self.__value = ""
def onKeyDown(keyCode):
if len(self.__value) < 20:
|
def onBack():
self.__value = self.__value[0:-1]
self.__edit.text = self.__value
def onEnter():
if not self.__value == "":
highscore.addEntry(self.__value, score)
if callback is not None:
callback(self.__value)
self._keyboard.cleanup()
self._keyboard.unlink(True)
self._keyboard = None
self.__edit.unlink(True)
self.__edit = None
self.unlink(True)
self.__edit = avg.WordsNode(size=(self.size.x, self.size.y // 8),
parent=self, fontsize=self.size.y // 8,
alignment="center")
self.__edit.pos = (self.size.x // 2, 0)
self._keyboard = AlphaKeyboard(bgPath, backPath, enterPath, shiftPath,
emptyPath , onKeyDown=onKeyDown,
onBack=onBack, onEnter=onEnter,
size=(self.size.x, self.size.y // 10 * 8),
pos=(0, self.size.y // 5),
parent=self)
else:
if callback is not None:
callback("")
self.unlink(True)
class ItemImageLayeredNode(avg.DivNode):
def __init__(self, layers,size, *args, **kwargs):
avg.DivNode.__init__(self, *args, **kwargs)
self.pivot = 0, 0
self.opacity = 1
self.sensitive = False
childPos = util.vectorMult(size, -0.5)
self._layer = []
self._topImage = None
for image in layers:
node = avg.ImageNode(parent=self,
opacity=1,
href=image,
size=size,
pos=childPos,
sensitive=False
)
self._layer.append(node)
node.sensitive=True
self._topImage = self._layer[-1]
def removeLayer(self, index):
node = self._layer[index]
node.unlink(True)
self._layer.remove(node)
if node == self._topImage:
self._topImage = self._layer[-1]
@property
def size(self):
return self._layer[0].size
def setEventHandler(self, *args, **kwargs):
return self._topImage.setEventHandler(*args, **kwargs)
def setEffect(self, *args, **kwargs):
for node in self._layer:
node.setEffect(*args, **kwargs)
class OverlayNode(avg.DivNode):
def __init__(self, theme=None, *args, **kwargs):
if theme is None:
from geneacrystal import themes
theme = themes.StandardTheme()
super(OverlayNode, self).__init__(*args, **kwargs)
self._background=theme.getNode("ExitButton")(size=self.size, parent=self, opacity=1);
class StaticOverlayNode(OverlayNode):
def __init__(self, finishCB, *args, **kwargs):
super(StaticOverlayNode, self).__init__(*args, **kwargs)
self.__anim = None
self.__initalRadius=self._background.size.x*0.08
self.__circle = avg.CircleNode(pos=(self._background.size.x//2, self._background.size.y//2), r=self.__initalRadius, fillcolor="000000", fillopacity=1.0, parent=self)
self.__finishCB = finishCB
self.setEventHandler(avg.CURSORDOWN,avg.TOUCH | avg.MOUSE, lambda x: self.__start())
self.setEventHandler(avg.CURSOROUT,avg.TOUCH | avg.MOUSE, lambda x: self.__abort())
self.setEventHandler(avg.CURSORUP,avg.TOUCH | avg.MOUSE, lambda x: self.__abort())
def __start(self):
self.__circle.sensitive=False
self.__aborted = True
if self.__anim is not None:
self.__anim.abort()
self.__anim = avg.LinearAnim(self.__circle,"r", 2000, self.__circle.r, self._background.size.y//2, False, None, self.__finish)
self.__aborted = False
self.__anim.start()
def __abort(self):
if self.__anim is not None:
self.__aborted = True
self.__anim.abort()
self.__anim = None
self.__circle.r = self.__initalRadius
self.__circle.sensitive=True
def __finish(self):
if not self.__aborted:
self.__anim = None
self.__finishCB()
self.__circle.r = self.__initalRadius
self.__circle.sensitive=True
| self.__value += keyCode
self.__edit.text += keyCode | conditional_block |
nodes.py | # GeneaCrystal Copyright (C) 2012-2013
# Christian Jaeckel, <christian.doe@gmail.com>
# Frederic Kerber, <fkerber@gmail.com>
# Pascal Lessel, <maverickthe6@gmail.com>
# Michael Mauderer, <mail@michaelmauderer.de>
#
# GeneaCrystal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GeneaCrystal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GeneaCrystal. If not, see <http://www.gnu.org/licenses/>.
import libavg as avg
import pymunk
from geneacrystal import util, physic
from geneacrystal.alphaKeyboard import AlphaKeyboard
from geneacrystal.highscore import Highscore
class ItemImageNode(avg.DivNode):
def __init__(self, href, size, *args, **kwargs):
avg.DivNode.__init__(self, *args, **kwargs)
self.pivot = 0, 0
self.opacity = 1
self.sensitive = False
imageNode = avg.ImageNode(parent=self,
opacity=1,
href=href,
size=size,
)
imageNode.pos = util.vectorMult(size, -0.5)
self.image = imageNode
if __debug__:
self.elementoutlinecolor = "FFFFFF"
@property
def size(self):
return self.image.size
@size.setter
def size(self, value):
self.image.size = value
util.centerNodeOnPosition(self.image, (0,0))
def setEffect(self, node):
self.image.setEffect(node)
def setEventHandler(self, *args, **kwargs):
return self.image.setEventHandler(*args, **kwargs)
class TouchPointNode(avg.CircleNode):
def delete(self):
self.unlink(True)
def __init__(self, space, theme=None, owner=None, *args, **kwargs):
avg.CircleNode.__init__(self, *args, **kwargs)
if theme is None:
from geneacrystal import themes
self._theme = themes.DefaultTheme
self.owner = owner
self._body = physic.TouchPointBody(self)
self._body.position = tuple(self.pos)
self.filltexhref = self._theme.getStaticImage("TouchPointNode")
#self.fillcolor = "00FF00"
self.strokewidth = 0
self.shape = pymunk.Circle(self._body, self.r, (0, 0))
self.shape.elasticity = 1
self.shape.collision_type = physic.TouchPointCollisionType
space.add(self._body, self.shape)
if __debug__:
print "Created ", self
def __str__(self, *args, **kwargs):
formatString = "TouchPointNode(pos={tp.pos}, owner={tp.owner})"
return formatString.format(tp=self)
class ShieldNode(avg.LineNode):
def __init__(self, space, owner=None, *args, **kwargs):
avg.LineNode.__init__(self, *args, **kwargs)
self._body = physic.ShieldBody(self)
self.owner = owner
self._body.position = tuple(self.pos1)
from geneacrystal import themes
self.texhref = themes.DefaultTheme.getStaticImage("Wall")
self.fillopacity = 0
self.opacity = 1
space.add(self._body, self._body.shape)
self._body.sleep()
def update(self, pos1, pos2):
self.pos1 = pos1
self.pos2 = pos2
self._body.position = tuple(self.pos1)
self._body.shape.b = util.transformVector((pos2.x - pos1.x, pos2.y - pos1.y))
def | (self):
pass
class HighscoreEntryNode(avg.DivNode):
def __init__(self, mode, score, allScores, callback=None, theme=None, *args, **kwargs):
avg.DivNode.__init__(self, *args, **kwargs)
if theme is None:
from geneacrystal import themes
theme = themes.DefaultTheme
bgPath = theme.getStaticImage("keySymbol")
backPath = theme.getStaticImage("backspaceSymbol")
enterPath = theme.getStaticImage("enterSymbol")
shiftPath = theme.getStaticImage("shiftSymbol")
emptyPath = theme.getStaticImage("spaceSymbol")
highscore = Highscore(mode)
myScores = []
myScores.extend(allScores)
myScores.extend(highscore.scores)
myScores.sort(reverse=True, key=lambda val: int(val))
if len(myScores) < util.MAX_HIGHSCORE_LENGTH or score > int(myScores[9]) or score == int(myScores[9]) and not score in highscore.scores:
self.__value = ""
def onKeyDown(keyCode):
if len(self.__value) < 20:
self.__value += keyCode
self.__edit.text += keyCode
def onBack():
self.__value = self.__value[0:-1]
self.__edit.text = self.__value
def onEnter():
if not self.__value == "":
highscore.addEntry(self.__value, score)
if callback is not None:
callback(self.__value)
self._keyboard.cleanup()
self._keyboard.unlink(True)
self._keyboard = None
self.__edit.unlink(True)
self.__edit = None
self.unlink(True)
self.__edit = avg.WordsNode(size=(self.size.x, self.size.y // 8),
parent=self, fontsize=self.size.y // 8,
alignment="center")
self.__edit.pos = (self.size.x // 2, 0)
self._keyboard = AlphaKeyboard(bgPath, backPath, enterPath, shiftPath,
emptyPath , onKeyDown=onKeyDown,
onBack=onBack, onEnter=onEnter,
size=(self.size.x, self.size.y // 10 * 8),
pos=(0, self.size.y // 5),
parent=self)
else:
if callback is not None:
callback("")
self.unlink(True)
class ItemImageLayeredNode(avg.DivNode):
def __init__(self, layers,size, *args, **kwargs):
avg.DivNode.__init__(self, *args, **kwargs)
self.pivot = 0, 0
self.opacity = 1
self.sensitive = False
childPos = util.vectorMult(size, -0.5)
self._layer = []
self._topImage = None
for image in layers:
node = avg.ImageNode(parent=self,
opacity=1,
href=image,
size=size,
pos=childPos,
sensitive=False
)
self._layer.append(node)
node.sensitive=True
self._topImage = self._layer[-1]
def removeLayer(self, index):
node = self._layer[index]
node.unlink(True)
self._layer.remove(node)
if node == self._topImage:
self._topImage = self._layer[-1]
@property
def size(self):
return self._layer[0].size
def setEventHandler(self, *args, **kwargs):
return self._topImage.setEventHandler(*args, **kwargs)
def setEffect(self, *args, **kwargs):
for node in self._layer:
node.setEffect(*args, **kwargs)
class OverlayNode(avg.DivNode):
def __init__(self, theme=None, *args, **kwargs):
if theme is None:
from geneacrystal import themes
theme = themes.StandardTheme()
super(OverlayNode, self).__init__(*args, **kwargs)
self._background=theme.getNode("ExitButton")(size=self.size, parent=self, opacity=1);
class StaticOverlayNode(OverlayNode):
def __init__(self, finishCB, *args, **kwargs):
super(StaticOverlayNode, self).__init__(*args, **kwargs)
self.__anim = None
self.__initalRadius=self._background.size.x*0.08
self.__circle = avg.CircleNode(pos=(self._background.size.x//2, self._background.size.y//2), r=self.__initalRadius, fillcolor="000000", fillopacity=1.0, parent=self)
self.__finishCB = finishCB
self.setEventHandler(avg.CURSORDOWN,avg.TOUCH | avg.MOUSE, lambda x: self.__start())
self.setEventHandler(avg.CURSOROUT,avg.TOUCH | avg.MOUSE, lambda x: self.__abort())
self.setEventHandler(avg.CURSORUP,avg.TOUCH | avg.MOUSE, lambda x: self.__abort())
def __start(self):
self.__circle.sensitive=False
self.__aborted = True
if self.__anim is not None:
self.__anim.abort()
self.__anim = avg.LinearAnim(self.__circle,"r", 2000, self.__circle.r, self._background.size.y//2, False, None, self.__finish)
self.__aborted = False
self.__anim.start()
def __abort(self):
if self.__anim is not None:
self.__aborted = True
self.__anim.abort()
self.__anim = None
self.__circle.r = self.__initalRadius
self.__circle.sensitive=True
def __finish(self):
if not self.__aborted:
self.__anim = None
self.__finishCB()
self.__circle.r = self.__initalRadius
self.__circle.sensitive=True
| delete | identifier_name |
nodes.py | # GeneaCrystal Copyright (C) 2012-2013
# Christian Jaeckel, <christian.doe@gmail.com>
# Frederic Kerber, <fkerber@gmail.com>
# Pascal Lessel, <maverickthe6@gmail.com>
# Michael Mauderer, <mail@michaelmauderer.de>
#
# GeneaCrystal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GeneaCrystal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GeneaCrystal. If not, see <http://www.gnu.org/licenses/>.
import libavg as avg
import pymunk
from geneacrystal import util, physic
from geneacrystal.alphaKeyboard import AlphaKeyboard
from geneacrystal.highscore import Highscore
class ItemImageNode(avg.DivNode):
def __init__(self, href, size, *args, **kwargs):
avg.DivNode.__init__(self, *args, **kwargs)
self.pivot = 0, 0
self.opacity = 1
self.sensitive = False
imageNode = avg.ImageNode(parent=self,
opacity=1,
href=href,
size=size,
)
imageNode.pos = util.vectorMult(size, -0.5)
self.image = imageNode
if __debug__:
self.elementoutlinecolor = "FFFFFF"
@property
def size(self):
return self.image.size
@size.setter
def size(self, value):
self.image.size = value
util.centerNodeOnPosition(self.image, (0,0))
def setEffect(self, node):
self.image.setEffect(node)
def setEventHandler(self, *args, **kwargs):
return self.image.setEventHandler(*args, **kwargs)
class TouchPointNode(avg.CircleNode):
def delete(self):
self.unlink(True)
def __init__(self, space, theme=None, owner=None, *args, **kwargs):
avg.CircleNode.__init__(self, *args, **kwargs)
if theme is None:
from geneacrystal import themes
self._theme = themes.DefaultTheme
self.owner = owner
self._body = physic.TouchPointBody(self)
self._body.position = tuple(self.pos)
self.filltexhref = self._theme.getStaticImage("TouchPointNode")
#self.fillcolor = "00FF00"
self.strokewidth = 0
self.shape = pymunk.Circle(self._body, self.r, (0, 0))
self.shape.elasticity = 1
self.shape.collision_type = physic.TouchPointCollisionType
space.add(self._body, self.shape)
if __debug__:
print "Created ", self
def __str__(self, *args, **kwargs):
formatString = "TouchPointNode(pos={tp.pos}, owner={tp.owner})"
return formatString.format(tp=self)
class ShieldNode(avg.LineNode):
def __init__(self, space, owner=None, *args, **kwargs):
avg.LineNode.__init__(self, *args, **kwargs)
self._body = physic.ShieldBody(self)
self.owner = owner
self._body.position = tuple(self.pos1)
from geneacrystal import themes
self.texhref = themes.DefaultTheme.getStaticImage("Wall")
self.fillopacity = 0
self.opacity = 1
space.add(self._body, self._body.shape)
self._body.sleep()
def update(self, pos1, pos2):
self.pos1 = pos1
self.pos2 = pos2
self._body.position = tuple(self.pos1)
self._body.shape.b = util.transformVector((pos2.x - pos1.x, pos2.y - pos1.y))
def delete(self):
pass
class HighscoreEntryNode(avg.DivNode):
def __init__(self, mode, score, allScores, callback=None, theme=None, *args, **kwargs):
avg.DivNode.__init__(self, *args, **kwargs)
if theme is None:
from geneacrystal import themes
theme = themes.DefaultTheme
bgPath = theme.getStaticImage("keySymbol")
backPath = theme.getStaticImage("backspaceSymbol")
enterPath = theme.getStaticImage("enterSymbol")
shiftPath = theme.getStaticImage("shiftSymbol")
emptyPath = theme.getStaticImage("spaceSymbol")
highscore = Highscore(mode)
myScores = []
myScores.extend(allScores)
myScores.extend(highscore.scores)
myScores.sort(reverse=True, key=lambda val: int(val))
if len(myScores) < util.MAX_HIGHSCORE_LENGTH or score > int(myScores[9]) or score == int(myScores[9]) and not score in highscore.scores:
self.__value = ""
def onKeyDown(keyCode):
if len(self.__value) < 20:
self.__value += keyCode
self.__edit.text += keyCode
def onBack():
self.__value = self.__value[0:-1]
self.__edit.text = self.__value
def onEnter():
if not self.__value == "":
highscore.addEntry(self.__value, score)
if callback is not None:
callback(self.__value)
self._keyboard.cleanup()
self._keyboard.unlink(True)
self._keyboard = None
self.__edit.unlink(True)
self.__edit = None
self.unlink(True)
self.__edit = avg.WordsNode(size=(self.size.x, self.size.y // 8),
parent=self, fontsize=self.size.y // 8,
alignment="center")
self.__edit.pos = (self.size.x // 2, 0)
self._keyboard = AlphaKeyboard(bgPath, backPath, enterPath, shiftPath,
emptyPath , onKeyDown=onKeyDown,
onBack=onBack, onEnter=onEnter,
size=(self.size.x, self.size.y // 10 * 8),
pos=(0, self.size.y // 5),
parent=self)
else:
if callback is not None:
callback("")
self.unlink(True)
class ItemImageLayeredNode(avg.DivNode):
def __init__(self, layers,size, *args, **kwargs):
avg.DivNode.__init__(self, *args, **kwargs)
self.pivot = 0, 0
self.opacity = 1
self.sensitive = False
childPos = util.vectorMult(size, -0.5)
self._layer = []
self._topImage = None
for image in layers:
node = avg.ImageNode(parent=self,
opacity=1,
href=image,
size=size,
pos=childPos,
sensitive=False
)
self._layer.append(node)
node.sensitive=True
self._topImage = self._layer[-1]
def removeLayer(self, index):
node = self._layer[index]
node.unlink(True)
self._layer.remove(node)
if node == self._topImage:
self._topImage = self._layer[-1]
@property
def size(self):
return self._layer[0].size
def setEventHandler(self, *args, **kwargs):
return self._topImage.setEventHandler(*args, **kwargs)
def setEffect(self, *args, **kwargs):
for node in self._layer:
node.setEffect(*args, **kwargs)
class OverlayNode(avg.DivNode):
def __init__(self, theme=None, *args, **kwargs):
if theme is None:
from geneacrystal import themes
theme = themes.StandardTheme()
super(OverlayNode, self).__init__(*args, **kwargs)
self._background=theme.getNode("ExitButton")(size=self.size, parent=self, opacity=1);
class StaticOverlayNode(OverlayNode):
| def __init__(self, finishCB, *args, **kwargs):
super(StaticOverlayNode, self).__init__(*args, **kwargs)
self.__anim = None
self.__initalRadius=self._background.size.x*0.08
self.__circle = avg.CircleNode(pos=(self._background.size.x//2, self._background.size.y//2), r=self.__initalRadius, fillcolor="000000", fillopacity=1.0, parent=self)
self.__finishCB = finishCB
self.setEventHandler(avg.CURSORDOWN,avg.TOUCH | avg.MOUSE, lambda x: self.__start())
self.setEventHandler(avg.CURSOROUT,avg.TOUCH | avg.MOUSE, lambda x: self.__abort())
self.setEventHandler(avg.CURSORUP,avg.TOUCH | avg.MOUSE, lambda x: self.__abort())
def __start(self):
self.__circle.sensitive=False
self.__aborted = True
if self.__anim is not None:
self.__anim.abort()
self.__anim = avg.LinearAnim(self.__circle,"r", 2000, self.__circle.r, self._background.size.y//2, False, None, self.__finish)
self.__aborted = False
self.__anim.start()
def __abort(self):
if self.__anim is not None:
self.__aborted = True
self.__anim.abort()
self.__anim = None
self.__circle.r = self.__initalRadius
self.__circle.sensitive=True
def __finish(self):
if not self.__aborted:
self.__anim = None
self.__finishCB()
self.__circle.r = self.__initalRadius
self.__circle.sensitive=True | identifier_body | |
nodes.py | # GeneaCrystal Copyright (C) 2012-2013
# Christian Jaeckel, <christian.doe@gmail.com>
# Frederic Kerber, <fkerber@gmail.com>
# Pascal Lessel, <maverickthe6@gmail.com>
# Michael Mauderer, <mail@michaelmauderer.de>
| # GeneaCrystal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GeneaCrystal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GeneaCrystal. If not, see <http://www.gnu.org/licenses/>.
import libavg as avg
import pymunk
from geneacrystal import util, physic
from geneacrystal.alphaKeyboard import AlphaKeyboard
from geneacrystal.highscore import Highscore
class ItemImageNode(avg.DivNode):
    """Div that renders a single image centered on the node's origin.

    The wrapped ImageNode is offset by half its size so the div's
    position marks the image center rather than its top-left corner.
    """

    def __init__(self, href, size, *args, **kwargs):
        avg.DivNode.__init__(self, *args, **kwargs)
        self.pivot = 0, 0
        self.opacity = 1
        self.sensitive = False
        image = avg.ImageNode(
            parent=self,
            opacity=1,
            href=href,
            size=size,
        )
        # Shift by half the size so the image is centered on (0, 0).
        image.pos = util.vectorMult(size, -0.5)
        self.image = image
        if __debug__:
            self.elementoutlinecolor = "FFFFFF"

    @property
    def size(self):
        # The div delegates its size entirely to the wrapped image.
        return self.image.size

    @size.setter
    def size(self, value):
        self.image.size = value
        util.centerNodeOnPosition(self.image, (0, 0))

    def setEffect(self, node):
        self.image.setEffect(node)

    def setEventHandler(self, *args, **kwargs):
        # Events are received by the image, not the (insensitive) div.
        return self.image.setEventHandler(*args, **kwargs)
class TouchPointNode(avg.CircleNode):
def delete(self):
self.unlink(True)
def __init__(self, space, theme=None, owner=None, *args, **kwargs):
avg.CircleNode.__init__(self, *args, **kwargs)
if theme is None:
from geneacrystal import themes
self._theme = themes.DefaultTheme
self.owner = owner
self._body = physic.TouchPointBody(self)
self._body.position = tuple(self.pos)
self.filltexhref = self._theme.getStaticImage("TouchPointNode")
#self.fillcolor = "00FF00"
self.strokewidth = 0
self.shape = pymunk.Circle(self._body, self.r, (0, 0))
self.shape.elasticity = 1
self.shape.collision_type = physic.TouchPointCollisionType
space.add(self._body, self.shape)
if __debug__:
print "Created ", self
def __str__(self, *args, **kwargs):
formatString = "TouchPointNode(pos={tp.pos}, owner={tp.owner})"
return formatString.format(tp=self)
class ShieldNode(avg.LineNode):
    """Textured line segment backed by a (initially sleeping) physics body."""

    def __init__(self, space, owner=None, *args, **kwargs):
        avg.LineNode.__init__(self, *args, **kwargs)
        self.owner = owner
        self._body = physic.ShieldBody(self)
        self._body.position = tuple(self.pos1)
        from geneacrystal import themes
        self.texhref = themes.DefaultTheme.getStaticImage("Wall")
        self.fillopacity = 0
        self.opacity = 1
        space.add(self._body, self._body.shape)
        # The shield stays inactive until explicitly woken by the physics.
        self._body.sleep()

    def update(self, pos1, pos2):
        """Move both the visual line and its physics segment to new endpoints."""
        self.pos1 = pos1
        self.pos2 = pos2
        self._body.position = tuple(self.pos1)
        delta = (pos2.x - pos1.x, pos2.y - pos1.y)
        self._body.shape.b = util.transformVector(delta)

    def delete(self):
        # Nothing to release; kept for interface symmetry with other nodes.
        pass
class HighscoreEntryNode(avg.DivNode):
    """Name-entry dialog shown when a score qualifies for the highscore table.

    If the score does not qualify, the node calls ``callback("")`` (when
    given) and removes itself immediately.
    """

    def __init__(self, mode, score, allScores, callback=None, theme=None,
                 *args, **kwargs):
        avg.DivNode.__init__(self, *args, **kwargs)
        if theme is None:
            from geneacrystal import themes
            theme = themes.DefaultTheme

        bgPath = theme.getStaticImage("keySymbol")
        backPath = theme.getStaticImage("backspaceSymbol")
        enterPath = theme.getStaticImage("enterSymbol")
        shiftPath = theme.getStaticImage("shiftSymbol")
        emptyPath = theme.getStaticImage("spaceSymbol")

        highscore = Highscore(mode)

        # Merge the scores of the current round with the persisted ones and
        # sort best-first (entries are stored as strings, hence int()).
        myScores = []
        myScores.extend(allScores)
        myScores.extend(highscore.scores)
        myScores.sort(reverse=True, key=lambda val: int(val))

        # Qualifies when the table is not full yet, or the score beats the
        # last qualifying entry, or ties it without already being recorded.
        # NOTE(review): index 9 hard-codes MAX_HIGHSCORE_LENGTH - 1; keep
        # them in sync if the table size ever changes.
        if (len(myScores) < util.MAX_HIGHSCORE_LENGTH
                or score > int(myScores[9])
                or (score == int(myScores[9])
                    and score not in highscore.scores)):
            self.__value = ""

            def onKeyDown(keyCode):
                # Cap the entered name at 20 characters.
                if len(self.__value) < 20:
                    self.__value += keyCode
                    self.__edit.text += keyCode

            def onBack():
                self.__value = self.__value[0:-1]
                self.__edit.text = self.__value

            def onEnter():
                # Empty names are not persisted, but the callback still fires.
                if not self.__value == "":
                    highscore.addEntry(self.__value, score)
                if callback is not None:
                    callback(self.__value)
                self._keyboard.cleanup()
                self._keyboard.unlink(True)
                self._keyboard = None
                self.__edit.unlink(True)
                self.__edit = None
                self.unlink(True)

            self.__edit = avg.WordsNode(size=(self.size.x, self.size.y // 8),
                                        parent=self,
                                        fontsize=self.size.y // 8,
                                        alignment="center")
            self.__edit.pos = (self.size.x // 2, 0)
            self._keyboard = AlphaKeyboard(bgPath, backPath, enterPath,
                                           shiftPath, emptyPath,
                                           onKeyDown=onKeyDown,
                                           onBack=onBack, onEnter=onEnter,
                                           size=(self.size.x,
                                                 self.size.y // 10 * 8),
                                           pos=(0, self.size.y // 5),
                                           parent=self)
        else:
            if callback is not None:
                callback("")
            self.unlink(True)
class ItemImageLayeredNode(avg.DivNode):
    """Div stacking several equally sized images, all centered on the origin.

    The last entry of *layers* becomes the top image, which is the one
    receiving events.
    """

    def __init__(self, layers, size, *args, **kwargs):
        avg.DivNode.__init__(self, *args, **kwargs)
        self.pivot = 0, 0
        self.opacity = 1
        self.sensitive = False
        # Offset shared by all layers so they are centered on (0, 0).
        offset = util.vectorMult(size, -0.5)
        self._layer = []
        self._topImage = None
        for href in layers:
            node = avg.ImageNode(parent=self,
                                 opacity=1,
                                 href=href,
                                 size=size,
                                 pos=offset,
                                 sensitive=False)
            self._layer.append(node)
            node.sensitive = True
        self._topImage = self._layer[-1]

    def removeLayer(self, index):
        """Drop the layer at *index*; the new top image is the last layer."""
        node = self._layer[index]
        node.unlink(True)
        self._layer.remove(node)
        if node == self._topImage:
            self._topImage = self._layer[-1]

    @property
    def size(self):
        # All layers share the same size; report the bottom one's.
        return self._layer[0].size

    def setEventHandler(self, *args, **kwargs):
        # Only the top-most image handles events.
        return self._topImage.setEventHandler(*args, **kwargs)

    def setEffect(self, *args, **kwargs):
        for node in self._layer:
            node.setEffect(*args, **kwargs)
class OverlayNode(avg.DivNode):
    """Base overlay that fills itself with the themed "ExitButton" node."""

    def __init__(self, theme=None, *args, **kwargs):
        if theme is None:
            from geneacrystal import themes
            theme = themes.StandardTheme()
        super(OverlayNode, self).__init__(*args, **kwargs)
        self._background = theme.getNode("ExitButton")(
            size=self.size, parent=self, opacity=1)
class StaticOverlayNode(OverlayNode):
def __init__(self, finishCB, *args, **kwargs):
super(StaticOverlayNode, self).__init__(*args, **kwargs)
self.__anim = None
self.__initalRadius=self._background.size.x*0.08
self.__circle = avg.CircleNode(pos=(self._background.size.x//2, self._background.size.y//2), r=self.__initalRadius, fillcolor="000000", fillopacity=1.0, parent=self)
self.__finishCB = finishCB
self.setEventHandler(avg.CURSORDOWN,avg.TOUCH | avg.MOUSE, lambda x: self.__start())
self.setEventHandler(avg.CURSOROUT,avg.TOUCH | avg.MOUSE, lambda x: self.__abort())
self.setEventHandler(avg.CURSORUP,avg.TOUCH | avg.MOUSE, lambda x: self.__abort())
def __start(self):
self.__circle.sensitive=False
self.__aborted = True
if self.__anim is not None:
self.__anim.abort()
self.__anim = avg.LinearAnim(self.__circle,"r", 2000, self.__circle.r, self._background.size.y//2, False, None, self.__finish)
self.__aborted = False
self.__anim.start()
def __abort(self):
if self.__anim is not None:
self.__aborted = True
self.__anim.abort()
self.__anim = None
self.__circle.r = self.__initalRadius
self.__circle.sensitive=True
def __finish(self):
if not self.__aborted:
self.__anim = None
self.__finishCB()
self.__circle.r = self.__initalRadius
self.__circle.sensitive=True | #
| random_line_split |
jquery.freakload.js | /*
* freakLoad - v0.1.0
* Preloader JS library
* https://github.com/nofreakz/freakLoad
*
* Copyright (c) 2014
* MIT License
*/
;(function($, win, doc) {
'use strict';
/*
 * DEFAULTS
 */
// _plugin  : jQuery namespace and $.data key under which the singleton lives.
// itemTpl  : template merged over every queued item:
//   node     - DOM element the item was collected from (element plugin only)
//   url      - resource to load
//   data     - extra request parameters handed to $.ajax
//   priority - numeric weight; higher loads first
//   tags     - group names this item belongs to
//   async    - per-item override of the global async option
//   progress - 0..1 download progress, updated from the XHR progress event
//   onStart/onComplete - per-item callbacks (fall back to defaults.item)
//   xhr      - jqXHR while loading, then the string 'complete' or 'fail'
// groupTpl : template for a named (tag) queue.
// defaults : global options: load order of groups plus global, per-item and
//            per-group start/complete callbacks.
var _plugin = 'freakLoad',
itemTpl = {
node: undefined,
url: '',
data: {},
priority: 0.5,
tags: [],
async: true,
progress: 0,
onStart: $.noop,
onComplete: $.noop,
xhr: null
},
groupTpl = {
items: [],
loaded: 0
},
defaults = {
async: true,
groupOrder: [],
onStart: $.noop,
onComplete: $.noop,
item: {
onStart: $.noop,
onComplete: $.noop
},
group: {
onStart: $.noop,
onComplete: $.noop
}
};
/*
* CONSTRUCTOR
*/
function | (items, options) {
this.opt = $.extend(true, {}, defaults, options);
this.init(items);
}
Plugin.prototype = {

    /*
     * DATA
     */
    // NOTE(review): this object lives on the prototype, so every Plugin
    // instance shares one queue state. The plugin is only ever created as
    // a document-level singleton (see $.data(doc, _plugin) below), so this
    // is harmless today, but it would break with multiple instances.
    data: {
        // queue as an object so multiple named (per-tag) queues are possible
        queue: {
            loaded: 0,
            items: [],
            groups: {} // entries follow @groupTpl
        },
        requested: {
            items: [],
            groups: []
        },
        progress: 0
    },

    /*
     * PUBLIC
     */

    // Queue the initial items, fire the global onStart callback and load.
    init: function(items) {
        this._addItems(items);
        this.opt.onStart();
        this.load();
    },

    // Load every queued item; groups listed in opt.groupOrder go first,
    // groups that weren't listed load as regular items.
    load: function() {
        var group;
        if (this.opt.groupOrder.length) {
            for (group in this.opt.groupOrder) {
                this.loadGroup(this.opt.groupOrder[group]);
            }
        }
        this._request();
    },

    // Load only the items tagged with groupName.
    loadGroup: function(groupName) {
        if (this._isGroup(groupName)) {
            this._request(groupName);
        } else {
            console.warn('No items was found to be loaded on the group "' + groupName + '".');
        }
    },

    // Add new items and, unless load === false, start loading right away.
    add: function(items, load) {
        this._addItems(items);
        // load by default
        if (load === false ? load : true) {
            this.load();
        }
    },

    // Abort one item, or every item that has not finished loading yet.
    abort: function(item) {
        if (item) {
            // BUG FIX: _abortItem is a prototype method; the original code
            // called it as a bare function, which threw a ReferenceError.
            this._abortItem(item);
            return;
        }
        // BUG FIX: the original iterated this.queue.*, which does not
        // exist -- the queue lives at this.data.queue.
        var queue = this.data.queue;
        for (var l = queue.loaded; l < queue.items.length; l++) {
            this._abortItem(queue.items[l]);
        }
    },

    // Expose the internal queue/progress state (shared, not a copy).
    getData: function() {
        return this.data;
    },

    /*
     * PRIVATE
     */

    // Add items to the general queue and to every tag-specific queue.
    _addItems: function(items) {
        var queue = this.data.queue,
            item = {},
            tag = '',
            i = 0,
            t = 0;
        items = this._normalizeItems(items);
        this._setPriority(items);
        for (i in items) {
            item = items[i];
            queue.items[queue.items.length] = item;
            // create the tag queues on demand
            if (item.tags.length) {
                for (t in item.tags) {
                    tag = item.tags[t];
                    this._createGroup(tag);
                    // add the item to its group queue as well
                    queue.groups[tag].items[queue.groups[tag].items.length] = item;
                }
            }
        }
    },

    // NOTE(review): assumes item.xhr is a pending jqXHR; finished items
    // hold the string 'complete'/'fail' -- only abort unfinished items.
    _abortItem: function(item) {
        item.xhr.abort();
        item.progress = 0;
    },

    // Accept a single item, a bare URL string or a mixed array and return
    // an array of full item objects based on itemTpl.
    _normalizeItems: function(items) {
        var item = {},
            i = 0;
        if (!(items instanceof Array)) {
            items = [items];
        }
        for (i in items) {
            item = items[i];
            if (typeof item !== 'object') {
                item = { url: item };
            }
            items[i] = item = $.extend({}, itemTpl, item);
            // NOTE(review): falls back to 0.1 here although itemTpl
            // declares 0.5 -- only hit for non-numeric priorities.
            item.priority = parseFloat(item.priority) || 0.1;
        }
        return items;
    },

    // Order items by descending priority (in place).
    _setPriority: function(items) {
        items.sort(function(a, b) {
            return b.priority - a.priority;
        });
        return items;
    },

    // Create the queue for a tag unless it already exists.
    _createGroup: function(tag) {
        if (!this._isGroup(tag)) {
            this.data.queue.groups[tag] = $.extend(true, {}, groupTpl);
        }
    },

    _isGroup: function(groupName) {
        return this.data.queue.groups.hasOwnProperty(groupName) ? true : false;
    },

    // Hand the items of one queue to _load. groupName selects a tag
    // queue; without it the general queue is used.
    _request: function(groupName) {
        var data = this.data,
            group = data.queue,
            i = 0,
            len = 0;
        if (groupName) {
            group = data.queue.groups[groupName];
        }
        // stop once every item of the general queue has been requested
        for (len = group.items.length; i < len && data.requested.items.length < data.queue.items.length; i++) {
            this._load(group.items[i], group, groupName);
        }
    },

    // Issue the actual ajax request for one item (at most once per url).
    _load: function(item, group, groupName) {
        var self = this,
            data = this.data;
        // skip urls that have already been requested
        if (data.requested.items.indexOf(item.url) === -1) {
            // NOTE(review): the ternary looks inverted -- params are only
            // serialized when item.data is NOT a plain object; verify intent.
            data.requested.items[data.requested.items.length] = item.url + ($.isPlainObject(item.data) ? '' : '?' + $.param(item.data));
            // fire the item start callback (falling back to the default)
            (item.onStart !== $.noop ? item.onStart : this.opt.item.onStart)(item.node);
            if (groupName && data.requested.groups.indexOf(groupName) === -1) {
                data.requested.groups[data.requested.groups.length] = groupName;
                this.opt.group.onStart(groupName);
            }
            item.xhr = $.ajax({
                // custom XHR so download progress can be tracked per item
                xhr: function() {
                    var _xhr = new win.XMLHttpRequest();
                    _xhr.addEventListener('progress', function(evt) {
                        if (evt.lengthComputable) {
                            item.progress = evt.loaded / evt.total;
                        }
                    }, false);
                    return _xhr;
                },
                url: item.url,
                data: item.data,
                async: item.async ? item.async : self.opt.async
            })
            .success(function(response) {
                if (groupName) {
                    group.loaded++;
                }
                data.queue.loaded++;
                // the response is only passed on for text-like files
                (item.onComplete !== $.noop ? item.onComplete : self.opt.item.onComplete)((/\.(xml|json|script|html|text)$/).test(item.url) ? response : '', item.node);
                // run the group callback once all of its items completed
                if (groupName && (group.loaded === group.items.length || data.queue.loaded === data.queue.items.length)) {
                    self.opt.group.onComplete(groupName);
                }
                // run the final complete callback once every item completed
                if (data.queue.loaded === data.queue.items.length) {
                    self.opt.onComplete();
                }
                // mark the xhr as done
                item.xhr = 'complete';
            })
            .fail(function(jqXHR) {
                item.xhr = 'fail';
                throw jqXHR.responseText;
            });
        }
    },

    // TODO: progress aggregation not implemented yet.
    _updateProgress: function() {}
};
/*
* GLOBAL API
*/
// Static entry point: $.freakLoad(items, options) creates the singleton,
// $.freakLoad('method', args...) dispatches to a public Plugin method,
// $.freakLoad(items) on an existing instance queues additional items.
$[_plugin] = function(fn, options) {
    var args = arguments,
        data = $.data(doc, _plugin),
        method = data && data[fn] ? data[fn] : false;
    // require either a method name or items to load
    if (!args.length) {
        // typo fixed in the user-facing message: "whitout" -> "without"
        throw 'The jquery plugin ' + _plugin + ' is not able to run without arguments or array of items to load.';
    // not instantiated yet: create the singleton (fn is the item list here)
    } else if (!data) {
        $.data(doc, _plugin, new Plugin(fn, options));
    // already instantiated: dispatch to a public method, or add new items
    } else if (data instanceof Plugin) {
        if (typeof method === 'function') {
            return method.apply(data, Array.prototype.slice.call(args, 1));
        } else {
            $[_plugin]('add', fn);
        }
    // unknown or private (underscore-prefixed) method: throw
    } else if (!method || (typeof fn === 'string' && fn.charAt(0) === '_')) {
        throw 'Method ' + fn + ' does not exist on jQuery.' + _plugin;
    }
};
// Element plugin: collect each matched element's data-* attributes as item
// options (data-tags may be a comma separated list) and queue them.
$.fn[_plugin] = function(itemOptions, generalOptions) {
    var items = $.map(this, function(item) {
        var dataset = JSON.parse(JSON.stringify(item.dataset)),
            tags = dataset.tags;
        dataset.tags = tags ? tags.replace(/\s+/g, '').split(',') : [];
        return $.extend({node: item}, dataset, itemOptions);
    });
    $[_plugin](items, generalOptions);
    // BUG FIX: return the jQuery set to preserve chainability, per the
    // jQuery plugin authoring convention (previously returned undefined).
    return this;
};
})(jQuery, window, document); | Plugin | identifier_name |
jquery.freakload.js | /*
* freakLoad - v0.1.0
* Preloader JS library
* https://github.com/nofreakz/freakLoad
*
* Copyright (c) 2014
* MIT License
*/
;(function($, win, doc) {
'use strict';
/*
* DEFAULTS
*/
var _plugin = 'freakLoad',
itemTpl = {
node: undefined,
url: '',
data: {},
priority: 0.5,
tags: [],
async: true,
progress: 0,
onStart: $.noop,
onComplete: $.noop,
xhr: null
},
groupTpl = {
items: [],
loaded: 0
},
defaults = {
async: true,
groupOrder: [],
onStart: $.noop,
onComplete: $.noop,
item: {
onStart: $.noop,
onComplete: $.noop
},
group: {
onStart: $.noop,
onComplete: $.noop
}
};
/*
* CONSTRUCTOR
*/
function Plugin(items, options) |
Plugin.prototype = {
/*
* DATA
*/
data: {
// use queue as object to possibility multiple queues
queue: {
loaded: 0,
items: [],
groups: {} // @groupTpl
},
requested: {
items: [],
groups: []
},
progress: 0
},
/*
* PUBLIC
*/
init: function(items) {
this._addItems(items);
this.opt.onStart();
this.load();
},
load: function() {
var group;
// if has a groupOrder it'll load group by group listed
// groups that weren't listed will load as regular item
if (this.opt.groupOrder.length) {
for (group in this.opt.groupOrder) {
this.loadGroup(this.opt.groupOrder[group]);
}
}
this._request();
},
loadGroup: function(groupName) {
if (this._isGroup(groupName)) {
this._request(groupName);
} else {
console.warn('No items was found to be loaded on the group "' + groupName + '".');
}
},
// new items and a flag to indicate if have to load the new items
add: function(items, load) {
this._addItems(items);
// load by default
if (load === false ? load : true) {
this.load();
}
},
abort: function(item) {
if (item) {
_abortItem(item);
return;
}
for (var l = this.queue.loaded; l < this.queue.length; l++) {
_abortItem(this.queue.items[l]);
}
},
getData: function() {
return this.data;
},
/*
* PRIVATE
*/
// add items to general and specific queue
_addItems: function(items) {
var queue = this.data.queue,
item = {},
tag = '',
i = 0,
t = 0;
items = this._normalizeItems(items);
this._setPriority(items);
for (i in items) {
item = items[i];
queue.items[queue.items.length] = item;
// create the new queues based on tags
if (item.tags.length) {
for (t in item.tags) {
tag = item.tags[t];
this._createGroup(tag);
// add item to specific queue
queue.groups[tag].items[queue.groups[tag].items.length] = item;
}
}
}
},
_abortItem: function(item) {
item.xhr.abort();
item.progress = 0;
},
_normalizeItems: function(items) {
var item = {},
i = 0;
// if argument 'items' isn't a Array set as
if (!(items instanceof Array)) {
items = [items];
}
// normalize with the template setted up previously
for (i in items) {
item = items[i];
if (typeof item !== 'object') {
item = { url: item };
}
items[i] = item = $.extend({}, itemTpl, item);
item.priority = parseFloat(item.priority) || 0.1;
}
return items;
},
_setPriority: function(items) {
// organize items by priority
items.sort(function(a, b) {
return b.priority - a.priority;
});
return items;
},
_createGroup: function(tag) {
// if the new tag still doesn't have a queue create one
if (!this._isGroup(tag)) {
// create a new group on groups
this.data.queue.groups[tag] = $.extend(true, {}, groupTpl);
}
},
_isGroup: function(groupName) {
return this.data.queue.groups.hasOwnProperty(groupName) ? true : false;
},
// the _request will organize the queues that will be send to _load
_request: function(groupName) {
// group only will be setted if the function recive a groupName
// otherwise group is going to the default queue of items
var data = this.data,
group = data.queue,
i = 0,
len = 0;
// set group as lodaing and load the specific queue
if (groupName) {
group = data.queue.groups[groupName];
}
// load items
// stop loops when the number of loaded items is equal the size of the general queue
for (len = group.items.length; i < len && data.requested.items.length < data.queue.items.length; i++) {
this._load(group.items[i], group, groupName);
}
},
_load: function(item, group, groupName) {
var self = this,
data = this.data;
// check if the item has been loaded
// avoid multiple ajax calls for loaded items
if (data.requested.items.indexOf(item.url) === -1) {
// add to array of loaded items
data.requested.items[data.requested.items.length] = item.url + ($.isPlainObject(item.data) ? '' : '?' + $.param(item.data));
// flag as loading and fire the starting callback
(item.onStart !== $.noop ? item.onStart : this.opt.item.onStart)(item.node);
if (groupName && data.requested.groups.indexOf(groupName) === -1) {
data.requested.groups[data.requested.groups.length] = groupName;
this.opt.group.onStart(groupName);
}
// set xhr
item.xhr = $.ajax({
xhr: function() {
var _xhr = new win.XMLHttpRequest();
_xhr.addEventListener('progress', function(evt) {
if (evt.lengthComputable) {
item.progress = evt.loaded / evt.total;
}
}, false);
return _xhr;
},
url: item.url,
data: item.data,
async: item.async ? item.async : self.opt.async
})
.success(function(response) {
if (groupName) {
group.loaded++;
}
data.queue.loaded++;
// the data will only be passed to callback if the item is a text file
(item.onComplete !== $.noop ? item.onComplete : self.opt.item.onComplete)((/\.(xml|json|script|html|text)$/).test(item.url) ? response : '', item.node);
// runs group callabck when complete all items
if (groupName && (group.loaded === group.items.length || data.queue.loaded === data.queue.items.length)) {
self.opt.group.onComplete(groupName);
}
// runs the final complete callabck when complete all items
if (data.queue.loaded === data.queue.items.length) {
self.opt.onComplete();
}
// clean the xhr
item.xhr = 'complete';
})
.fail(function(jqXHR) {
item.xhr = 'fail';
throw jqXHR.responseText;
});
}
},
_updateProgress: function() {}
};
/*
* GLOBAL API
*/
$[_plugin] = function(fn, options) {
var args = arguments,
data = $.data(doc, _plugin),
method = data && data[fn] ? data[fn] : false;
// force to pass a method or items to plugin load
if (!args.length) {
throw 'The jquery plugin ' + _plugin + ' is not able to run whitout arguments or array of items to load.';
// if it still doesn't have been instanced, do that
} else if (!data) {
// fn here is the new items
$.data(doc, _plugin, new Plugin(fn, options));
// check if data is a instance of the Plugin and fire the specific method
// or simply add the new items to the loading
} else if (data instanceof Plugin) {
if (typeof method === 'function') {
return method.apply(data, Array.prototype.slice.call(args, 1));
} else {
$[_plugin]('add', fn);
}
// finally if the method doesn't exist or is a private method show a console error
} else if (!method || (typeof fn === 'string' && fn.charAt(0) === '_')) {
throw 'Method ' + fn + ' does not exist on jQuery.' + _plugin;
}
};
$.fn[_plugin] = function(itemOptions, generalOptions) {
var items = $.map(this, function(item) {
var dataset = JSON.parse(JSON.stringify(item.dataset)),
tags = dataset.tags;
dataset.tags = tags ? tags.replace(/\s+/g, '').split(',') : [];
return $.extend({node: item}, dataset, itemOptions);
});
$[_plugin](items, generalOptions);
};
})(jQuery, window, document); | {
this.opt = $.extend(true, {}, defaults, options);
this.init(items);
} | identifier_body |
jquery.freakload.js | /*
* freakLoad - v0.1.0
* Preloader JS library
* https://github.com/nofreakz/freakLoad
*
* Copyright (c) 2014
* MIT License
*/
;(function($, win, doc) {
'use strict';
/*
* DEFAULTS
*/
var _plugin = 'freakLoad',
itemTpl = {
node: undefined,
url: '',
data: {},
priority: 0.5,
tags: [],
async: true,
progress: 0,
onStart: $.noop,
onComplete: $.noop,
xhr: null
},
groupTpl = {
items: [],
loaded: 0
},
defaults = {
async: true,
groupOrder: [],
onStart: $.noop,
onComplete: $.noop,
item: {
onStart: $.noop,
onComplete: $.noop
},
group: {
onStart: $.noop,
onComplete: $.noop
}
};
/*
* CONSTRUCTOR
*/
function Plugin(items, options) {
this.opt = $.extend(true, {}, defaults, options);
this.init(items);
}
Plugin.prototype = {
/*
* DATA
*/
data: {
// use queue as object to possibility multiple queues
queue: {
loaded: 0,
items: [],
groups: {} // @groupTpl
},
requested: {
items: [],
groups: []
},
progress: 0
},
/*
* PUBLIC
*/
init: function(items) {
this._addItems(items);
this.opt.onStart();
this.load();
},
load: function() {
var group;
// if has a groupOrder it'll load group by group listed
// groups that weren't listed will load as regular item
if (this.opt.groupOrder.length) {
for (group in this.opt.groupOrder) {
this.loadGroup(this.opt.groupOrder[group]);
}
}
this._request();
},
loadGroup: function(groupName) {
if (this._isGroup(groupName)) {
this._request(groupName);
} else {
console.warn('No items was found to be loaded on the group "' + groupName + '".');
}
},
// new items and a flag to indicate if have to load the new items
add: function(items, load) {
this._addItems(items);
// load by default
if (load === false ? load : true) {
this.load();
}
},
abort: function(item) {
if (item) {
_abortItem(item);
return;
}
for (var l = this.queue.loaded; l < this.queue.length; l++) {
_abortItem(this.queue.items[l]);
}
},
getData: function() {
return this.data;
},
/*
* PRIVATE
*/
// add items to general and specific queue
_addItems: function(items) {
var queue = this.data.queue,
item = {},
tag = '',
i = 0,
t = 0;
items = this._normalizeItems(items);
this._setPriority(items);
for (i in items) {
item = items[i];
queue.items[queue.items.length] = item;
// create the new queues based on tags
if (item.tags.length) {
for (t in item.tags) {
tag = item.tags[t];
this._createGroup(tag);
// add item to specific queue
queue.groups[tag].items[queue.groups[tag].items.length] = item;
}
}
}
},
_abortItem: function(item) {
item.xhr.abort();
item.progress = 0;
},
_normalizeItems: function(items) {
var item = {},
i = 0;
// if argument 'items' isn't a Array set as
if (!(items instanceof Array)) {
items = [items];
}
// normalize with the template setted up previously
for (i in items) {
item = items[i];
if (typeof item !== 'object') {
item = { url: item };
}
items[i] = item = $.extend({}, itemTpl, item);
item.priority = parseFloat(item.priority) || 0.1;
}
return items;
},
_setPriority: function(items) {
// organize items by priority
items.sort(function(a, b) {
return b.priority - a.priority;
});
return items;
},
_createGroup: function(tag) {
// if the new tag still doesn't have a queue create one
if (!this._isGroup(tag)) {
// create a new group on groups
this.data.queue.groups[tag] = $.extend(true, {}, groupTpl);
}
},
_isGroup: function(groupName) {
return this.data.queue.groups.hasOwnProperty(groupName) ? true : false;
},
// the _request will organize the queues that will be send to _load
_request: function(groupName) {
// group only will be setted if the function recive a groupName
// otherwise group is going to the default queue of items
var data = this.data,
group = data.queue,
i = 0,
len = 0;
// set group as lodaing and load the specific queue
if (groupName) {
group = data.queue.groups[groupName];
}
// load items
// stop loops when the number of loaded items is equal the size of the general queue
for (len = group.items.length; i < len && data.requested.items.length < data.queue.items.length; i++) {
this._load(group.items[i], group, groupName);
}
},
_load: function(item, group, groupName) {
var self = this,
data = this.data;
// check if the item has been loaded
// avoid multiple ajax calls for loaded items
if (data.requested.items.indexOf(item.url) === -1) {
// add to array of loaded items
data.requested.items[data.requested.items.length] = item.url + ($.isPlainObject(item.data) ? '' : '?' + $.param(item.data));
// flag as loading and fire the starting callback
(item.onStart !== $.noop ? item.onStart : this.opt.item.onStart)(item.node);
if (groupName && data.requested.groups.indexOf(groupName) === -1) {
data.requested.groups[data.requested.groups.length] = groupName;
this.opt.group.onStart(groupName);
}
// set xhr
item.xhr = $.ajax({
xhr: function() {
var _xhr = new win.XMLHttpRequest();
_xhr.addEventListener('progress', function(evt) {
if (evt.lengthComputable) {
item.progress = evt.loaded / evt.total;
}
}, false);
return _xhr;
},
url: item.url,
data: item.data,
async: item.async ? item.async : self.opt.async
})
.success(function(response) {
if (groupName) {
group.loaded++;
}
data.queue.loaded++;
// the data will only be passed to callback if the item is a text file
(item.onComplete !== $.noop ? item.onComplete : self.opt.item.onComplete)((/\.(xml|json|script|html|text)$/).test(item.url) ? response : '', item.node);
// runs group callabck when complete all items
if (groupName && (group.loaded === group.items.length || data.queue.loaded === data.queue.items.length)) {
self.opt.group.onComplete(groupName);
}
// runs the final complete callabck when complete all items
if (data.queue.loaded === data.queue.items.length) {
self.opt.onComplete();
}
// clean the xhr
item.xhr = 'complete';
})
.fail(function(jqXHR) {
item.xhr = 'fail';
throw jqXHR.responseText;
});
}
},
_updateProgress: function() {}
};
/*
* GLOBAL API
*/
$[_plugin] = function(fn, options) {
var args = arguments,
data = $.data(doc, _plugin),
method = data && data[fn] ? data[fn] : false;
// force to pass a method or items to plugin load
if (!args.length) {
throw 'The jquery plugin ' + _plugin + ' is not able to run whitout arguments or array of items to load.';
// if it still doesn't have been instanced, do that
} else if (!data) {
// fn here is the new items
$.data(doc, _plugin, new Plugin(fn, options));
// check if data is a instance of the Plugin and fire the specific method
// or simply add the new items to the loading
} else if (data instanceof Plugin) {
if (typeof method === 'function') {
return method.apply(data, Array.prototype.slice.call(args, 1));
} else {
$[_plugin]('add', fn);
}
// finally if the method doesn't exist or is a private method show a console error
} else if (!method || (typeof fn === 'string' && fn.charAt(0) === '_')) {
throw 'Method ' + fn + ' does not exist on jQuery.' + _plugin;
}
};
$.fn[_plugin] = function(itemOptions, generalOptions) {
var items = $.map(this, function(item) {
var dataset = JSON.parse(JSON.stringify(item.dataset)),
tags = dataset.tags;
dataset.tags = tags ? tags.replace(/\s+/g, '').split(',') : []; | });
$[_plugin](items, generalOptions);
};
})(jQuery, window, document); | return $.extend({node: item}, dataset, itemOptions); | random_line_split |
jquery.freakload.js | /*
* freakLoad - v0.1.0
* Preloader JS library
* https://github.com/nofreakz/freakLoad
*
* Copyright (c) 2014
* MIT License
*/
;(function($, win, doc) {
'use strict';
/*
* DEFAULTS
*/
var _plugin = 'freakLoad',
itemTpl = {
node: undefined,
url: '',
data: {},
priority: 0.5,
tags: [],
async: true,
progress: 0,
onStart: $.noop,
onComplete: $.noop,
xhr: null
},
groupTpl = {
items: [],
loaded: 0
},
defaults = {
async: true,
groupOrder: [],
onStart: $.noop,
onComplete: $.noop,
item: {
onStart: $.noop,
onComplete: $.noop
},
group: {
onStart: $.noop,
onComplete: $.noop
}
};
/*
* CONSTRUCTOR
*/
function Plugin(items, options) {
this.opt = $.extend(true, {}, defaults, options);
this.init(items);
}
Plugin.prototype = {
/*
* DATA
*/
data: {
// use queue as object to possibility multiple queues
queue: {
loaded: 0,
items: [],
groups: {} // @groupTpl
},
requested: {
items: [],
groups: []
},
progress: 0
},
/*
* PUBLIC
*/
init: function(items) {
this._addItems(items);
this.opt.onStart();
this.load();
},
load: function() {
var group;
// if has a groupOrder it'll load group by group listed
// groups that weren't listed will load as regular item
if (this.opt.groupOrder.length) {
for (group in this.opt.groupOrder) {
this.loadGroup(this.opt.groupOrder[group]);
}
}
this._request();
},
loadGroup: function(groupName) {
if (this._isGroup(groupName)) {
this._request(groupName);
} else {
console.warn('No items was found to be loaded on the group "' + groupName + '".');
}
},
// new items and a flag to indicate if have to load the new items
add: function(items, load) {
this._addItems(items);
// load by default
if (load === false ? load : true) {
this.load();
}
},
abort: function(item) {
if (item) {
_abortItem(item);
return;
}
for (var l = this.queue.loaded; l < this.queue.length; l++) {
_abortItem(this.queue.items[l]);
}
},
getData: function() {
return this.data;
},
/*
* PRIVATE
*/
// add items to general and specific queue
_addItems: function(items) {
var queue = this.data.queue,
item = {},
tag = '',
i = 0,
t = 0;
items = this._normalizeItems(items);
this._setPriority(items);
for (i in items) {
item = items[i];
queue.items[queue.items.length] = item;
// create the new queues based on tags
if (item.tags.length) {
for (t in item.tags) {
tag = item.tags[t];
this._createGroup(tag);
// add item to specific queue
queue.groups[tag].items[queue.groups[tag].items.length] = item;
}
}
}
},
_abortItem: function(item) {
item.xhr.abort();
item.progress = 0;
},
_normalizeItems: function(items) {
var item = {},
i = 0;
// if argument 'items' isn't a Array set as
if (!(items instanceof Array)) {
items = [items];
}
// normalize with the template setted up previously
for (i in items) {
item = items[i];
if (typeof item !== 'object') {
item = { url: item };
}
items[i] = item = $.extend({}, itemTpl, item);
item.priority = parseFloat(item.priority) || 0.1;
}
return items;
},
_setPriority: function(items) {
// organize items by priority
items.sort(function(a, b) {
return b.priority - a.priority;
});
return items;
},
_createGroup: function(tag) {
// if the new tag still doesn't have a queue create one
if (!this._isGroup(tag)) {
// create a new group on groups
this.data.queue.groups[tag] = $.extend(true, {}, groupTpl);
}
},
_isGroup: function(groupName) {
return this.data.queue.groups.hasOwnProperty(groupName) ? true : false;
},
// the _request will organize the queues that will be send to _load
_request: function(groupName) {
// group only will be setted if the function recive a groupName
// otherwise group is going to the default queue of items
var data = this.data,
group = data.queue,
i = 0,
len = 0;
// set group as lodaing and load the specific queue
if (groupName) {
group = data.queue.groups[groupName];
}
// load items
// stop loops when the number of loaded items is equal the size of the general queue
for (len = group.items.length; i < len && data.requested.items.length < data.queue.items.length; i++) {
this._load(group.items[i], group, groupName);
}
},
_load: function(item, group, groupName) {
var self = this,
data = this.data;
// check if the item has been loaded
// avoid multiple ajax calls for loaded items
if (data.requested.items.indexOf(item.url) === -1) {
// add to array of loaded items
data.requested.items[data.requested.items.length] = item.url + ($.isPlainObject(item.data) ? '' : '?' + $.param(item.data));
// flag as loading and fire the starting callback
(item.onStart !== $.noop ? item.onStart : this.opt.item.onStart)(item.node);
if (groupName && data.requested.groups.indexOf(groupName) === -1) {
data.requested.groups[data.requested.groups.length] = groupName;
this.opt.group.onStart(groupName);
}
// set xhr
item.xhr = $.ajax({
xhr: function() {
var _xhr = new win.XMLHttpRequest();
_xhr.addEventListener('progress', function(evt) {
if (evt.lengthComputable) {
item.progress = evt.loaded / evt.total;
}
}, false);
return _xhr;
},
url: item.url,
data: item.data,
async: item.async ? item.async : self.opt.async
})
.success(function(response) {
if (groupName) {
group.loaded++;
}
data.queue.loaded++;
// the data will only be passed to callback if the item is a text file
(item.onComplete !== $.noop ? item.onComplete : self.opt.item.onComplete)((/\.(xml|json|script|html|text)$/).test(item.url) ? response : '', item.node);
// runs group callabck when complete all items
if (groupName && (group.loaded === group.items.length || data.queue.loaded === data.queue.items.length)) {
self.opt.group.onComplete(groupName);
}
// runs the final complete callabck when complete all items
if (data.queue.loaded === data.queue.items.length) {
self.opt.onComplete();
}
// clean the xhr
item.xhr = 'complete';
})
.fail(function(jqXHR) {
item.xhr = 'fail';
throw jqXHR.responseText;
});
}
},
_updateProgress: function() {}
};
/*
* GLOBOL API
*/
$[_plugin] = function(fn, options) {
var args = arguments,
data = $.data(doc, _plugin),
method = data && data[fn] ? data[fn] : false;
// force to pass a method or items to plugin load
if (!args.length) {
throw 'The jquery plugin ' + _plugin + ' is not able to run whitout arguments or array of items to load.';
// if it still doesn't have been instanced, do that
} else if (!data) {
// fn here is the new items
$.data(doc, _plugin, new Plugin(fn, options));
// check if data is a instance of the Plugin and fire the specific method
// or simply add the new items to the loading
} else if (data instanceof Plugin) {
if (typeof method === 'function') | else {
$[_plugin]('add', fn);
}
// finally if the method doesn't exist or is a private method show a console error
} else if (!method || (typeof fn === 'string' && fn.charAt(0) === '_')) {
throw 'Method ' + fn + ' does not exist on jQuery.' + _plugin;
}
};
$.fn[_plugin] = function(itemOptions, generalOptions) {
var items = $.map(this, function(item) {
var dataset = JSON.parse(JSON.stringify(item.dataset)),
tags = dataset.tags;
dataset.tags = tags ? tags.replace(/\s+/g, '').split(',') : [];
return $.extend({node: item}, dataset, itemOptions);
});
$[_plugin](items, generalOptions);
};
})(jQuery, window, document); | {
return method.apply(data, Array.prototype.slice.call(args, 1));
} | conditional_block |
phylomap.js | function addLoadEvent(func) {
var oldonload = window.onload;
if (typeof window.onload != 'function') {
window.onload = func;
} else {
window.onload = function() {
if (oldonload) {
oldonload();
}
func();
}
}
}
function log(msg) {
setTimeout(function() {
throw new Error(msg);
}, 0);
}
function clearLocations() {
infoWindow.close();
for (i in markers) {
markers[i].setMap(null);
// CRL
delete markerColorIndex[i];
}
markers.length = 0;
markerIndex = {};
for (i in overlays) {
overlays[i].setMap(null);
}
// set index to last item so it will reset on next element
iconIndex = 0;
// clear occurrence compilation list
phylomap.selectedOccurrences = []
//updateTableDisplay(phylomap.selectedOccurrences)
//updateGeoJSDisplay()
}
// Can create serious problems as it doesn't delete markerIndex references!
function clearOneLocation(index) {
var marker = markers[index];
if (marker) {
marker.setMap(null);
}
delete markers[index];
}
function clearOneId(id) {
// if a marker has ID = id
if (typeof markerIndex[id] != "undefined") {
markerIndex[id].forEach(function(d) {
clearOneLocation(d);
});
delete markerIndex[id];
}
}
// returns true if there is at least 1 marker on the map with ID = _id
function markerExists(id) {
if (typeof markerIndex[id] != "undefined" && markerIndex[id].length > 0)
return true;
else
return false;
}
// ---- build node lists to aid traversal, finding locations or matching nodes
function addTaxaToTaxaList(treenode) {
if ('children' in treenode) {
// case for intermediate nodes, continue traversal
for (var i=0;i<treenode.children.length;i++) {
addTaxaToTaxaList(treenode.children[i])
}
} else {
// case for a taxon
phylomap.taxalist.push(treenode)
}
}
// this is a pre-processing function that is called once each time a tree is loaded. It runs throgh
// the tree and builds a list of all nodes, so searching doesn't have to involve recursive searching
// each time. The routine assumes phylomap.currentTree is valid.
function addAllNodesToAllNodeList(treenode) {
phylomap.allnodelist.push(treenode)
if ('children' in treenode) {
// case for intermediate nodes, continue traversal
for (var i=0;i<treenode.children.length;i++) {
addAllNodesToAllNodeList(treenode.children[i])
}
}
}
// this is a pre-processing function that is called once each time a tree is loaded. It runs throgh
// the tree and builds a list of the taxa, so searching doesn't have to involve recursive searching
// each time. The routine assumes phylomap.currentTree is valid.
function processTreeForMapLocations() {
// clear out the previous list if there is one
while (phylomap.taxalist.length > 0) {
phylomap.taxalist.pop()
}
while (phylomap.allnodelist.length > 0) {
phylomap.allnodelist.pop()
}
// start a recursive traversals to build lists of just taxa and of all the nodes for use later
addTaxaToTaxaList(phylomap.currentTree)
addAllNodesToAllNodeList(phylomap.currentTree)
//console.log(phylomap.taxalist)
}
//------ end of build node lists
function searchLocationsNear(searchUrl) {
d3.json(searchUrl, function(json) {
var icon = getIcon();
var bounds = new google.maps.LatLngBounds();
json.result.data.forEach(function(d){
var name = d.name;
var id = d.ID.$oid;
var latlng = new google.maps.LatLng(
parseFloat(d.lat),
parseFloat(d.lng));
var text = 'name: ' + name + '\n';
text = text + "location: " + latlng + "\n"
text = text + "id: " + id;
createMarker(latlng, name, text, id, icon);
bounds.extend(latlng);
});
});
}
// The next few routines below are involved in handling circles drawn on the map by the user.
// These routines check for observation points that lie within the radius of the drawn circle.
// determine if a point (lat,lng) is inside the circle with center clat,clng, and given radius
function pointInCircle(lat,lng,clat,clng,radius) {
var KPiDouble = 3.141592654
var KDegreesToRadiansDouble = 0.01745329 // KPiDouble / 180.0
var earthRadius = 6378137 // in meters
clng = clng * KDegreesToRadiansDouble
clat = clat * KDegreesToRadiansDouble
var cradius = radius / earthRadius
var lng = lng * KDegreesToRadiansDouble
var lat = lat * KDegreesToRadiansDouble
var angle = Math.acos(Math.sin(clat) * Math.sin(lat) + Math.cos(clat) * Math.cos(lat) * Math.cos(lng - clng))
var decision = (angle < cradius)
//if (decision) {
// console.log(lat,lng,clat,clng,angle,cradius,(angle < cradius))
//}
return decision
}
// This call adds markers to the map for all occurrence points within the boundaries of a circle.
function searchLocationsNearCircle(lat,lon,radius) {
var geomap;
var icon = getIcon();
var bounds = new google.maps.LatLngBounds();
// look through all taxa in precompiled list
for (var i=0;i<phylomap.taxalist.length;i++) {
var name = phylomap.taxalist[i].node_data['node name'];
var id = phylomap.taxalist[i].node_data['nodeid'];
if ('loc' in phylomap.taxalist[i].node_data) {
for (var j = phylomap.taxalist[i].node_data['loc'].length - 1; j >= 0; j--) {
var point = phylomap.taxalist[i].node_data['loc'][j]
// if this point is inside the target circle, then add a marker
if (pointInCircle(point[1],point[0],lat,lon,radius)) {
var latlng = new google.maps.LatLng(
parseFloat(point[1]),
parseFloat(point[0]));
// the id field is used internally to phylomap for highlighting, it doesn't
// need to be displayed to the user generally
//var text = "species: " + name + " <br>id: " + id;
var text = "name: " + name + "\n";
// add other attributes to display tag if they are present in the taxon nodes
var attribs = []
if ('attributes' in phylomap.taxalist[i].node_data) {
if (phylomap.taxalist[i].node_data['attributes'].length >= j) {
attribs = phylomap.taxalist[i].node_data['attributes'][j]
// add descriptions to the text markers
for (var attrib in attribs) {
text = text + ' [' + attrib+']:'+attribs[attrib] + '\n'
};
}
}
createMarker(latlng, name, text, id, icon);
addLocationToSelectedList(phylomap.taxalist[i],attribs,point[1],point[0])
bounds.extend(latlng);
var colorToUse = getIconColor()
highlightPath(phylomap.taxalist[i],phylomap.currentTree,colorToUse)
// Candela is too slow to always update automatically
//updateCandelaDisplay()
}
}
}
}
//updateTableDisplay(phylomap.selectedOccurrences)
//geomap = updateGeoJSDisplay()
//geomap.pan({x:0.01,y:0.01})
}
function | (node,attribs,lat,lon) {
//console.log('adding node to selection list. Length now:',phylomap.selectedOccurrences.length)
var record = {}
// if there are extra attributes on this node, copy them over to the trait matrix selection entry
for (attrib in attribs) {
record[attrib] = attribs[attrib]
}
record['lat'] = lat
record['lon'] = lon
record['species'] = node.node_data['node name']
record['renderSize'] = 10
phylomap.selectedOccurrences.push(record)
}
function findNodeInTreeByNodeId(currentTreeNode, nodeID) {
for (var i = phylomap.allnodelist.length - 1; i >= 0; i--) {
if (phylomap.allnodelist[i].node_data['nodeid'] == nodeID) {
return phylomap.allnodelist[i]
}
}
}
function mapSingleNode(treeNode, rootNode,icon,selectionID) {
var bounds = new google.maps.LatLngBounds();
var name = treeNode.node_data['node name'];
//console.log('map single node of id=',id, treeNode)
// if this node has locations, then add them to the map
if ('loc' in treeNode.node_data) {
for (var i = 0; i< treeNode.node_data['loc'].length; i++) {
var thisloc = treeNode.node_data['loc'][i]
var latlng = new google.maps.LatLng(
parseFloat(thisloc[1]),
parseFloat(thisloc[0]));
var text = "name: " + name + "\nid: " + selectionID + '\n';
// add other attributes to display tag if they are present in the taxon node
var attribs = []
if ('attributes' in treeNode.node_data) {
if (treeNode.node_data['attributes'].length >= i) {
attribs = treeNode.node_data['attributes'][i]
// add descriptions to the text markers
for (var attrib in attribs) {
text = text + ' [' + attrib+']:'+attribs[attrib] + '\n'
};
}
}
createMarker(latlng, name, text, selectionID, icon);
bounds.extend(latlng);
addLocationToSelectedList(treeNode,attribs,thisloc[1],thisloc[0])
};
}
}
// recursive traversal of the current tree to uncover all nodes below the passed node and
// map them. The clade root is passed so highlighting can be performed by lighting nodes between
// the clade root and the current node
// *** had to use _children instead of children because of how the accessor algorithm
// in phylotree re-names the attributes. This search might fail sometimes, so testing
// for valid children references under either name
function mapAllNodesInClade(treeNode, cladeRootNode,icon,selectionID) {
//console.log('mapping everything below node:',treeNode.node_data['nodeid'])
// highlight the path on the tree between the rootId and this node if a valid id was passed
if (treeNode != null) {
var id = cladeRootNode.node_data['nodeid'];
var colorToUse = getIconColor(id)
highlightLimitedPath(treeNode,cladeRootNode,colorToUse)
}
if (('_children' in treeNode) && (treeNode._children.length>0)) {
for (var i = treeNode._children.length - 1; i >= 0; i--) {
mapAllNodesInClade(treeNode._children[i], cladeRootNode,icon,selectionID)
}
} else if (('children' in treeNode) && (treeNode.children.length>0)) {
//console.log('mapAllNodesInClade: traversing -children- attribute to follow clade')
for (var i = treeNode.children.length - 1; i >= 0; i--) {
mapAllNodesInClade(treeNode.children[i], cladeRootNode,icon,selectionID)
}
} else {
// we have reached the bottom of the hierarchy, write out the locations to the map
//
mapSingleNode(treeNode, cladeRootNode,icon,selectionID)
}
}
// This search is used only during highlight of entire clade.
// Processing moved from a service to inline javascript when integrated with Arbor/TangeloHub.
// the data element returned in the clicked node is a <g> element, so we need to look inside its
// '__data__' attribute to find the actual tree node record. This
function searchLocationsNearClade(selectedNode, callback) {
var selectedNodeID = selectedNode.node_data['nodeid']
//console.log("highlight clade below node id",selectedNodeID);
// find the node with the id that matches the one the user clicked on
rootOfClade = findNodeInTreeByNodeId(phylomap.currentTree, selectedNodeID)
// traverse tree recursively, adding all locations in all taxa below this. We create the
// icon here so each selection maps to just one type of icon
var icon = getIcon(selectedNodeID);
mapAllNodesInClade(rootOfClade, rootOfClade, icon, selectedNodeID)
//updateTableDisplay(phylomap.selectedOccurrences)
//updateGeoJSDisplay()
// Candela is too slow to always update automatically, user can invoke a render on demand
//updateCandelaDisplay()
// run the callback if one was passed. Use for setting and clearing processing badge
if (callback != null) callback();
}
function getIcon(nodeid) {
if (typeof iconIndex === "undefined" || iconIndex == null || iconIndex == iconList.length) {
iconIndex = 0;
}
return iconList[iconIndex++];
}
// CRL : since iconIndex is incremented immediately after each use, we handle the wraparound case and
// generate a lagging index value. Don't know why we have to adjust this, thought we caught the index
// before it it was incremeneted
function adjustColorIndex(index) {
if (index>0) {
return (index-1)
}
else {
return (iconList.length-1)
}
}
// CRL: lookup the color of the tree highlight by retrieving the color of the corresponding
// map icon and adjusting it (why adjustment needed?)
function getIconColor(id) {
var colorToUse;
// if this has been called before any markers are created for this node, set the indexes appropriately
if (typeof markerColorIndex[id] == "undefined") {
markerColorIndex[id] = iconIndex;
}
colorToUse = treeHighlightColorList[adjustColorIndex(markerColorIndex[id])];
//console.log("getIconColor: id=",id," markerColorIndex=",markerColorIndex[id]," treeColor=",colorToUse)
return colorToUse
}
function createMarker(latlng, name, text, id, icon) {
var html = "<b>" + name + "</b><br>" + text;
// save the color of this icon in a color index array
markerColorIndex[id] = iconIndex;
//console.log('saving marketColorIndex[',id,'] = ',iconIndex)
icon = ((icon != null) ? icon : getIcon());
var marker = new google.maps.Marker({
map: map,
position: latlng,
icon: icon,
title: text
});
google.maps.event.addListener(marker, 'mouseover', function() {
var node = nodeFromId(id);
// CRL: color highlight path according to icon color
highlightParents(node, getIconColor(id), "3px");
textOn(node[0]);
});
google.maps.event.addListener(marker, 'mouseout', function() {
if (typeof clickedOn === "undefined" || !clickedOn) {
var node = nodeFromId(id);
highlightParents(node, "#ccc", "1.5px");
textOff(node[0], true);
}
});
// enable persistent highlight after click ?
google.maps.event.addListener(marker, 'click', function() {
clickedOn = clickedOn == true ? false : true;
var node = nodeFromId(id);
if (clickedOn) {
infoWindow.setContent(html);
infoWindow.open(map, marker);
// CRL: change so highlights match icon colors
highlightParents(node, getIconColor(id), "3px");
//highlightParents(node, "red", "3px");
textOn(node[0]);
} else {
infoWindow.close();
highlightParents(node, "#ccc", "1.5px");
textOff(node[0], true);
}
});
// store in index the id/markerIndex key/value pair, easier to delete later.
if (typeof markerIndex[id] === "object") {
markerIndex[id].push(markers.length);
} else {
markerIndex[id] = [markers.length];
}
markers.push(marker);
}
function highlightParents (node, color, size) {
// ensure our helper functions were included
if (typeof highlightPath != 'undefined' && typeof nodeFromId != 'undefined') {
if (node[0].length > 0) {
highlightPath(node.datum(), color, size);
}
}
}
var map;
var clickedOn;
var mapOptions;
var overlays = [];
var markers = [];
var markerColorIndex = [];
var markerIndex = {};
var infoWindow;
var locationSelect;
var phylotree = {};
var iconIndex = 0;
var iconList = [
'http://maps.google.com/mapfiles/ms/icons/red-dot.png',
'http://maps.google.com/mapfiles/ms/icons/blue-dot.png',
'http://maps.google.com/mapfiles/ms/icons/green-dot.png',
'http://maps.google.com/mapfiles/ms/icons/orange-dot.png',
'http://maps.google.com/mapfiles/ms/icons/pink-dot.png',
'http://maps.google.com/mapfiles/ms/icons/ltblue-dot.png',
'http://maps.google.com/mapfiles/ms/icons/purple-dot.png',
'http://maps.google.com/mapfiles/ms/icons/yellow-dot.png',
'http://maps.google.com/mapfiles/ms/icons/red-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/blue-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/grn-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/pink-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/ltblu-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/purple-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/ylw-pushpin.png',
]
// adding matching hightlight colors to match the icons, so the tree hightlight color matches the icon color
var treeHighlightColorList = [ "red","blue","green","orange","pink","lightblue","purple","yellow","red","blue","green","pink","lightblue","purple","yellow"]
//function load() {
addLoadEvent(function () {
mapOptions = {
//center: new google.maps.LatLng(18.994609, -71.345215),
//zoom: 6,
center: new google.maps.LatLng(9.65, -83.0),
zoom: 5,
mapTypeId: google.maps.MapTypeId.ROADMAP
};
map = new google.maps.Map(d3.select("#map_canvas").node(),
mapOptions);
var drawingManager = new google.maps.drawing.DrawingManager({
drawingMode: google.maps.drawing.OverlayType.MARKER,
drawingControl: true,
drawingControlOptions: {
position: google.maps.ControlPosition.TOP_CENTER,
drawingModes: ['marker', 'circle', 'rectangle']
},
markerOptions: {icon: 'https://developers.google.com/maps/documentation/javascript/examples/full/images/beachflag.png'},
});
drawingManager.setMap(map);
google.maps.event.addListener(drawingManager, 'overlaycomplete', function(event) {
var searchUrl;
// create the restful style url to load data. The named arguments we used to use had to be
// replaced by positional arguments with a later version of Tangelo. Notice the /rect/id/long, etc.
// using up positions in the URL since the names of the arguments didn't work anymore.
if (event.type == google.maps.drawing.OverlayType.CIRCLE) {
searchLocationsNearCircle(
event.overlay.getCenter().lat(),
event.overlay.getCenter().lng(),
event.overlay.getRadius())
} else if (event.type == google.maps.drawing.OverlayType.RECTANGLE) {
searchLocationsNearRect(
event.overlay.bounds.getSouthWest().lng(),
event.overlay.bounds.getSouthWest().lat(),
event.overlay.bounds.getNorthEast().lng(),
event.overlay.bounds.getNorthEast().lat())
}
overlays.push(event.overlay);
});
var mylatlng = new google.maps.LatLng(-25.363882,131.044922);
infoWindow = new google.maps.InfoWindow();
});
function old_updateGeoJSDisplay() {
var spec = {
center: {
x: 0,
y: 0
},
zoom: 4,
layers: [{
renderer: 'vgl',
features: [{
type: 'point',
size: function (d) { return 5; },
position: function (d) { return {x: d.lon, y: d.lat}; },
fill: true,
fillColor: function (d) { return 'red'; },
fillOpacity: function (d) { return 0.5 ; },
stroke: true,
strokeColor: function (d) { return 'black'; },
strokeOpacity: 1,
strokeWidth: 2
}]
}]
};
spec.data = phylomap.selectedOccurrences;
return $('#geojs_map_canvas').geojsMap(spec);
}
//------------------------------------
// this function looks at the first entry in the selection list and makes an entry for each
// attribute so it could be chosen as the one to color the occurrences by
function fillAttributeSelector() {
var sample = phylomap.selectedOccurrences[0]
var list = []
for (attrib in sample) {
list.push(attrib)
}
d3.select("#geojs_attribute").selectAll("option").remove();
d3.select("#geojs_attribute").selectAll("option")
.data(list)
.enter().append("option")
.text(function (d) { return d; });
}
function returnDataAsText(p) {
var text = ''
for (var attrib in p) {
if (attrib.length>0) {
text += attrib+':'+p[attrib]+'\n'
}
}
return text
}
// #bb5a00 - reddish/brows for low values
// #ffffff - white for top values
function geojs_addVectorLayer(points) {
//console.log(points,"\n");
var markers = phylomap.geojsmap.map.createLayer("feature",{"renderer":"vgl"})
var uiLayer = phylomap.geojsmap.map.createLayer('ui', {"renderer":"vgl"});
var tooltip = uiLayer.createWidget('dom', {position: {x: 0, y: 0}});
tooltipElem = $(tooltip.canvas()).attr('id', 'tooltip').addClass('hidden');
// Add a vector layer to the map. Fill the layer with all the points that are currently selected
for (var i = 0; i < points.length; i++) {
//console.log(points[0])
var lng_float = points[i]['lon']
var lat_float = points[i]['lat']
var pointText = returnDataAsText(points[i])
// add a point to the d3 layer
markers.createFeature("point",{selectionAPI:true})
.data([{x:lng_float, y:lat_float,text:pointText }])
.position(function(d) { return {x: d.x, y: d.y};} )
.style("fillColor", function(d) { return {r: 0, g: 1, b: 0};})
.style('strokeColor', 'black')
.geoOn(geo.event.feature.mouseclick, function (evt) {
console.log(evt)
phylomap.geojsmap.map.center({x: evt.data.x, y: evt.data.y});
})
.geoOn(geo.event.feature.mouseover, function (evt) {
this.modified();
markers.map().draw();
//tooltip.position({x: evt.data.x+0.015, y: evt.data.y+0.015});
tooltip.position({x: evt.data.x+0.0015, y: evt.data.y+0.0015});
tooltipElem.text(' '+evt.data.text);
tooltipElem.removeClass('hidden');
})
.geoOn(geo.event.feature.mouseout, function (evt) {
// evt.data.opacity = 1.0;
// evt.data.strokeOpacity = 1.0;
// this.modified();
//markers.map().draw();
tooltipElem.addClass('hidden');
})
.style('fillOpacity', 1.0)
.style('strokeOpacity', 1.0)
}
// save markers layer globally
phylomap.geojsmap.markers = markers
phylomap.geojsmap.map.draw();
}
// this function loops through all of the occurrence points and assigns colors depending on the value
// of the individual occurence point within the range across all the points
function updateOccurrencePointColors() {
var minRed = 160.0/256.0
var minGreen = 80.0/256.0
// find out which attribute has been selected
var attribSelector = d3.select("#geojs_attribute").node();
var selectedAttrib = attribSelector.options[attribSelector.selectedIndex].text;
console.log('selected attrib is:',selectedAttrib)
candela_addGeoDots(phylomap.selectedOccurrences,selectedAttrib)
}
function geojs_resize() {
phylomap.geojsmap.map.resize(0, 0, $('#geojs_map_canvas').width()*0.9, $('#geojs_map_canvas').height());
}
function geojs_addBaseLayer() {
var map;
// clear out the old map
$('#geojs_map_canvas').empty()
map = geo.map({
node: '#geojs_map_canvas',
zoom: 2
});
map.createLayer('osm');
phylomap.geojsmap.map = map;
}
// this function is called as soon as the page is finished loading
function updateGeoJSDisplay() {
phylomap.geojsmap = {}
phylomap.geojsmap.map = null
phylomap.geojsmap.markers = null
phylomap.geojsmap.map = null
phylomap.geojsmap.markers = null
phylomap.geojsmap.previouscolor = null
//Proj4js.defs["EPSG:4326"] = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs";
//Proj4js.defs["EPSG:3031"] = "+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs";
//Proj4js.defs["EPSG:900913"] = "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +no_defs";
$(window).resize(geojs_resize);
fillAttributeSelector();
d3.select("#geojs_attribute")
.on("change", updateOccurrencePointColors);
geojs_addBaseLayer();
geojs_resize();
geojs_addVectorLayer(phylomap.selectedOccurrences);
}
// ---------- Candela plotting functions
// this function is called as soon as the page is finished loading
function updateCandelaDisplay() {
fillAttributeSelector();
d3.select("#geojs_attribute")
.on("change", updateOccurrencePointColors);
//geojs_addBaseLayer();
//candela_resize();
candela_addGeoDots(phylomap.selectedOccurrences);
}
function candela_resize() {
phylomap.geojsmap.map.resize(0, 0, $('#geojs_map_canvas').width()*0.7, $('#geojs_map_canvas').height());
}
function candela_addGeoDots(points, attrib='Poll') {
//console.log('geodots:',points,"\n");
//var markers = phylomap.geojsmap.map.createLayer("feature",{"renderer":"vgl"})
//var uiLayer = phylomap.geojsmap.map.createLayer('ui', {"renderer":"vgl"});
//var tooltip = uiLayer.createWidget('dom', {position: {x: 0, y: 0}});
//tooltipElem = $(tooltip.canvas()).attr('id', 'tooltip').addClass('hidden');
// Add a vector layer to the map. Fill the layer with all the points that are currently selected
$("#candela_map_canvas").empty();
var el = document.getElementById('candela_map_canvas')
el.style.width = '900px';
el.style.height = '1000px';
document.body.appendChild(el);
phylomap.candela_map = new candela.components.GeoDots(el, {
zoom: 7,
center: {
longitude: -82.948,
latitude: 9.9725
},
data: points,
width: 1000,
height: 700,
latitude: 'lat',
longitude: 'lon',
size: 'renderSize',
//tileUrl: 'http://c.tiles.wmflabs.org/hillshading/${z}/${x}/${y}.png',
//tileUrl: 'http://tile.stamen.com/terrain/${z}/${x}/${y}.jpg',
tileUrl: 'https://{s}.tile.thunderforest.com/landscape/{z}/{x}/{y}.png?apikey=6548db9e547c4c5eacc2304ee947ebbe',
color: attrib
});
phylomap.candela_map.render();
}
| addLocationToSelectedList | identifier_name |
phylomap.js | function addLoadEvent(func) {
var oldonload = window.onload;
if (typeof window.onload != 'function') {
window.onload = func;
} else {
window.onload = function() {
if (oldonload) {
oldonload();
}
func();
}
}
}
function log(msg) {
setTimeout(function() {
throw new Error(msg);
}, 0);
}
function clearLocations() {
infoWindow.close();
for (i in markers) {
markers[i].setMap(null);
// CRL
delete markerColorIndex[i];
}
markers.length = 0;
markerIndex = {};
for (i in overlays) {
overlays[i].setMap(null);
}
// set index to last item so it will reset on next element
iconIndex = 0;
// clear occurrence compilation list
phylomap.selectedOccurrences = []
//updateTableDisplay(phylomap.selectedOccurrences)
//updateGeoJSDisplay()
}
// Can create serious problems as it doesn't delete markerIndex references!
function clearOneLocation(index) {
var marker = markers[index];
if (marker) {
marker.setMap(null);
}
delete markers[index];
}
function clearOneId(id) {
// if a marker has ID = id
if (typeof markerIndex[id] != "undefined") {
markerIndex[id].forEach(function(d) {
clearOneLocation(d);
});
delete markerIndex[id];
}
}
// returns true if there is at least 1 marker on the map with ID = _id
function markerExists(id) {
if (typeof markerIndex[id] != "undefined" && markerIndex[id].length > 0)
return true;
else
return false;
}
// ---- build node lists to aid traversal, finding locations or matching nodes
function addTaxaToTaxaList(treenode) {
if ('children' in treenode) {
// case for intermediate nodes, continue traversal
for (var i=0;i<treenode.children.length;i++) {
addTaxaToTaxaList(treenode.children[i])
}
} else {
// case for a taxon
phylomap.taxalist.push(treenode)
}
}
// this is a pre-processing function that is called once each time a tree is loaded. It runs throgh
// the tree and builds a list of all nodes, so searching doesn't have to involve recursive searching
// each time. The routine assumes phylomap.currentTree is valid.
function addAllNodesToAllNodeList(treenode) {
phylomap.allnodelist.push(treenode)
if ('children' in treenode) {
// case for intermediate nodes, continue traversal
for (var i=0;i<treenode.children.length;i++) {
addAllNodesToAllNodeList(treenode.children[i])
}
}
}
// this is a pre-processing function that is called once each time a tree is loaded. It runs throgh
// the tree and builds a list of the taxa, so searching doesn't have to involve recursive searching
// each time. The routine assumes phylomap.currentTree is valid.
function processTreeForMapLocations() {
// clear out the previous list if there is one
while (phylomap.taxalist.length > 0) {
phylomap.taxalist.pop()
}
while (phylomap.allnodelist.length > 0) {
phylomap.allnodelist.pop()
}
// start a recursive traversals to build lists of just taxa and of all the nodes for use later
addTaxaToTaxaList(phylomap.currentTree)
addAllNodesToAllNodeList(phylomap.currentTree)
//console.log(phylomap.taxalist)
}
//------ end of build node lists
function searchLocationsNear(searchUrl) {
d3.json(searchUrl, function(json) {
var icon = getIcon();
var bounds = new google.maps.LatLngBounds();
json.result.data.forEach(function(d){
var name = d.name;
var id = d.ID.$oid;
var latlng = new google.maps.LatLng(
parseFloat(d.lat),
parseFloat(d.lng));
var text = 'name: ' + name + '\n';
text = text + "location: " + latlng + "\n"
text = text + "id: " + id;
createMarker(latlng, name, text, id, icon);
bounds.extend(latlng);
});
});
}
// The next few routines below are involved in handling circles drawn on the map by the user.
// These routines check for observation points that lie within the radius of the drawn circle.
// determine if a point (lat,lng) is inside the circle with center clat,clng, and given radius
function pointInCircle(lat,lng,clat,clng,radius) {
var KPiDouble = 3.141592654
var KDegreesToRadiansDouble = 0.01745329 // KPiDouble / 180.0
var earthRadius = 6378137 // in meters
clng = clng * KDegreesToRadiansDouble
clat = clat * KDegreesToRadiansDouble
var cradius = radius / earthRadius
var lng = lng * KDegreesToRadiansDouble
var lat = lat * KDegreesToRadiansDouble
var angle = Math.acos(Math.sin(clat) * Math.sin(lat) + Math.cos(clat) * Math.cos(lat) * Math.cos(lng - clng))
var decision = (angle < cradius)
//if (decision) {
// console.log(lat,lng,clat,clng,angle,cradius,(angle < cradius))
//}
return decision
}
// This call adds markers to the map for all occurrence points within the boundaries of a circle.
function searchLocationsNearCircle(lat,lon,radius) {
var geomap;
var icon = getIcon();
var bounds = new google.maps.LatLngBounds();
// look through all taxa in precompiled list
for (var i=0;i<phylomap.taxalist.length;i++) {
var name = phylomap.taxalist[i].node_data['node name'];
var id = phylomap.taxalist[i].node_data['nodeid'];
if ('loc' in phylomap.taxalist[i].node_data) {
for (var j = phylomap.taxalist[i].node_data['loc'].length - 1; j >= 0; j--) {
var point = phylomap.taxalist[i].node_data['loc'][j]
// if this point is inside the target circle, then add a marker
if (pointInCircle(point[1],point[0],lat,lon,radius)) {
var latlng = new google.maps.LatLng(
parseFloat(point[1]),
parseFloat(point[0]));
// the id field is used internally to phylomap for highlighting, it doesn't
// need to be displayed to the user generally
//var text = "species: " + name + " <br>id: " + id;
var text = "name: " + name + "\n";
// add other attributes to display tag if they are present in the taxon nodes
var attribs = []
if ('attributes' in phylomap.taxalist[i].node_data) {
if (phylomap.taxalist[i].node_data['attributes'].length >= j) {
attribs = phylomap.taxalist[i].node_data['attributes'][j]
// add descriptions to the text markers
for (var attrib in attribs) {
text = text + ' [' + attrib+']:'+attribs[attrib] + '\n'
};
}
}
createMarker(latlng, name, text, id, icon);
addLocationToSelectedList(phylomap.taxalist[i],attribs,point[1],point[0])
bounds.extend(latlng);
var colorToUse = getIconColor()
highlightPath(phylomap.taxalist[i],phylomap.currentTree,colorToUse)
// Candela is too slow to always update automatically
//updateCandelaDisplay()
}
}
}
}
//updateTableDisplay(phylomap.selectedOccurrences)
//geomap = updateGeoJSDisplay()
//geomap.pan({x:0.01,y:0.01})
}
function addLocationToSelectedList(node,attribs,lat,lon) {
//console.log('adding node to selection list. Length now:',phylomap.selectedOccurrences.length)
var record = {}
// if there are extra attributes on this node, copy them over to the trait matrix selection entry
for (attrib in attribs) {
record[attrib] = attribs[attrib]
}
record['lat'] = lat
record['lon'] = lon
record['species'] = node.node_data['node name']
record['renderSize'] = 10
phylomap.selectedOccurrences.push(record)
}
function findNodeInTreeByNodeId(currentTreeNode, nodeID) {
for (var i = phylomap.allnodelist.length - 1; i >= 0; i--) {
if (phylomap.allnodelist[i].node_data['nodeid'] == nodeID) {
return phylomap.allnodelist[i]
}
}
}
function mapSingleNode(treeNode, rootNode,icon,selectionID) {
var bounds = new google.maps.LatLngBounds();
var name = treeNode.node_data['node name'];
//console.log('map single node of id=',id, treeNode)
// if this node has locations, then add them to the map
if ('loc' in treeNode.node_data) {
for (var i = 0; i< treeNode.node_data['loc'].length; i++) {
var thisloc = treeNode.node_data['loc'][i]
var latlng = new google.maps.LatLng(
parseFloat(thisloc[1]),
parseFloat(thisloc[0]));
var text = "name: " + name + "\nid: " + selectionID + '\n';
// add other attributes to display tag if they are present in the taxon node
var attribs = []
if ('attributes' in treeNode.node_data) {
if (treeNode.node_data['attributes'].length >= i) {
attribs = treeNode.node_data['attributes'][i]
// add descriptions to the text markers
for (var attrib in attribs) {
text = text + ' [' + attrib+']:'+attribs[attrib] + '\n'
};
}
}
createMarker(latlng, name, text, selectionID, icon);
bounds.extend(latlng);
addLocationToSelectedList(treeNode,attribs,thisloc[1],thisloc[0])
};
}
}
// recursive traversal of the current tree to uncover all nodes below the passed node and
// map them. The clade root is passed so highlighting can be performed by lighting nodes between
// the clade root and the current node
// *** had to use _children instead of children because of how the accessor algorithm
// in phylotree re-names the attributes. This search might fail sometimes, so testing
// for valid children references under either name
function mapAllNodesInClade(treeNode, cladeRootNode,icon,selectionID) {
//console.log('mapping everything below node:',treeNode.node_data['nodeid'])
// highlight the path on the tree between the rootId and this node if a valid id was passed
if (treeNode != null) {
var id = cladeRootNode.node_data['nodeid'];
var colorToUse = getIconColor(id)
highlightLimitedPath(treeNode,cladeRootNode,colorToUse)
}
if (('_children' in treeNode) && (treeNode._children.length>0)) {
for (var i = treeNode._children.length - 1; i >= 0; i--) {
mapAllNodesInClade(treeNode._children[i], cladeRootNode,icon,selectionID)
}
} else if (('children' in treeNode) && (treeNode.children.length>0)) {
//console.log('mapAllNodesInClade: traversing -children- attribute to follow clade')
for (var i = treeNode.children.length - 1; i >= 0; i--) {
mapAllNodesInClade(treeNode.children[i], cladeRootNode,icon,selectionID)
}
} else {
// we have reached the bottom of the hierarchy, write out the locations to the map
//
mapSingleNode(treeNode, cladeRootNode,icon,selectionID)
}
}
// This search is used only during highlight of entire clade.
// Processing moved from a service to inline javascript when integrated with Arbor/TangeloHub.
// the data element returned in the clicked node is a <g> element, so we need to look inside its
// '__data__' attribute to find the actual tree node record. This
function searchLocationsNearClade(selectedNode, callback) {
var selectedNodeID = selectedNode.node_data['nodeid']
//console.log("highlight clade below node id",selectedNodeID);
// find the node with the id that matches the one the user clicked on
rootOfClade = findNodeInTreeByNodeId(phylomap.currentTree, selectedNodeID)
// traverse tree recursively, adding all locations in all taxa below this. We create the
// icon here so each selection maps to just one type of icon
var icon = getIcon(selectedNodeID);
mapAllNodesInClade(rootOfClade, rootOfClade, icon, selectedNodeID)
//updateTableDisplay(phylomap.selectedOccurrences)
//updateGeoJSDisplay()
// Candela is too slow to always update automatically, user can invoke a render on demand
//updateCandelaDisplay()
// run the callback if one was passed. Use for setting and clearing processing badge
if (callback != null) callback();
}
function getIcon(nodeid) {
if (typeof iconIndex === "undefined" || iconIndex == null || iconIndex == iconList.length) {
iconIndex = 0;
}
return iconList[iconIndex++];
}
// CRL : since iconIndex is incremented immediately after each use, we handle the wraparound case and
// generate a lagging index value. Don't know why we have to adjust this, thought we caught the index
// before it it was incremeneted
function adjustColorIndex(index) {
if (index>0) {
return (index-1)
}
else {
return (iconList.length-1)
}
}
// CRL: lookup the color of the tree highlight by retrieving the color of the corresponding
// map icon and adjusting it (why adjustment needed?)
function getIconColor(id) {
var colorToUse;
// if this has been called before any markers are created for this node, set the indexes appropriately
if (typeof markerColorIndex[id] == "undefined") {
markerColorIndex[id] = iconIndex;
}
colorToUse = treeHighlightColorList[adjustColorIndex(markerColorIndex[id])];
//console.log("getIconColor: id=",id," markerColorIndex=",markerColorIndex[id]," treeColor=",colorToUse)
return colorToUse
}
function createMarker(latlng, name, text, id, icon) {
var html = "<b>" + name + "</b><br>" + text;
// save the color of this icon in a color index array
markerColorIndex[id] = iconIndex;
//console.log('saving marketColorIndex[',id,'] = ',iconIndex)
icon = ((icon != null) ? icon : getIcon());
var marker = new google.maps.Marker({
map: map,
position: latlng,
icon: icon,
title: text
});
google.maps.event.addListener(marker, 'mouseover', function() {
var node = nodeFromId(id);
// CRL: color highlight path according to icon color
highlightParents(node, getIconColor(id), "3px");
textOn(node[0]);
});
google.maps.event.addListener(marker, 'mouseout', function() {
if (typeof clickedOn === "undefined" || !clickedOn) {
var node = nodeFromId(id);
highlightParents(node, "#ccc", "1.5px");
textOff(node[0], true);
}
});
// enable persistent highlight after click ?
google.maps.event.addListener(marker, 'click', function() {
clickedOn = clickedOn == true ? false : true;
var node = nodeFromId(id);
if (clickedOn) {
infoWindow.setContent(html);
infoWindow.open(map, marker);
// CRL: change so highlights match icon colors
highlightParents(node, getIconColor(id), "3px");
//highlightParents(node, "red", "3px");
textOn(node[0]);
} else {
infoWindow.close();
highlightParents(node, "#ccc", "1.5px");
textOff(node[0], true);
}
});
// store in index the id/markerIndex key/value pair, easier to delete later.
if (typeof markerIndex[id] === "object") {
markerIndex[id].push(markers.length);
} else {
markerIndex[id] = [markers.length];
}
markers.push(marker);
}
function highlightParents (node, color, size) {
// ensure our helper functions were included
if (typeof highlightPath != 'undefined' && typeof nodeFromId != 'undefined') {
if (node[0].length > 0) {
highlightPath(node.datum(), color, size);
}
}
}
var map;
var clickedOn;
var mapOptions;
var overlays = [];
var markers = [];
var markerColorIndex = [];
var markerIndex = {};
var infoWindow;
var locationSelect;
var phylotree = {};
var iconIndex = 0;
var iconList = [
'http://maps.google.com/mapfiles/ms/icons/red-dot.png',
'http://maps.google.com/mapfiles/ms/icons/blue-dot.png',
'http://maps.google.com/mapfiles/ms/icons/green-dot.png',
'http://maps.google.com/mapfiles/ms/icons/orange-dot.png',
'http://maps.google.com/mapfiles/ms/icons/pink-dot.png',
'http://maps.google.com/mapfiles/ms/icons/ltblue-dot.png',
'http://maps.google.com/mapfiles/ms/icons/purple-dot.png',
'http://maps.google.com/mapfiles/ms/icons/yellow-dot.png',
'http://maps.google.com/mapfiles/ms/icons/red-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/blue-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/grn-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/pink-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/ltblu-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/purple-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/ylw-pushpin.png',
]
// adding matching hightlight colors to match the icons, so the tree hightlight color matches the icon color
var treeHighlightColorList = [ "red","blue","green","orange","pink","lightblue","purple","yellow","red","blue","green","pink","lightblue","purple","yellow"]
//function load() {
addLoadEvent(function () {
mapOptions = {
//center: new google.maps.LatLng(18.994609, -71.345215),
//zoom: 6,
center: new google.maps.LatLng(9.65, -83.0),
zoom: 5,
mapTypeId: google.maps.MapTypeId.ROADMAP
};
map = new google.maps.Map(d3.select("#map_canvas").node(),
mapOptions);
var drawingManager = new google.maps.drawing.DrawingManager({
drawingMode: google.maps.drawing.OverlayType.MARKER,
drawingControl: true,
drawingControlOptions: {
position: google.maps.ControlPosition.TOP_CENTER,
drawingModes: ['marker', 'circle', 'rectangle']
},
markerOptions: {icon: 'https://developers.google.com/maps/documentation/javascript/examples/full/images/beachflag.png'},
});
drawingManager.setMap(map);
google.maps.event.addListener(drawingManager, 'overlaycomplete', function(event) {
var searchUrl;
// create the restful style url to load data. The named arguments we used to use had to be
// replaced by positional arguments with a later version of Tangelo. Notice the /rect/id/long, etc.
// using up positions in the URL since the names of the arguments didn't work anymore.
if (event.type == google.maps.drawing.OverlayType.CIRCLE) {
searchLocationsNearCircle(
event.overlay.getCenter().lat(),
event.overlay.getCenter().lng(),
event.overlay.getRadius())
} else if (event.type == google.maps.drawing.OverlayType.RECTANGLE) {
searchLocationsNearRect(
event.overlay.bounds.getSouthWest().lng(),
event.overlay.bounds.getSouthWest().lat(),
event.overlay.bounds.getNorthEast().lng(),
event.overlay.bounds.getNorthEast().lat())
}
overlays.push(event.overlay);
});
var mylatlng = new google.maps.LatLng(-25.363882,131.044922);
infoWindow = new google.maps.InfoWindow();
});
function old_updateGeoJSDisplay() {
var spec = {
center: {
x: 0,
y: 0
},
zoom: 4,
layers: [{
renderer: 'vgl',
features: [{
type: 'point',
size: function (d) { return 5; },
position: function (d) { return {x: d.lon, y: d.lat}; },
fill: true,
fillColor: function (d) { return 'red'; },
fillOpacity: function (d) { return 0.5 ; },
stroke: true,
strokeColor: function (d) { return 'black'; },
strokeOpacity: 1,
strokeWidth: 2
}]
}]
};
spec.data = phylomap.selectedOccurrences;
return $('#geojs_map_canvas').geojsMap(spec);
}
//------------------------------------
// this function looks at the first entry in the selection list and makes an entry for each
// attribute so it could be chosen as the one to color the occurrences by
function fillAttributeSelector() {
var sample = phylomap.selectedOccurrences[0]
var list = []
for (attrib in sample) {
list.push(attrib)
}
d3.select("#geojs_attribute").selectAll("option").remove();
d3.select("#geojs_attribute").selectAll("option")
.data(list)
.enter().append("option")
.text(function (d) { return d; });
}
function returnDataAsText(p) {
var text = ''
for (var attrib in p) {
if (attrib.length>0) {
text += attrib+':'+p[attrib]+'\n'
}
}
return text
}
// #bb5a00 - reddish/brows for low values
// #ffffff - white for top values
function geojs_addVectorLayer(points) |
// this function loops through all of the occurrence points and assigns colors depending on the value
// of the individual occurence point within the range across all the points
function updateOccurrencePointColors() {
var minRed = 160.0/256.0
var minGreen = 80.0/256.0
// find out which attribute has been selected
var attribSelector = d3.select("#geojs_attribute").node();
var selectedAttrib = attribSelector.options[attribSelector.selectedIndex].text;
console.log('selected attrib is:',selectedAttrib)
candela_addGeoDots(phylomap.selectedOccurrences,selectedAttrib)
}
function geojs_resize() {
phylomap.geojsmap.map.resize(0, 0, $('#geojs_map_canvas').width()*0.9, $('#geojs_map_canvas').height());
}
function geojs_addBaseLayer() {
var map;
// clear out the old map
$('#geojs_map_canvas').empty()
map = geo.map({
node: '#geojs_map_canvas',
zoom: 2
});
map.createLayer('osm');
phylomap.geojsmap.map = map;
}
// this function is called as soon as the page is finished loading
function updateGeoJSDisplay() {
phylomap.geojsmap = {}
phylomap.geojsmap.map = null
phylomap.geojsmap.markers = null
phylomap.geojsmap.map = null
phylomap.geojsmap.markers = null
phylomap.geojsmap.previouscolor = null
//Proj4js.defs["EPSG:4326"] = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs";
//Proj4js.defs["EPSG:3031"] = "+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs";
//Proj4js.defs["EPSG:900913"] = "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +no_defs";
$(window).resize(geojs_resize);
fillAttributeSelector();
d3.select("#geojs_attribute")
.on("change", updateOccurrencePointColors);
geojs_addBaseLayer();
geojs_resize();
geojs_addVectorLayer(phylomap.selectedOccurrences);
}
// ---------- Candela plotting functions
// this function is called as soon as the page is finished loading
function updateCandelaDisplay() {
fillAttributeSelector();
d3.select("#geojs_attribute")
.on("change", updateOccurrencePointColors);
//geojs_addBaseLayer();
//candela_resize();
candela_addGeoDots(phylomap.selectedOccurrences);
}
function candela_resize() {
phylomap.geojsmap.map.resize(0, 0, $('#geojs_map_canvas').width()*0.7, $('#geojs_map_canvas').height());
}
function candela_addGeoDots(points, attrib='Poll') {
//console.log('geodots:',points,"\n");
//var markers = phylomap.geojsmap.map.createLayer("feature",{"renderer":"vgl"})
//var uiLayer = phylomap.geojsmap.map.createLayer('ui', {"renderer":"vgl"});
//var tooltip = uiLayer.createWidget('dom', {position: {x: 0, y: 0}});
//tooltipElem = $(tooltip.canvas()).attr('id', 'tooltip').addClass('hidden');
// Add a vector layer to the map. Fill the layer with all the points that are currently selected
$("#candela_map_canvas").empty();
var el = document.getElementById('candela_map_canvas')
el.style.width = '900px';
el.style.height = '1000px';
document.body.appendChild(el);
phylomap.candela_map = new candela.components.GeoDots(el, {
zoom: 7,
center: {
longitude: -82.948,
latitude: 9.9725
},
data: points,
width: 1000,
height: 700,
latitude: 'lat',
longitude: 'lon',
size: 'renderSize',
//tileUrl: 'http://c.tiles.wmflabs.org/hillshading/${z}/${x}/${y}.png',
//tileUrl: 'http://tile.stamen.com/terrain/${z}/${x}/${y}.jpg',
tileUrl: 'https://{s}.tile.thunderforest.com/landscape/{z}/{x}/{y}.png?apikey=6548db9e547c4c5eacc2304ee947ebbe',
color: attrib
});
phylomap.candela_map.render();
}
| {
//console.log(points,"\n");
var markers = phylomap.geojsmap.map.createLayer("feature",{"renderer":"vgl"})
var uiLayer = phylomap.geojsmap.map.createLayer('ui', {"renderer":"vgl"});
var tooltip = uiLayer.createWidget('dom', {position: {x: 0, y: 0}});
tooltipElem = $(tooltip.canvas()).attr('id', 'tooltip').addClass('hidden');
// Add a vector layer to the map. Fill the layer with all the points that are currently selected
for (var i = 0; i < points.length; i++) {
//console.log(points[0])
var lng_float = points[i]['lon']
var lat_float = points[i]['lat']
var pointText = returnDataAsText(points[i])
// add a point to the d3 layer
markers.createFeature("point",{selectionAPI:true})
.data([{x:lng_float, y:lat_float,text:pointText }])
.position(function(d) { return {x: d.x, y: d.y};} )
.style("fillColor", function(d) { return {r: 0, g: 1, b: 0};})
.style('strokeColor', 'black')
.geoOn(geo.event.feature.mouseclick, function (evt) {
console.log(evt)
phylomap.geojsmap.map.center({x: evt.data.x, y: evt.data.y});
})
.geoOn(geo.event.feature.mouseover, function (evt) {
this.modified();
markers.map().draw();
//tooltip.position({x: evt.data.x+0.015, y: evt.data.y+0.015});
tooltip.position({x: evt.data.x+0.0015, y: evt.data.y+0.0015});
tooltipElem.text(' '+evt.data.text);
tooltipElem.removeClass('hidden');
})
.geoOn(geo.event.feature.mouseout, function (evt) {
// evt.data.opacity = 1.0;
// evt.data.strokeOpacity = 1.0;
// this.modified();
//markers.map().draw();
tooltipElem.addClass('hidden');
})
.style('fillOpacity', 1.0)
.style('strokeOpacity', 1.0)
}
// save markers layer globally
phylomap.geojsmap.markers = markers
phylomap.geojsmap.map.draw();
} | identifier_body |
phylomap.js | function addLoadEvent(func) {
var oldonload = window.onload;
if (typeof window.onload != 'function') {
window.onload = func;
} else {
window.onload = function() {
if (oldonload) {
oldonload();
}
func();
}
}
}
function log(msg) {
setTimeout(function() {
throw new Error(msg);
}, 0);
}
function clearLocations() {
infoWindow.close();
for (i in markers) {
markers[i].setMap(null);
// CRL
delete markerColorIndex[i];
}
markers.length = 0;
markerIndex = {};
for (i in overlays) {
overlays[i].setMap(null);
}
// set index to last item so it will reset on next element
iconIndex = 0;
// clear occurrence compilation list
phylomap.selectedOccurrences = []
//updateTableDisplay(phylomap.selectedOccurrences)
//updateGeoJSDisplay()
}
// Can create serious problems as it doesn't delete markerIndex references!
function clearOneLocation(index) {
var marker = markers[index];
if (marker) {
marker.setMap(null);
}
delete markers[index];
}
function clearOneId(id) {
// if a marker has ID = id
if (typeof markerIndex[id] != "undefined") {
markerIndex[id].forEach(function(d) {
clearOneLocation(d);
});
delete markerIndex[id];
}
}
// returns true if there is at least 1 marker on the map with ID = _id
function markerExists(id) {
if (typeof markerIndex[id] != "undefined" && markerIndex[id].length > 0)
return true;
else
return false;
}
// ---- build node lists to aid traversal, finding locations or matching nodes
function addTaxaToTaxaList(treenode) {
if ('children' in treenode) {
// case for intermediate nodes, continue traversal
for (var i=0;i<treenode.children.length;i++) {
addTaxaToTaxaList(treenode.children[i])
}
} else {
// case for a taxon
phylomap.taxalist.push(treenode)
}
}
// this is a pre-processing function that is called once each time a tree is loaded. It runs throgh
// the tree and builds a list of all nodes, so searching doesn't have to involve recursive searching
// each time. The routine assumes phylomap.currentTree is valid.
function addAllNodesToAllNodeList(treenode) {
phylomap.allnodelist.push(treenode)
if ('children' in treenode) {
// case for intermediate nodes, continue traversal
for (var i=0;i<treenode.children.length;i++) {
addAllNodesToAllNodeList(treenode.children[i])
}
}
}
// this is a pre-processing function that is called once each time a tree is loaded. It runs throgh
// the tree and builds a list of the taxa, so searching doesn't have to involve recursive searching
// each time. The routine assumes phylomap.currentTree is valid.
function processTreeForMapLocations() {
// clear out the previous list if there is one
while (phylomap.taxalist.length > 0) {
phylomap.taxalist.pop()
}
while (phylomap.allnodelist.length > 0) {
phylomap.allnodelist.pop()
}
// start a recursive traversals to build lists of just taxa and of all the nodes for use later
addTaxaToTaxaList(phylomap.currentTree)
addAllNodesToAllNodeList(phylomap.currentTree)
//console.log(phylomap.taxalist)
}
//------ end of build node lists
function searchLocationsNear(searchUrl) {
d3.json(searchUrl, function(json) {
var icon = getIcon();
var bounds = new google.maps.LatLngBounds();
json.result.data.forEach(function(d){
var name = d.name;
var id = d.ID.$oid;
var latlng = new google.maps.LatLng(
parseFloat(d.lat),
parseFloat(d.lng));
var text = 'name: ' + name + '\n';
text = text + "location: " + latlng + "\n"
text = text + "id: " + id;
createMarker(latlng, name, text, id, icon);
bounds.extend(latlng);
});
});
}
// The next few routines below are involved in handling circles drawn on the map by the user.
// These routines check for observation points that lie within the radius of the drawn circle.
// determine if a point (lat,lng) is inside the circle with center clat,clng, and given radius
function pointInCircle(lat,lng,clat,clng,radius) {
var KPiDouble = 3.141592654
var KDegreesToRadiansDouble = 0.01745329 // KPiDouble / 180.0
var earthRadius = 6378137 // in meters
clng = clng * KDegreesToRadiansDouble
clat = clat * KDegreesToRadiansDouble
var cradius = radius / earthRadius
var lng = lng * KDegreesToRadiansDouble
var lat = lat * KDegreesToRadiansDouble
var angle = Math.acos(Math.sin(clat) * Math.sin(lat) + Math.cos(clat) * Math.cos(lat) * Math.cos(lng - clng))
var decision = (angle < cradius)
//if (decision) {
// console.log(lat,lng,clat,clng,angle,cradius,(angle < cradius))
//}
return decision
}
// This call adds markers to the map for all occurrence points within the boundaries of a circle.
function searchLocationsNearCircle(lat,lon,radius) {
var geomap;
var icon = getIcon();
var bounds = new google.maps.LatLngBounds();
// look through all taxa in precompiled list
for (var i=0;i<phylomap.taxalist.length;i++) {
var name = phylomap.taxalist[i].node_data['node name'];
var id = phylomap.taxalist[i].node_data['nodeid'];
if ('loc' in phylomap.taxalist[i].node_data) {
for (var j = phylomap.taxalist[i].node_data['loc'].length - 1; j >= 0; j--) {
var point = phylomap.taxalist[i].node_data['loc'][j]
// if this point is inside the target circle, then add a marker
if (pointInCircle(point[1],point[0],lat,lon,radius)) {
var latlng = new google.maps.LatLng(
parseFloat(point[1]),
parseFloat(point[0]));
// the id field is used internally to phylomap for highlighting, it doesn't
// need to be displayed to the user generally
//var text = "species: " + name + " <br>id: " + id;
var text = "name: " + name + "\n";
// add other attributes to display tag if they are present in the taxon nodes
var attribs = []
if ('attributes' in phylomap.taxalist[i].node_data) {
if (phylomap.taxalist[i].node_data['attributes'].length >= j) {
attribs = phylomap.taxalist[i].node_data['attributes'][j]
// add descriptions to the text markers
for (var attrib in attribs) {
text = text + ' [' + attrib+']:'+attribs[attrib] + '\n'
};
}
}
createMarker(latlng, name, text, id, icon);
addLocationToSelectedList(phylomap.taxalist[i],attribs,point[1],point[0])
bounds.extend(latlng);
var colorToUse = getIconColor()
highlightPath(phylomap.taxalist[i],phylomap.currentTree,colorToUse)
// Candela is too slow to always update automatically
//updateCandelaDisplay()
}
}
}
}
//updateTableDisplay(phylomap.selectedOccurrences)
//geomap = updateGeoJSDisplay()
//geomap.pan({x:0.01,y:0.01})
}
function addLocationToSelectedList(node,attribs,lat,lon) {
//console.log('adding node to selection list. Length now:',phylomap.selectedOccurrences.length)
var record = {}
// if there are extra attributes on this node, copy them over to the trait matrix selection entry
for (attrib in attribs) {
record[attrib] = attribs[attrib]
}
record['lat'] = lat
record['lon'] = lon
record['species'] = node.node_data['node name']
record['renderSize'] = 10
phylomap.selectedOccurrences.push(record)
}
function findNodeInTreeByNodeId(currentTreeNode, nodeID) {
for (var i = phylomap.allnodelist.length - 1; i >= 0; i--) {
if (phylomap.allnodelist[i].node_data['nodeid'] == nodeID) {
return phylomap.allnodelist[i]
}
}
}
function mapSingleNode(treeNode, rootNode,icon,selectionID) {
var bounds = new google.maps.LatLngBounds();
var name = treeNode.node_data['node name'];
//console.log('map single node of id=',id, treeNode)
// if this node has locations, then add them to the map
if ('loc' in treeNode.node_data) {
for (var i = 0; i< treeNode.node_data['loc'].length; i++) {
var thisloc = treeNode.node_data['loc'][i]
var latlng = new google.maps.LatLng(
parseFloat(thisloc[1]),
parseFloat(thisloc[0]));
var text = "name: " + name + "\nid: " + selectionID + '\n';
// add other attributes to display tag if they are present in the taxon node
var attribs = []
if ('attributes' in treeNode.node_data) {
if (treeNode.node_data['attributes'].length >= i) |
}
createMarker(latlng, name, text, selectionID, icon);
bounds.extend(latlng);
addLocationToSelectedList(treeNode,attribs,thisloc[1],thisloc[0])
};
}
}
// recursive traversal of the current tree to uncover all nodes below the passed node and
// map them. The clade root is passed so highlighting can be performed by lighting nodes between
// the clade root and the current node
// *** had to use _children instead of children because of how the accessor algorithm
// in phylotree re-names the attributes. This search might fail sometimes, so testing
// for valid children references under either name
function mapAllNodesInClade(treeNode, cladeRootNode,icon,selectionID) {
//console.log('mapping everything below node:',treeNode.node_data['nodeid'])
// highlight the path on the tree between the rootId and this node if a valid id was passed
if (treeNode != null) {
var id = cladeRootNode.node_data['nodeid'];
var colorToUse = getIconColor(id)
highlightLimitedPath(treeNode,cladeRootNode,colorToUse)
}
if (('_children' in treeNode) && (treeNode._children.length>0)) {
for (var i = treeNode._children.length - 1; i >= 0; i--) {
mapAllNodesInClade(treeNode._children[i], cladeRootNode,icon,selectionID)
}
} else if (('children' in treeNode) && (treeNode.children.length>0)) {
//console.log('mapAllNodesInClade: traversing -children- attribute to follow clade')
for (var i = treeNode.children.length - 1; i >= 0; i--) {
mapAllNodesInClade(treeNode.children[i], cladeRootNode,icon,selectionID)
}
} else {
// we have reached the bottom of the hierarchy, write out the locations to the map
//
mapSingleNode(treeNode, cladeRootNode,icon,selectionID)
}
}
// This search is used only during highlight of entire clade.
// Processing moved from a service to inline javascript when integrated with Arbor/TangeloHub.
// the data element returned in the clicked node is a <g> element, so we need to look inside its
// '__data__' attribute to find the actual tree node record. This
function searchLocationsNearClade(selectedNode, callback) {
var selectedNodeID = selectedNode.node_data['nodeid']
//console.log("highlight clade below node id",selectedNodeID);
// find the node with the id that matches the one the user clicked on
rootOfClade = findNodeInTreeByNodeId(phylomap.currentTree, selectedNodeID)
// traverse tree recursively, adding all locations in all taxa below this. We create the
// icon here so each selection maps to just one type of icon
var icon = getIcon(selectedNodeID);
mapAllNodesInClade(rootOfClade, rootOfClade, icon, selectedNodeID)
//updateTableDisplay(phylomap.selectedOccurrences)
//updateGeoJSDisplay()
// Candela is too slow to always update automatically, user can invoke a render on demand
//updateCandelaDisplay()
// run the callback if one was passed. Use for setting and clearing processing badge
if (callback != null) callback();
}
function getIcon(nodeid) {
if (typeof iconIndex === "undefined" || iconIndex == null || iconIndex == iconList.length) {
iconIndex = 0;
}
return iconList[iconIndex++];
}
// CRL : since iconIndex is incremented immediately after each use, we handle the wraparound case and
// generate a lagging index value. Don't know why we have to adjust this, thought we caught the index
// before it it was incremeneted
function adjustColorIndex(index) {
if (index>0) {
return (index-1)
}
else {
return (iconList.length-1)
}
}
// CRL: lookup the color of the tree highlight by retrieving the color of the corresponding
// map icon and adjusting it (why adjustment needed?)
function getIconColor(id) {
var colorToUse;
// if this has been called before any markers are created for this node, set the indexes appropriately
if (typeof markerColorIndex[id] == "undefined") {
markerColorIndex[id] = iconIndex;
}
colorToUse = treeHighlightColorList[adjustColorIndex(markerColorIndex[id])];
//console.log("getIconColor: id=",id," markerColorIndex=",markerColorIndex[id]," treeColor=",colorToUse)
return colorToUse
}
function createMarker(latlng, name, text, id, icon) {
var html = "<b>" + name + "</b><br>" + text;
// save the color of this icon in a color index array
markerColorIndex[id] = iconIndex;
//console.log('saving marketColorIndex[',id,'] = ',iconIndex)
icon = ((icon != null) ? icon : getIcon());
var marker = new google.maps.Marker({
map: map,
position: latlng,
icon: icon,
title: text
});
google.maps.event.addListener(marker, 'mouseover', function() {
var node = nodeFromId(id);
// CRL: color highlight path according to icon color
highlightParents(node, getIconColor(id), "3px");
textOn(node[0]);
});
google.maps.event.addListener(marker, 'mouseout', function() {
if (typeof clickedOn === "undefined" || !clickedOn) {
var node = nodeFromId(id);
highlightParents(node, "#ccc", "1.5px");
textOff(node[0], true);
}
});
// enable persistent highlight after click ?
google.maps.event.addListener(marker, 'click', function() {
clickedOn = clickedOn == true ? false : true;
var node = nodeFromId(id);
if (clickedOn) {
infoWindow.setContent(html);
infoWindow.open(map, marker);
// CRL: change so highlights match icon colors
highlightParents(node, getIconColor(id), "3px");
//highlightParents(node, "red", "3px");
textOn(node[0]);
} else {
infoWindow.close();
highlightParents(node, "#ccc", "1.5px");
textOff(node[0], true);
}
});
// store in index the id/markerIndex key/value pair, easier to delete later.
if (typeof markerIndex[id] === "object") {
markerIndex[id].push(markers.length);
} else {
markerIndex[id] = [markers.length];
}
markers.push(marker);
}
function highlightParents (node, color, size) {
// ensure our helper functions were included
if (typeof highlightPath != 'undefined' && typeof nodeFromId != 'undefined') {
if (node[0].length > 0) {
highlightPath(node.datum(), color, size);
}
}
}
var map;
var clickedOn;
var mapOptions;
var overlays = [];
var markers = [];
var markerColorIndex = [];
var markerIndex = {};
var infoWindow;
var locationSelect;
var phylotree = {};
var iconIndex = 0;
var iconList = [
'http://maps.google.com/mapfiles/ms/icons/red-dot.png',
'http://maps.google.com/mapfiles/ms/icons/blue-dot.png',
'http://maps.google.com/mapfiles/ms/icons/green-dot.png',
'http://maps.google.com/mapfiles/ms/icons/orange-dot.png',
'http://maps.google.com/mapfiles/ms/icons/pink-dot.png',
'http://maps.google.com/mapfiles/ms/icons/ltblue-dot.png',
'http://maps.google.com/mapfiles/ms/icons/purple-dot.png',
'http://maps.google.com/mapfiles/ms/icons/yellow-dot.png',
'http://maps.google.com/mapfiles/ms/icons/red-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/blue-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/grn-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/pink-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/ltblu-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/purple-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/ylw-pushpin.png',
]
// adding matching hightlight colors to match the icons, so the tree hightlight color matches the icon color
var treeHighlightColorList = [ "red","blue","green","orange","pink","lightblue","purple","yellow","red","blue","green","pink","lightblue","purple","yellow"]
//function load() {
addLoadEvent(function () {
mapOptions = {
//center: new google.maps.LatLng(18.994609, -71.345215),
//zoom: 6,
center: new google.maps.LatLng(9.65, -83.0),
zoom: 5,
mapTypeId: google.maps.MapTypeId.ROADMAP
};
map = new google.maps.Map(d3.select("#map_canvas").node(),
mapOptions);
var drawingManager = new google.maps.drawing.DrawingManager({
drawingMode: google.maps.drawing.OverlayType.MARKER,
drawingControl: true,
drawingControlOptions: {
position: google.maps.ControlPosition.TOP_CENTER,
drawingModes: ['marker', 'circle', 'rectangle']
},
markerOptions: {icon: 'https://developers.google.com/maps/documentation/javascript/examples/full/images/beachflag.png'},
});
drawingManager.setMap(map);
google.maps.event.addListener(drawingManager, 'overlaycomplete', function(event) {
var searchUrl;
// create the restful style url to load data. The named arguments we used to use had to be
// replaced by positional arguments with a later version of Tangelo. Notice the /rect/id/long, etc.
// using up positions in the URL since the names of the arguments didn't work anymore.
if (event.type == google.maps.drawing.OverlayType.CIRCLE) {
searchLocationsNearCircle(
event.overlay.getCenter().lat(),
event.overlay.getCenter().lng(),
event.overlay.getRadius())
} else if (event.type == google.maps.drawing.OverlayType.RECTANGLE) {
searchLocationsNearRect(
event.overlay.bounds.getSouthWest().lng(),
event.overlay.bounds.getSouthWest().lat(),
event.overlay.bounds.getNorthEast().lng(),
event.overlay.bounds.getNorthEast().lat())
}
overlays.push(event.overlay);
});
var mylatlng = new google.maps.LatLng(-25.363882,131.044922);
infoWindow = new google.maps.InfoWindow();
});
function old_updateGeoJSDisplay() {
var spec = {
center: {
x: 0,
y: 0
},
zoom: 4,
layers: [{
renderer: 'vgl',
features: [{
type: 'point',
size: function (d) { return 5; },
position: function (d) { return {x: d.lon, y: d.lat}; },
fill: true,
fillColor: function (d) { return 'red'; },
fillOpacity: function (d) { return 0.5 ; },
stroke: true,
strokeColor: function (d) { return 'black'; },
strokeOpacity: 1,
strokeWidth: 2
}]
}]
};
spec.data = phylomap.selectedOccurrences;
return $('#geojs_map_canvas').geojsMap(spec);
}
//------------------------------------
// this function looks at the first entry in the selection list and makes an entry for each
// attribute so it could be chosen as the one to color the occurrences by
function fillAttributeSelector() {
var sample = phylomap.selectedOccurrences[0]
var list = []
for (attrib in sample) {
list.push(attrib)
}
d3.select("#geojs_attribute").selectAll("option").remove();
d3.select("#geojs_attribute").selectAll("option")
.data(list)
.enter().append("option")
.text(function (d) { return d; });
}
function returnDataAsText(p) {
var text = ''
for (var attrib in p) {
if (attrib.length>0) {
text += attrib+':'+p[attrib]+'\n'
}
}
return text
}
// #bb5a00 - reddish/brows for low values
// #ffffff - white for top values
function geojs_addVectorLayer(points) {
//console.log(points,"\n");
var markers = phylomap.geojsmap.map.createLayer("feature",{"renderer":"vgl"})
var uiLayer = phylomap.geojsmap.map.createLayer('ui', {"renderer":"vgl"});
var tooltip = uiLayer.createWidget('dom', {position: {x: 0, y: 0}});
tooltipElem = $(tooltip.canvas()).attr('id', 'tooltip').addClass('hidden');
// Add a vector layer to the map. Fill the layer with all the points that are currently selected
for (var i = 0; i < points.length; i++) {
//console.log(points[0])
var lng_float = points[i]['lon']
var lat_float = points[i]['lat']
var pointText = returnDataAsText(points[i])
// add a point to the d3 layer
markers.createFeature("point",{selectionAPI:true})
.data([{x:lng_float, y:lat_float,text:pointText }])
.position(function(d) { return {x: d.x, y: d.y};} )
.style("fillColor", function(d) { return {r: 0, g: 1, b: 0};})
.style('strokeColor', 'black')
.geoOn(geo.event.feature.mouseclick, function (evt) {
console.log(evt)
phylomap.geojsmap.map.center({x: evt.data.x, y: evt.data.y});
})
.geoOn(geo.event.feature.mouseover, function (evt) {
this.modified();
markers.map().draw();
//tooltip.position({x: evt.data.x+0.015, y: evt.data.y+0.015});
tooltip.position({x: evt.data.x+0.0015, y: evt.data.y+0.0015});
tooltipElem.text(' '+evt.data.text);
tooltipElem.removeClass('hidden');
})
.geoOn(geo.event.feature.mouseout, function (evt) {
// evt.data.opacity = 1.0;
// evt.data.strokeOpacity = 1.0;
// this.modified();
//markers.map().draw();
tooltipElem.addClass('hidden');
})
.style('fillOpacity', 1.0)
.style('strokeOpacity', 1.0)
}
// save markers layer globally
phylomap.geojsmap.markers = markers
phylomap.geojsmap.map.draw();
}
// this function loops through all of the occurrence points and assigns colors depending on the value
// of the individual occurence point within the range across all the points
function updateOccurrencePointColors() {
var minRed = 160.0/256.0
var minGreen = 80.0/256.0
// find out which attribute has been selected
var attribSelector = d3.select("#geojs_attribute").node();
var selectedAttrib = attribSelector.options[attribSelector.selectedIndex].text;
console.log('selected attrib is:',selectedAttrib)
candela_addGeoDots(phylomap.selectedOccurrences,selectedAttrib)
}
function geojs_resize() {
phylomap.geojsmap.map.resize(0, 0, $('#geojs_map_canvas').width()*0.9, $('#geojs_map_canvas').height());
}
function geojs_addBaseLayer() {
var map;
// clear out the old map
$('#geojs_map_canvas').empty()
map = geo.map({
node: '#geojs_map_canvas',
zoom: 2
});
map.createLayer('osm');
phylomap.geojsmap.map = map;
}
// this function is called as soon as the page is finished loading
function updateGeoJSDisplay() {
phylomap.geojsmap = {}
phylomap.geojsmap.map = null
phylomap.geojsmap.markers = null
phylomap.geojsmap.map = null
phylomap.geojsmap.markers = null
phylomap.geojsmap.previouscolor = null
//Proj4js.defs["EPSG:4326"] = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs";
//Proj4js.defs["EPSG:3031"] = "+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs";
//Proj4js.defs["EPSG:900913"] = "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +no_defs";
$(window).resize(geojs_resize);
fillAttributeSelector();
d3.select("#geojs_attribute")
.on("change", updateOccurrencePointColors);
geojs_addBaseLayer();
geojs_resize();
geojs_addVectorLayer(phylomap.selectedOccurrences);
}
// ---------- Candela plotting functions
// this function is called as soon as the page is finished loading
function updateCandelaDisplay() {
fillAttributeSelector();
d3.select("#geojs_attribute")
.on("change", updateOccurrencePointColors);
//geojs_addBaseLayer();
//candela_resize();
candela_addGeoDots(phylomap.selectedOccurrences);
}
function candela_resize() {
phylomap.geojsmap.map.resize(0, 0, $('#geojs_map_canvas').width()*0.7, $('#geojs_map_canvas').height());
}
function candela_addGeoDots(points, attrib='Poll') {
//console.log('geodots:',points,"\n");
//var markers = phylomap.geojsmap.map.createLayer("feature",{"renderer":"vgl"})
//var uiLayer = phylomap.geojsmap.map.createLayer('ui', {"renderer":"vgl"});
//var tooltip = uiLayer.createWidget('dom', {position: {x: 0, y: 0}});
//tooltipElem = $(tooltip.canvas()).attr('id', 'tooltip').addClass('hidden');
// Add a vector layer to the map. Fill the layer with all the points that are currently selected
$("#candela_map_canvas").empty();
var el = document.getElementById('candela_map_canvas')
el.style.width = '900px';
el.style.height = '1000px';
document.body.appendChild(el);
phylomap.candela_map = new candela.components.GeoDots(el, {
zoom: 7,
center: {
longitude: -82.948,
latitude: 9.9725
},
data: points,
width: 1000,
height: 700,
latitude: 'lat',
longitude: 'lon',
size: 'renderSize',
//tileUrl: 'http://c.tiles.wmflabs.org/hillshading/${z}/${x}/${y}.png',
//tileUrl: 'http://tile.stamen.com/terrain/${z}/${x}/${y}.jpg',
tileUrl: 'https://{s}.tile.thunderforest.com/landscape/{z}/{x}/{y}.png?apikey=6548db9e547c4c5eacc2304ee947ebbe',
color: attrib
});
phylomap.candela_map.render();
}
| {
attribs = treeNode.node_data['attributes'][i]
// add descriptions to the text markers
for (var attrib in attribs) {
text = text + ' [' + attrib+']:'+attribs[attrib] + '\n'
};
} | conditional_block |
phylomap.js | function addLoadEvent(func) {
var oldonload = window.onload;
if (typeof window.onload != 'function') {
window.onload = func;
} else {
window.onload = function() {
if (oldonload) {
oldonload();
}
func();
}
}
}
function log(msg) {
setTimeout(function() {
throw new Error(msg);
}, 0);
}
function clearLocations() {
infoWindow.close();
for (i in markers) {
markers[i].setMap(null);
// CRL
delete markerColorIndex[i];
}
markers.length = 0;
markerIndex = {};
for (i in overlays) {
overlays[i].setMap(null);
}
// set index to last item so it will reset on next element
iconIndex = 0;
// clear occurrence compilation list
phylomap.selectedOccurrences = []
//updateTableDisplay(phylomap.selectedOccurrences)
//updateGeoJSDisplay()
}
// Can create serious problems as it doesn't delete markerIndex references!
function clearOneLocation(index) {
var marker = markers[index];
if (marker) {
marker.setMap(null);
}
delete markers[index];
}
function clearOneId(id) {
// if a marker has ID = id
if (typeof markerIndex[id] != "undefined") {
markerIndex[id].forEach(function(d) {
clearOneLocation(d);
});
delete markerIndex[id];
}
}
// returns true if there is at least 1 marker on the map with ID = _id
function markerExists(id) {
if (typeof markerIndex[id] != "undefined" && markerIndex[id].length > 0)
return true;
else
return false;
}
// ---- build node lists to aid traversal, finding locations or matching nodes
function addTaxaToTaxaList(treenode) {
if ('children' in treenode) {
// case for intermediate nodes, continue traversal
for (var i=0;i<treenode.children.length;i++) {
addTaxaToTaxaList(treenode.children[i])
}
} else {
// case for a taxon
phylomap.taxalist.push(treenode)
}
}
// this is a pre-processing function that is called once each time a tree is loaded. It runs throgh
// the tree and builds a list of all nodes, so searching doesn't have to involve recursive searching
// each time. The routine assumes phylomap.currentTree is valid.
function addAllNodesToAllNodeList(treenode) {
phylomap.allnodelist.push(treenode)
if ('children' in treenode) {
// case for intermediate nodes, continue traversal
for (var i=0;i<treenode.children.length;i++) {
addAllNodesToAllNodeList(treenode.children[i])
}
}
}
// this is a pre-processing function that is called once each time a tree is loaded. It runs throgh
// the tree and builds a list of the taxa, so searching doesn't have to involve recursive searching
// each time. The routine assumes phylomap.currentTree is valid.
function processTreeForMapLocations() {
// clear out the previous list if there is one
while (phylomap.taxalist.length > 0) {
phylomap.taxalist.pop()
}
while (phylomap.allnodelist.length > 0) {
phylomap.allnodelist.pop()
}
// start a recursive traversals to build lists of just taxa and of all the nodes for use later
addTaxaToTaxaList(phylomap.currentTree)
addAllNodesToAllNodeList(phylomap.currentTree)
//console.log(phylomap.taxalist)
}
//------ end of build node lists
function searchLocationsNear(searchUrl) {
d3.json(searchUrl, function(json) {
var icon = getIcon();
var bounds = new google.maps.LatLngBounds();
json.result.data.forEach(function(d){
var name = d.name;
var id = d.ID.$oid;
var latlng = new google.maps.LatLng(
parseFloat(d.lat),
parseFloat(d.lng));
var text = 'name: ' + name + '\n';
text = text + "location: " + latlng + "\n"
text = text + "id: " + id;
createMarker(latlng, name, text, id, icon);
bounds.extend(latlng);
});
});
}
// The next few routines below are involved in handling circles drawn on the map by the user.
// These routines check for observation points that lie within the radius of the drawn circle.
// determine if a point (lat,lng) is inside the circle with center clat,clng, and given radius
function pointInCircle(lat,lng,clat,clng,radius) {
var KPiDouble = 3.141592654
var KDegreesToRadiansDouble = 0.01745329 // KPiDouble / 180.0
var earthRadius = 6378137 // in meters
clng = clng * KDegreesToRadiansDouble
clat = clat * KDegreesToRadiansDouble
var cradius = radius / earthRadius
var lng = lng * KDegreesToRadiansDouble
var lat = lat * KDegreesToRadiansDouble
var angle = Math.acos(Math.sin(clat) * Math.sin(lat) + Math.cos(clat) * Math.cos(lat) * Math.cos(lng - clng))
var decision = (angle < cradius)
//if (decision) {
// console.log(lat,lng,clat,clng,angle,cradius,(angle < cradius))
//}
return decision
}
// This call adds markers to the map for all occurrence points within the boundaries of a circle.
function searchLocationsNearCircle(lat,lon,radius) {
var geomap;
var icon = getIcon();
var bounds = new google.maps.LatLngBounds();
// look through all taxa in precompiled list
for (var i=0;i<phylomap.taxalist.length;i++) {
var name = phylomap.taxalist[i].node_data['node name'];
var id = phylomap.taxalist[i].node_data['nodeid'];
if ('loc' in phylomap.taxalist[i].node_data) {
for (var j = phylomap.taxalist[i].node_data['loc'].length - 1; j >= 0; j--) {
var point = phylomap.taxalist[i].node_data['loc'][j]
// if this point is inside the target circle, then add a marker
if (pointInCircle(point[1],point[0],lat,lon,radius)) {
var latlng = new google.maps.LatLng(
parseFloat(point[1]),
parseFloat(point[0]));
// the id field is used internally to phylomap for highlighting, it doesn't
// need to be displayed to the user generally
//var text = "species: " + name + " <br>id: " + id;
var text = "name: " + name + "\n";
// add other attributes to display tag if they are present in the taxon nodes
var attribs = []
if ('attributes' in phylomap.taxalist[i].node_data) {
if (phylomap.taxalist[i].node_data['attributes'].length >= j) {
attribs = phylomap.taxalist[i].node_data['attributes'][j]
// add descriptions to the text markers
for (var attrib in attribs) {
text = text + ' [' + attrib+']:'+attribs[attrib] + '\n'
};
}
}
createMarker(latlng, name, text, id, icon);
addLocationToSelectedList(phylomap.taxalist[i],attribs,point[1],point[0])
bounds.extend(latlng);
var colorToUse = getIconColor()
highlightPath(phylomap.taxalist[i],phylomap.currentTree,colorToUse)
// Candela is too slow to always update automatically
//updateCandelaDisplay()
}
}
}
}
//updateTableDisplay(phylomap.selectedOccurrences)
//geomap = updateGeoJSDisplay()
//geomap.pan({x:0.01,y:0.01})
}
function addLocationToSelectedList(node,attribs,lat,lon) {
//console.log('adding node to selection list. Length now:',phylomap.selectedOccurrences.length)
var record = {}
// if there are extra attributes on this node, copy them over to the trait matrix selection entry
for (attrib in attribs) {
record[attrib] = attribs[attrib]
}
record['lat'] = lat
record['lon'] = lon
record['species'] = node.node_data['node name']
record['renderSize'] = 10
phylomap.selectedOccurrences.push(record)
}
function findNodeInTreeByNodeId(currentTreeNode, nodeID) {
for (var i = phylomap.allnodelist.length - 1; i >= 0; i--) {
if (phylomap.allnodelist[i].node_data['nodeid'] == nodeID) {
return phylomap.allnodelist[i]
}
}
}
function mapSingleNode(treeNode, rootNode,icon,selectionID) {
var bounds = new google.maps.LatLngBounds();
var name = treeNode.node_data['node name'];
//console.log('map single node of id=',id, treeNode)
// if this node has locations, then add them to the map
if ('loc' in treeNode.node_data) {
for (var i = 0; i< treeNode.node_data['loc'].length; i++) {
var thisloc = treeNode.node_data['loc'][i]
var latlng = new google.maps.LatLng(
parseFloat(thisloc[1]),
parseFloat(thisloc[0]));
var text = "name: " + name + "\nid: " + selectionID + '\n';
// add other attributes to display tag if they are present in the taxon node
var attribs = []
if ('attributes' in treeNode.node_data) {
if (treeNode.node_data['attributes'].length >= i) {
attribs = treeNode.node_data['attributes'][i]
// add descriptions to the text markers
for (var attrib in attribs) {
text = text + ' [' + attrib+']:'+attribs[attrib] + '\n'
};
}
}
createMarker(latlng, name, text, selectionID, icon);
bounds.extend(latlng);
addLocationToSelectedList(treeNode,attribs,thisloc[1],thisloc[0])
};
}
}
// recursive traversal of the current tree to uncover all nodes below the passed node and
// map them. The clade root is passed so highlighting can be performed by lighting nodes between
// the clade root and the current node
// *** had to use _children instead of children because of how the accessor algorithm
// in phylotree re-names the attributes. This search might fail sometimes, so testing
// for valid children references under either name
function mapAllNodesInClade(treeNode, cladeRootNode,icon,selectionID) {
//console.log('mapping everything below node:',treeNode.node_data['nodeid'])
// highlight the path on the tree between the rootId and this node if a valid id was passed
if (treeNode != null) {
var id = cladeRootNode.node_data['nodeid'];
var colorToUse = getIconColor(id)
highlightLimitedPath(treeNode,cladeRootNode,colorToUse)
}
if (('_children' in treeNode) && (treeNode._children.length>0)) {
for (var i = treeNode._children.length - 1; i >= 0; i--) {
mapAllNodesInClade(treeNode._children[i], cladeRootNode,icon,selectionID)
}
} else if (('children' in treeNode) && (treeNode.children.length>0)) {
//console.log('mapAllNodesInClade: traversing -children- attribute to follow clade')
for (var i = treeNode.children.length - 1; i >= 0; i--) {
mapAllNodesInClade(treeNode.children[i], cladeRootNode,icon,selectionID)
}
} else {
// we have reached the bottom of the hierarchy, write out the locations to the map
//
mapSingleNode(treeNode, cladeRootNode,icon,selectionID)
}
}
// This search is used only during highlight of entire clade.
// Processing moved from a service to inline javascript when integrated with Arbor/TangeloHub.
// the data element returned in the clicked node is a <g> element, so we need to look inside its
// '__data__' attribute to find the actual tree node record. This
function searchLocationsNearClade(selectedNode, callback) {
var selectedNodeID = selectedNode.node_data['nodeid']
//console.log("highlight clade below node id",selectedNodeID);
// find the node with the id that matches the one the user clicked on
rootOfClade = findNodeInTreeByNodeId(phylomap.currentTree, selectedNodeID)
// traverse tree recursively, adding all locations in all taxa below this. We create the
// icon here so each selection maps to just one type of icon
var icon = getIcon(selectedNodeID);
mapAllNodesInClade(rootOfClade, rootOfClade, icon, selectedNodeID)
//updateTableDisplay(phylomap.selectedOccurrences)
//updateGeoJSDisplay()
// Candela is too slow to always update automatically, user can invoke a render on demand
//updateCandelaDisplay()
// run the callback if one was passed. Use for setting and clearing processing badge
if (callback != null) callback();
}
function getIcon(nodeid) {
if (typeof iconIndex === "undefined" || iconIndex == null || iconIndex == iconList.length) {
iconIndex = 0;
}
return iconList[iconIndex++];
}
// CRL : since iconIndex is incremented immediately after each use, we handle the wraparound case and
// generate a lagging index value. Don't know why we have to adjust this, thought we caught the index
// before it it was incremeneted
function adjustColorIndex(index) {
if (index>0) {
return (index-1)
}
else {
return (iconList.length-1)
}
}
// CRL: lookup the color of the tree highlight by retrieving the color of the corresponding
// map icon and adjusting it (why adjustment needed?)
function getIconColor(id) {
var colorToUse;
// if this has been called before any markers are created for this node, set the indexes appropriately
if (typeof markerColorIndex[id] == "undefined") {
markerColorIndex[id] = iconIndex;
}
colorToUse = treeHighlightColorList[adjustColorIndex(markerColorIndex[id])];
//console.log("getIconColor: id=",id," markerColorIndex=",markerColorIndex[id]," treeColor=",colorToUse)
return colorToUse
}
function createMarker(latlng, name, text, id, icon) {
var html = "<b>" + name + "</b><br>" + text;
// save the color of this icon in a color index array
markerColorIndex[id] = iconIndex;
//console.log('saving marketColorIndex[',id,'] = ',iconIndex)
icon = ((icon != null) ? icon : getIcon());
var marker = new google.maps.Marker({
map: map,
position: latlng,
icon: icon,
title: text
});
google.maps.event.addListener(marker, 'mouseover', function() {
var node = nodeFromId(id);
// CRL: color highlight path according to icon color
highlightParents(node, getIconColor(id), "3px");
textOn(node[0]);
});
google.maps.event.addListener(marker, 'mouseout', function() {
if (typeof clickedOn === "undefined" || !clickedOn) {
var node = nodeFromId(id);
highlightParents(node, "#ccc", "1.5px");
textOff(node[0], true);
}
});
// enable persistent highlight after click ?
google.maps.event.addListener(marker, 'click', function() {
clickedOn = clickedOn == true ? false : true;
var node = nodeFromId(id);
if (clickedOn) {
infoWindow.setContent(html);
infoWindow.open(map, marker);
// CRL: change so highlights match icon colors
highlightParents(node, getIconColor(id), "3px");
//highlightParents(node, "red", "3px");
textOn(node[0]);
} else {
infoWindow.close();
highlightParents(node, "#ccc", "1.5px");
textOff(node[0], true);
}
});
// store in index the id/markerIndex key/value pair, easier to delete later.
if (typeof markerIndex[id] === "object") {
markerIndex[id].push(markers.length);
} else {
markerIndex[id] = [markers.length];
}
markers.push(marker);
}
function highlightParents (node, color, size) {
// ensure our helper functions were included
if (typeof highlightPath != 'undefined' && typeof nodeFromId != 'undefined') {
if (node[0].length > 0) {
highlightPath(node.datum(), color, size);
}
}
}
var map;
var clickedOn;
var mapOptions;
var overlays = [];
var markers = [];
var markerColorIndex = [];
var markerIndex = {};
var infoWindow;
var locationSelect;
var phylotree = {};
var iconIndex = 0;
var iconList = [
'http://maps.google.com/mapfiles/ms/icons/red-dot.png',
'http://maps.google.com/mapfiles/ms/icons/blue-dot.png',
'http://maps.google.com/mapfiles/ms/icons/green-dot.png',
'http://maps.google.com/mapfiles/ms/icons/orange-dot.png',
'http://maps.google.com/mapfiles/ms/icons/pink-dot.png',
'http://maps.google.com/mapfiles/ms/icons/ltblue-dot.png',
'http://maps.google.com/mapfiles/ms/icons/purple-dot.png',
'http://maps.google.com/mapfiles/ms/icons/yellow-dot.png',
'http://maps.google.com/mapfiles/ms/icons/red-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/blue-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/grn-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/pink-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/ltblu-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/purple-pushpin.png',
'http://maps.google.com/mapfiles/ms/icons/ylw-pushpin.png',
]
// adding matching hightlight colors to match the icons, so the tree hightlight color matches the icon color
var treeHighlightColorList = [ "red","blue","green","orange","pink","lightblue","purple","yellow","red","blue","green","pink","lightblue","purple","yellow"]
//function load() {
addLoadEvent(function () {
mapOptions = {
//center: new google.maps.LatLng(18.994609, -71.345215),
//zoom: 6,
center: new google.maps.LatLng(9.65, -83.0),
zoom: 5,
mapTypeId: google.maps.MapTypeId.ROADMAP
};
map = new google.maps.Map(d3.select("#map_canvas").node(),
mapOptions);
var drawingManager = new google.maps.drawing.DrawingManager({
drawingMode: google.maps.drawing.OverlayType.MARKER,
drawingControl: true,
drawingControlOptions: {
position: google.maps.ControlPosition.TOP_CENTER,
drawingModes: ['marker', 'circle', 'rectangle']
},
markerOptions: {icon: 'https://developers.google.com/maps/documentation/javascript/examples/full/images/beachflag.png'},
});
drawingManager.setMap(map);
google.maps.event.addListener(drawingManager, 'overlaycomplete', function(event) {
var searchUrl;
// create the restful style url to load data. The named arguments we used to use had to be
// replaced by positional arguments with a later version of Tangelo. Notice the /rect/id/long, etc.
// using up positions in the URL since the names of the arguments didn't work anymore.
if (event.type == google.maps.drawing.OverlayType.CIRCLE) {
searchLocationsNearCircle(
event.overlay.getCenter().lat(),
event.overlay.getCenter().lng(),
event.overlay.getRadius())
} else if (event.type == google.maps.drawing.OverlayType.RECTANGLE) {
searchLocationsNearRect(
event.overlay.bounds.getSouthWest().lng(),
event.overlay.bounds.getSouthWest().lat(),
event.overlay.bounds.getNorthEast().lng(),
event.overlay.bounds.getNorthEast().lat())
}
overlays.push(event.overlay);
});
var mylatlng = new google.maps.LatLng(-25.363882,131.044922);
infoWindow = new google.maps.InfoWindow();
});
function old_updateGeoJSDisplay() {
var spec = {
center: {
x: 0,
y: 0
},
zoom: 4,
layers: [{
renderer: 'vgl',
features: [{
type: 'point',
size: function (d) { return 5; },
position: function (d) { return {x: d.lon, y: d.lat}; },
fill: true,
fillColor: function (d) { return 'red'; },
fillOpacity: function (d) { return 0.5 ; },
stroke: true,
strokeColor: function (d) { return 'black'; },
strokeOpacity: 1,
strokeWidth: 2
}]
}]
};
spec.data = phylomap.selectedOccurrences;
return $('#geojs_map_canvas').geojsMap(spec);
}
//------------------------------------
// this function looks at the first entry in the selection list and makes an entry for each
// attribute so it could be chosen as the one to color the occurrences by
function fillAttributeSelector() {
var sample = phylomap.selectedOccurrences[0]
var list = []
for (attrib in sample) {
list.push(attrib)
}
d3.select("#geojs_attribute").selectAll("option").remove();
d3.select("#geojs_attribute").selectAll("option")
.data(list)
.enter().append("option")
.text(function (d) { return d; });
}
function returnDataAsText(p) {
var text = ''
for (var attrib in p) {
if (attrib.length>0) {
text += attrib+':'+p[attrib]+'\n'
}
}
return text
}
// #bb5a00 - reddish/brows for low values
// #ffffff - white for top values
function geojs_addVectorLayer(points) {
//console.log(points,"\n");
var markers = phylomap.geojsmap.map.createLayer("feature",{"renderer":"vgl"})
var uiLayer = phylomap.geojsmap.map.createLayer('ui', {"renderer":"vgl"});
var tooltip = uiLayer.createWidget('dom', {position: {x: 0, y: 0}});
tooltipElem = $(tooltip.canvas()).attr('id', 'tooltip').addClass('hidden');
// Add a vector layer to the map. Fill the layer with all the points that are currently selected
for (var i = 0; i < points.length; i++) {
//console.log(points[0])
var lng_float = points[i]['lon']
var lat_float = points[i]['lat']
var pointText = returnDataAsText(points[i])
// add a point to the d3 layer
markers.createFeature("point",{selectionAPI:true})
.data([{x:lng_float, y:lat_float,text:pointText }])
.position(function(d) { return {x: d.x, y: d.y};} )
.style("fillColor", function(d) { return {r: 0, g: 1, b: 0};})
.style('strokeColor', 'black')
.geoOn(geo.event.feature.mouseclick, function (evt) {
console.log(evt)
phylomap.geojsmap.map.center({x: evt.data.x, y: evt.data.y});
})
.geoOn(geo.event.feature.mouseover, function (evt) {
this.modified();
markers.map().draw();
//tooltip.position({x: evt.data.x+0.015, y: evt.data.y+0.015});
tooltip.position({x: evt.data.x+0.0015, y: evt.data.y+0.0015});
tooltipElem.text(' '+evt.data.text);
tooltipElem.removeClass('hidden');
})
.geoOn(geo.event.feature.mouseout, function (evt) {
// evt.data.opacity = 1.0;
// evt.data.strokeOpacity = 1.0;
// this.modified();
//markers.map().draw();
tooltipElem.addClass('hidden');
})
.style('fillOpacity', 1.0)
.style('strokeOpacity', 1.0)
}
// save markers layer globally
phylomap.geojsmap.markers = markers
phylomap.geojsmap.map.draw();
}
// this function loops through all of the occurrence points and assigns colors depending on the value
// of the individual occurence point within the range across all the points
function updateOccurrencePointColors() {
var minRed = 160.0/256.0
var minGreen = 80.0/256.0
// find out which attribute has been selected
var attribSelector = d3.select("#geojs_attribute").node();
var selectedAttrib = attribSelector.options[attribSelector.selectedIndex].text;
console.log('selected attrib is:',selectedAttrib)
candela_addGeoDots(phylomap.selectedOccurrences,selectedAttrib)
}
function geojs_resize() {
phylomap.geojsmap.map.resize(0, 0, $('#geojs_map_canvas').width()*0.9, $('#geojs_map_canvas').height());
}
function geojs_addBaseLayer() {
var map;
// clear out the old map
$('#geojs_map_canvas').empty()
map = geo.map({
node: '#geojs_map_canvas',
zoom: 2
});
map.createLayer('osm');
phylomap.geojsmap.map = map;
}
// this function is called as soon as the page is finished loading
function updateGeoJSDisplay() {
phylomap.geojsmap = {}
phylomap.geojsmap.map = null
phylomap.geojsmap.markers = null
phylomap.geojsmap.map = null
phylomap.geojsmap.markers = null
phylomap.geojsmap.previouscolor = null
//Proj4js.defs["EPSG:4326"] = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs";
//Proj4js.defs["EPSG:3031"] = "+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs";
//Proj4js.defs["EPSG:900913"] = "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +no_defs";
$(window).resize(geojs_resize);
fillAttributeSelector();
d3.select("#geojs_attribute")
.on("change", updateOccurrencePointColors);
geojs_addBaseLayer();
geojs_resize();
geojs_addVectorLayer(phylomap.selectedOccurrences);
}
// ---------- Candela plotting functions
// this function is called as soon as the page is finished loading
function updateCandelaDisplay() {
fillAttributeSelector();
d3.select("#geojs_attribute")
.on("change", updateOccurrencePointColors);
//geojs_addBaseLayer();
//candela_resize();
candela_addGeoDots(phylomap.selectedOccurrences);
}
function candela_resize() {
phylomap.geojsmap.map.resize(0, 0, $('#geojs_map_canvas').width()*0.7, $('#geojs_map_canvas').height());
}
function candela_addGeoDots(points, attrib='Poll') {
//console.log('geodots:',points,"\n");
//var markers = phylomap.geojsmap.map.createLayer("feature",{"renderer":"vgl"})
//var uiLayer = phylomap.geojsmap.map.createLayer('ui', {"renderer":"vgl"});
//var tooltip = uiLayer.createWidget('dom', {position: {x: 0, y: 0}});
//tooltipElem = $(tooltip.canvas()).attr('id', 'tooltip').addClass('hidden');
// Add a vector layer to the map. Fill the layer with all the points that are currently selected
$("#candela_map_canvas").empty();
var el = document.getElementById('candela_map_canvas')
el.style.width = '900px';
el.style.height = '1000px';
document.body.appendChild(el);
phylomap.candela_map = new candela.components.GeoDots(el, {
zoom: 7,
center: {
longitude: -82.948, | },
data: points,
width: 1000,
height: 700,
latitude: 'lat',
longitude: 'lon',
size: 'renderSize',
//tileUrl: 'http://c.tiles.wmflabs.org/hillshading/${z}/${x}/${y}.png',
//tileUrl: 'http://tile.stamen.com/terrain/${z}/${x}/${y}.jpg',
tileUrl: 'https://{s}.tile.thunderforest.com/landscape/{z}/{x}/{y}.png?apikey=6548db9e547c4c5eacc2304ee947ebbe',
color: attrib
});
phylomap.candela_map.render();
} | latitude: 9.9725 | random_line_split |
update-tuple.spec.tsx | import app, { Component, Update } from '../src/apprun';
describe('Component', () => {
it('should support non-event-typed update tuple', () => {
class | extends Component {
state = 0;
update = [
['+1', state => ++state, { once: true }],
['+1a', state => ++state],
];
}
const t = new Test().start() as any;
t.run('+1');
t.run('+1');
t.run('+1a');
expect(t.state).toEqual(2);
})
it('should support state-typed update tuple and event alias', () => {
class Test extends Component {
state = 0;
update: Update<number> = [
['method1, method2', state => ++state]
];
}
const t = new Test().start() as any;
t.run('method1');
t.run('method2');
expect(t.state).toEqual(2);
})
it('should support event-typed update tuple', () => {
type Events = '+1-once' | '+1';
class Test extends Component<number, Events> {
state = 0;
update: Update<number, Events> = [
['+1-once', state => ++state, { once: true }],
['+1', state => ++state],
];
}
const t = new Test().start() as any;
t.run('+1-once');
t.run('+1-once');
t.run('+1');
expect(t.state).toEqual(2);
})
}) | Test | identifier_name |
update-tuple.spec.tsx | import app, { Component, Update } from '../src/apprun';
describe('Component', () => {
it('should support non-event-typed update tuple', () => {
class Test extends Component {
state = 0;
update = [
['+1', state => ++state, { once: true }],
['+1a', state => ++state],
];
}
const t = new Test().start() as any;
t.run('+1');
t.run('+1');
t.run('+1a');
expect(t.state).toEqual(2);
})
it('should support state-typed update tuple and event alias', () => {
class Test extends Component {
state = 0;
update: Update<number> = [
['method1, method2', state => ++state]
];
}
const t = new Test().start() as any;
t.run('method1'); |
type Events = '+1-once' | '+1';
class Test extends Component<number, Events> {
state = 0;
update: Update<number, Events> = [
['+1-once', state => ++state, { once: true }],
['+1', state => ++state],
];
}
const t = new Test().start() as any;
t.run('+1-once');
t.run('+1-once');
t.run('+1');
expect(t.state).toEqual(2);
})
}) | t.run('method2');
expect(t.state).toEqual(2);
})
it('should support event-typed update tuple', () => { | random_line_split |
style.ts | import styled, { css } from 'styled-components';
import { centerIcon } from '~/renderer/mixins';
import {
TOOLBAR_BUTTON_WIDTH,
TOOLBAR_BUTTON_HEIGHT,
} from '~/constants/design';
import { ITheme } from '~/interfaces';
export const Icon = styled.div`
width: 100%;
height: 100%;
will-change: background-image;
transition: 0.15s background-image;
backface-visibility: hidden;
${({
size,
disabled,
opacity,
autoInvert,
theme,
dense,
}: {
size: number;
disabled: boolean;
opacity: number;
autoInvert?: boolean;
dense?: boolean;
theme?: ITheme;
}) => css`
${centerIcon(size)};
opacity: ${disabled ? 0.25 : opacity};
filter: ${autoInvert && theme['toolbar.lightForeground']
? 'invert(100%)'
: 'none'};
`};
`;
export const Button = styled.div`
border-radius: 2px;
position: relative;
transition: 0.2s background-color;
backface-visibility: hidden;
margin: 0 1px;
${({
theme,
toggled,
disabled,
dense,
}: {
theme: ITheme;
toggled: boolean;
disabled: boolean;
dense: boolean;
}) => css`
border-radius: ${dense ? 2 : 4}px;
height: ${dense ? 26 : TOOLBAR_BUTTON_HEIGHT}px;
min-width: ${dense ? 34 : TOOLBAR_BUTTON_WIDTH}px;
pointer-events: ${disabled ? 'none' : 'inherit'};
-webkit-app-region: ${disabled ? 'drag' : 'no-drag'};
background-color: ${toggled
? theme['toolbar.lightForeground']
? 'rgba(255, 255, 255, 0.12)'
: 'rgba(0, 0, 0, 0.1)'
: 'none'};
&:active {
background-color: ${theme['toolbar.lightForeground']
? 'rgba(255, 255, 255, 0.12)'
: 'rgba(0, 0, 0, 0.1)'} !important;
}
${!toggled &&
css`
&:hover {
background-color: ${theme['toolbar.lightForeground']
? 'rgba(255, 255, 255, 0.08)'
: 'rgba(0, 0, 0, 0.06)'};
}
`};
`};
`;
interface BadgeProps {
background: string;
color: string;
right: number;
top: number;
}
export const Badge = styled.div`
position: absolute;
padding: 1px 3px;
border-radius: 8px;
min-height: 6px;
pointer-events: none;
z-index: 5;
font-size: 8px;
${({ background, color, top, right }: BadgeProps) => css`
background-color: ${background};
color: ${color};
right: ${right}px;
top: ${top}px;
`};
`;
export const PreloaderBg = styled.div`
width: 32px;
height: 32px;
pointer-events: none;
position: absolute;
left: 50%;
top: 50%;
transform: translate(-50%, -50%);
border-radius: 50%; | ? 'rgba(255, 255, 255, 0.1)'
: 'rgba(0, 0, 0, 0.06)'};
`};
`; |
${({ theme }: { theme: ITheme }) => css`
border: 3px solid
${theme['toolbar.lightForeground'] | random_line_split |
GeopointInput.js | import PropTypes from 'prop-types'
import React from 'react'
import config from 'config:@lyra/google-maps-input'
import Button from 'part:@lyra/components/buttons/default'
import Dialog from 'part:@lyra/components/dialogs/default'
import Fieldset from 'part:@lyra/components/fieldsets/default'
import {
PatchEvent,
set,
setIfMissing,
unset
} from 'part:@lyra/form-builder/patch-event'
import styles from '../styles/GeopointInput.css'
import GeopointSelect from './GeopointSelect'
import GoogleMapsLoadProxy from './GoogleMapsLoadProxy'
const getLocale = context => {
const intl = context.intl || {}
return (
intl.locale ||
(typeof window !== 'undefined' && window.navigator.language) ||
'en'
)
}
const getStaticImageUrl = value => {
const loc = `${value.lat},${value.lng}`
const params = {
key: config.apiKey,
center: loc,
markers: loc,
zoom: 13,
scale: 2,
size: '640x300'
}
const qs = Object.keys(params).reduce((res, param) => {
return res.concat(`${param}=${encodeURIComponent(params[param])}`)
}, [])
return `https://maps.googleapis.com/maps/api/staticmap?${qs.join('&')}`
}
class GeopointInput extends React.Component {
static propTypes = {
onChange: PropTypes.func.isRequired,
markers: PropTypes.arrayOf(
PropTypes.shape({
type: PropTypes.string
})
),
value: PropTypes.shape({
lat: PropTypes.number,
lng: PropTypes.number
}),
type: PropTypes.shape({
title: PropTypes.string.isRequired,
description: PropTypes.string
})
}
static defaultProps = {
markers: []
}
static contextTypes = {
intl: PropTypes.shape({
locale: PropTypes.string
})
}
constructor() {
super()
this.handleToggleModal = this.handleToggleModal.bind(this)
this.handleCloseModal = this.handleCloseModal.bind(this)
this.state = {
modalOpen: false
}
}
handleToggleModal() {
this.setState(prevState => ({modalOpen: !prevState.modalOpen}))
}
handleChange = latLng => {
const {type, onChange} = this.props
onChange(
PatchEvent.from([
setIfMissing({
_type: type.name
}),
set(latLng.lat(), ['lat']),
set(latLng.lng(), ['lng'])
])
)
}
handleClear = () => {
const {onChange} = this.props
onChange(PatchEvent.from(unset()))
}
handleCloseModal() {
this.setState({modalOpen: false})
}
| () {
const {value, type, markers} = this.props
if (!config || !config.apiKey) {
return (
<div>
<p>
The{' '}
<a href="https://vegapublish.com/docs/schema-types/geopoint-type">
Geopoint type
</a>{' '}
needs a Google Maps API key with access to:
</p>
<ul>
<li>Google Maps JavaScript API</li>
<li>Google Places API Web Service</li>
<li>Google Static Maps API</li>
</ul>
<p>
Please enter the API key with access to these services in
<code style={{whitespace: 'nowrap'}}>
`<project-root>/config/@lyra/google-maps-input.json`
</code>
</p>
</div>
)
}
return (
<Fieldset
legend={type.title}
description={type.description}
className={styles.root}
markers={markers}
>
{value && (
<div>
<img
className={styles.previewImage}
src={getStaticImageUrl(value)}
/>
</div>
)}
<div className={styles.functions}>
<Button onClick={this.handleToggleModal}>
{value ? 'Edit' : 'Set location'}
</Button>
{value && (
<Button type="button" onClick={this.handleClear}>
Remove
</Button>
)}
</div>
{this.state.modalOpen && (
<Dialog
title="Place on map"
onClose={this.handleCloseModal}
onCloseClick={this.handleCloseModal}
onOpen={this.handleOpenModal}
message="Select location by dragging the marker or search for a place"
isOpen={this.state.modalOpen}
>
<div className={styles.dialogInner}>
<GoogleMapsLoadProxy
value={value}
apiKey={config.apiKey}
onChange={this.handleChange}
defaultLocation={config.defaultLocation}
defaultZoom={config.defaultZoom}
locale={getLocale(this.context)}
component={GeopointSelect}
/>
</div>
</Dialog>
)}
</Fieldset>
)
}
}
export default GeopointInput
| render | identifier_name |
GeopointInput.js | import PropTypes from 'prop-types'
import React from 'react'
import config from 'config:@lyra/google-maps-input'
import Button from 'part:@lyra/components/buttons/default'
import Dialog from 'part:@lyra/components/dialogs/default'
import Fieldset from 'part:@lyra/components/fieldsets/default'
import {
PatchEvent,
set,
setIfMissing,
unset
} from 'part:@lyra/form-builder/patch-event'
import styles from '../styles/GeopointInput.css'
import GeopointSelect from './GeopointSelect'
import GoogleMapsLoadProxy from './GoogleMapsLoadProxy'
const getLocale = context => {
const intl = context.intl || {}
return (
intl.locale ||
(typeof window !== 'undefined' && window.navigator.language) ||
'en'
)
}
const getStaticImageUrl = value => {
const loc = `${value.lat},${value.lng}`
const params = {
key: config.apiKey,
center: loc,
markers: loc,
zoom: 13,
scale: 2,
size: '640x300'
}
const qs = Object.keys(params).reduce((res, param) => {
return res.concat(`${param}=${encodeURIComponent(params[param])}`)
}, [])
return `https://maps.googleapis.com/maps/api/staticmap?${qs.join('&')}`
}
class GeopointInput extends React.Component {
static propTypes = {
onChange: PropTypes.func.isRequired,
markers: PropTypes.arrayOf(
PropTypes.shape({
type: PropTypes.string
})
),
value: PropTypes.shape({
lat: PropTypes.number,
lng: PropTypes.number
}),
type: PropTypes.shape({
title: PropTypes.string.isRequired,
description: PropTypes.string
})
}
static defaultProps = {
markers: []
}
static contextTypes = {
intl: PropTypes.shape({
locale: PropTypes.string
})
}
constructor() {
super()
this.handleToggleModal = this.handleToggleModal.bind(this)
this.handleCloseModal = this.handleCloseModal.bind(this)
this.state = {
modalOpen: false
}
}
handleToggleModal() {
this.setState(prevState => ({modalOpen: !prevState.modalOpen}))
}
handleChange = latLng => {
const {type, onChange} = this.props
onChange(
PatchEvent.from([
setIfMissing({
_type: type.name
}),
set(latLng.lat(), ['lat']),
set(latLng.lng(), ['lng'])
])
)
}
handleClear = () => {
const {onChange} = this.props
onChange(PatchEvent.from(unset()))
}
handleCloseModal() {
this.setState({modalOpen: false})
}
render() {
const {value, type, markers} = this.props
if (!config || !config.apiKey) |
return (
<Fieldset
legend={type.title}
description={type.description}
className={styles.root}
markers={markers}
>
{value && (
<div>
<img
className={styles.previewImage}
src={getStaticImageUrl(value)}
/>
</div>
)}
<div className={styles.functions}>
<Button onClick={this.handleToggleModal}>
{value ? 'Edit' : 'Set location'}
</Button>
{value && (
<Button type="button" onClick={this.handleClear}>
Remove
</Button>
)}
</div>
{this.state.modalOpen && (
<Dialog
title="Place on map"
onClose={this.handleCloseModal}
onCloseClick={this.handleCloseModal}
onOpen={this.handleOpenModal}
message="Select location by dragging the marker or search for a place"
isOpen={this.state.modalOpen}
>
<div className={styles.dialogInner}>
<GoogleMapsLoadProxy
value={value}
apiKey={config.apiKey}
onChange={this.handleChange}
defaultLocation={config.defaultLocation}
defaultZoom={config.defaultZoom}
locale={getLocale(this.context)}
component={GeopointSelect}
/>
</div>
</Dialog>
)}
</Fieldset>
)
}
}
export default GeopointInput
| {
return (
<div>
<p>
The{' '}
<a href="https://vegapublish.com/docs/schema-types/geopoint-type">
Geopoint type
</a>{' '}
needs a Google Maps API key with access to:
</p>
<ul>
<li>Google Maps JavaScript API</li>
<li>Google Places API Web Service</li>
<li>Google Static Maps API</li>
</ul>
<p>
Please enter the API key with access to these services in
<code style={{whitespace: 'nowrap'}}>
`<project-root>/config/@lyra/google-maps-input.json`
</code>
</p>
</div>
)
} | conditional_block |
GeopointInput.js | import PropTypes from 'prop-types'
import React from 'react'
import config from 'config:@lyra/google-maps-input'
import Button from 'part:@lyra/components/buttons/default'
import Dialog from 'part:@lyra/components/dialogs/default'
import Fieldset from 'part:@lyra/components/fieldsets/default'
import {
PatchEvent,
set,
setIfMissing,
unset
} from 'part:@lyra/form-builder/patch-event'
import styles from '../styles/GeopointInput.css'
import GeopointSelect from './GeopointSelect'
import GoogleMapsLoadProxy from './GoogleMapsLoadProxy'
const getLocale = context => {
const intl = context.intl || {}
return (
intl.locale ||
(typeof window !== 'undefined' && window.navigator.language) ||
'en'
)
}
const getStaticImageUrl = value => {
const loc = `${value.lat},${value.lng}`
const params = {
key: config.apiKey,
center: loc,
markers: loc,
zoom: 13,
scale: 2,
size: '640x300'
}
const qs = Object.keys(params).reduce((res, param) => {
return res.concat(`${param}=${encodeURIComponent(params[param])}`)
}, [])
return `https://maps.googleapis.com/maps/api/staticmap?${qs.join('&')}`
}
class GeopointInput extends React.Component {
static propTypes = {
onChange: PropTypes.func.isRequired,
markers: PropTypes.arrayOf(
PropTypes.shape({
type: PropTypes.string
})
),
value: PropTypes.shape({
lat: PropTypes.number,
lng: PropTypes.number
}),
type: PropTypes.shape({
title: PropTypes.string.isRequired,
description: PropTypes.string
})
}
static defaultProps = {
markers: []
}
static contextTypes = {
intl: PropTypes.shape({
locale: PropTypes.string
})
}
constructor() {
super()
this.handleToggleModal = this.handleToggleModal.bind(this)
this.handleCloseModal = this.handleCloseModal.bind(this)
this.state = {
modalOpen: false
}
}
handleToggleModal() |
handleChange = latLng => {
const {type, onChange} = this.props
onChange(
PatchEvent.from([
setIfMissing({
_type: type.name
}),
set(latLng.lat(), ['lat']),
set(latLng.lng(), ['lng'])
])
)
}
handleClear = () => {
const {onChange} = this.props
onChange(PatchEvent.from(unset()))
}
handleCloseModal() {
this.setState({modalOpen: false})
}
render() {
const {value, type, markers} = this.props
if (!config || !config.apiKey) {
return (
<div>
<p>
The{' '}
<a href="https://vegapublish.com/docs/schema-types/geopoint-type">
Geopoint type
</a>{' '}
needs a Google Maps API key with access to:
</p>
<ul>
<li>Google Maps JavaScript API</li>
<li>Google Places API Web Service</li>
<li>Google Static Maps API</li>
</ul>
<p>
Please enter the API key with access to these services in
<code style={{whitespace: 'nowrap'}}>
`<project-root>/config/@lyra/google-maps-input.json`
</code>
</p>
</div>
)
}
return (
<Fieldset
legend={type.title}
description={type.description}
className={styles.root}
markers={markers}
>
{value && (
<div>
<img
className={styles.previewImage}
src={getStaticImageUrl(value)}
/>
</div>
)}
<div className={styles.functions}>
<Button onClick={this.handleToggleModal}>
{value ? 'Edit' : 'Set location'}
</Button>
{value && (
<Button type="button" onClick={this.handleClear}>
Remove
</Button>
)}
</div>
{this.state.modalOpen && (
<Dialog
title="Place on map"
onClose={this.handleCloseModal}
onCloseClick={this.handleCloseModal}
onOpen={this.handleOpenModal}
message="Select location by dragging the marker or search for a place"
isOpen={this.state.modalOpen}
>
<div className={styles.dialogInner}>
<GoogleMapsLoadProxy
value={value}
apiKey={config.apiKey}
onChange={this.handleChange}
defaultLocation={config.defaultLocation}
defaultZoom={config.defaultZoom}
locale={getLocale(this.context)}
component={GeopointSelect}
/>
</div>
</Dialog>
)}
</Fieldset>
)
}
}
export default GeopointInput
| {
this.setState(prevState => ({modalOpen: !prevState.modalOpen}))
} | identifier_body |
GeopointInput.js | import PropTypes from 'prop-types'
import React from 'react'
import config from 'config:@lyra/google-maps-input'
import Button from 'part:@lyra/components/buttons/default'
import Dialog from 'part:@lyra/components/dialogs/default'
import Fieldset from 'part:@lyra/components/fieldsets/default'
import {
PatchEvent,
set,
setIfMissing,
unset
} from 'part:@lyra/form-builder/patch-event'
import styles from '../styles/GeopointInput.css'
import GeopointSelect from './GeopointSelect' | import GoogleMapsLoadProxy from './GoogleMapsLoadProxy'
const getLocale = context => {
const intl = context.intl || {}
return (
intl.locale ||
(typeof window !== 'undefined' && window.navigator.language) ||
'en'
)
}
const getStaticImageUrl = value => {
const loc = `${value.lat},${value.lng}`
const params = {
key: config.apiKey,
center: loc,
markers: loc,
zoom: 13,
scale: 2,
size: '640x300'
}
const qs = Object.keys(params).reduce((res, param) => {
return res.concat(`${param}=${encodeURIComponent(params[param])}`)
}, [])
return `https://maps.googleapis.com/maps/api/staticmap?${qs.join('&')}`
}
class GeopointInput extends React.Component {
static propTypes = {
onChange: PropTypes.func.isRequired,
markers: PropTypes.arrayOf(
PropTypes.shape({
type: PropTypes.string
})
),
value: PropTypes.shape({
lat: PropTypes.number,
lng: PropTypes.number
}),
type: PropTypes.shape({
title: PropTypes.string.isRequired,
description: PropTypes.string
})
}
static defaultProps = {
markers: []
}
static contextTypes = {
intl: PropTypes.shape({
locale: PropTypes.string
})
}
constructor() {
super()
this.handleToggleModal = this.handleToggleModal.bind(this)
this.handleCloseModal = this.handleCloseModal.bind(this)
this.state = {
modalOpen: false
}
}
handleToggleModal() {
this.setState(prevState => ({modalOpen: !prevState.modalOpen}))
}
handleChange = latLng => {
const {type, onChange} = this.props
onChange(
PatchEvent.from([
setIfMissing({
_type: type.name
}),
set(latLng.lat(), ['lat']),
set(latLng.lng(), ['lng'])
])
)
}
handleClear = () => {
const {onChange} = this.props
onChange(PatchEvent.from(unset()))
}
handleCloseModal() {
this.setState({modalOpen: false})
}
render() {
const {value, type, markers} = this.props
if (!config || !config.apiKey) {
return (
<div>
<p>
The{' '}
<a href="https://vegapublish.com/docs/schema-types/geopoint-type">
Geopoint type
</a>{' '}
needs a Google Maps API key with access to:
</p>
<ul>
<li>Google Maps JavaScript API</li>
<li>Google Places API Web Service</li>
<li>Google Static Maps API</li>
</ul>
<p>
Please enter the API key with access to these services in
<code style={{whitespace: 'nowrap'}}>
`<project-root>/config/@lyra/google-maps-input.json`
</code>
</p>
</div>
)
}
return (
<Fieldset
legend={type.title}
description={type.description}
className={styles.root}
markers={markers}
>
{value && (
<div>
<img
className={styles.previewImage}
src={getStaticImageUrl(value)}
/>
</div>
)}
<div className={styles.functions}>
<Button onClick={this.handleToggleModal}>
{value ? 'Edit' : 'Set location'}
</Button>
{value && (
<Button type="button" onClick={this.handleClear}>
Remove
</Button>
)}
</div>
{this.state.modalOpen && (
<Dialog
title="Place on map"
onClose={this.handleCloseModal}
onCloseClick={this.handleCloseModal}
onOpen={this.handleOpenModal}
message="Select location by dragging the marker or search for a place"
isOpen={this.state.modalOpen}
>
<div className={styles.dialogInner}>
<GoogleMapsLoadProxy
value={value}
apiKey={config.apiKey}
onChange={this.handleChange}
defaultLocation={config.defaultLocation}
defaultZoom={config.defaultZoom}
locale={getLocale(this.context)}
component={GeopointSelect}
/>
</div>
</Dialog>
)}
</Fieldset>
)
}
}
export default GeopointInput | random_line_split | |
translate-text.service.spec.ts | // Copyright 2020 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Tests for translate-text service.
*/
require(
'pages/contributor-dashboard-page/services/translate-text.service.ts');
describe('TranslateTextService', function() {
let TranslateTextService;
let $httpBackend;
beforeEach(angular.mock.module('oppia'));
beforeEach(angular.mock.inject(function($injector, $q) {
TranslateTextService = $injector.get('TranslateTextService');
$httpBackend = $injector.get('$httpBackend');
}));
afterEach(function() {
$httpBackend.verifyNoOutstandingExpectation();
$httpBackend.verifyNoOutstandingRequest();
});
describe('getTextToTranslate', function() {
it('should return all texts per state', function() {
let textAndAvailability;
$httpBackend.expect(
'GET', '/gettranslatabletexthandler?exp_id=1&language_code=en')
.respond({
state_names_to_content_id_mapping: {
stateName1: {contentId1: 'text1', contentId2: 'text2'},
stateName2: {contentId3: 'text3'}
},
version: 1
});
TranslateTextService.init('1', 'en', () => {});
$httpBackend.flush();
const expectedTextAndAvailability1 = {
text: 'text2',
more: true
};
textAndAvailability = TranslateTextService.getTextToTranslate();
expect(textAndAvailability).toEqual(expectedTextAndAvailability1);
const expectedTextAndAvailability2 = {
text: 'text1',
more: true
};
textAndAvailability = TranslateTextService.getTextToTranslate();
expect(textAndAvailability).toEqual(expectedTextAndAvailability2);
const expectedTextAndAvailability3 = {
text: 'text3',
more: false
};
textAndAvailability = TranslateTextService.getTextToTranslate();
expect(textAndAvailability).toEqual(expectedTextAndAvailability3);
});
it('should return no more available for states with no texts', function() {
const expectedTextAndAvailability = {
text: 'text1',
more: false
};
$httpBackend.expect(
'GET', '/gettranslatabletexthandler?exp_id=1&language_code=en')
.respond({
state_names_to_content_id_mapping: {
stateName1: {contentId1: 'text1'},
stateName2: {contentId2: ''}
},
version: 1
}); | const textAndAvailability = TranslateTextService.getTextToTranslate();
expect(textAndAvailability).toEqual(expectedTextAndAvailability);
});
it('should return {null, False} for completely empty states', function() {
const expectedTextAndAvailability = {
text: null,
more: false
};
$httpBackend.expect(
'GET', '/gettranslatabletexthandler?exp_id=1&language_code=en')
.respond({
state_names_to_content_id_mapping: {
stateName1: {contentId1: ''},
stateName2: {contentId2: ''}
},
version: 1
});
TranslateTextService.init('1', 'en', () => {});
$httpBackend.flush();
const textAndAvailability = TranslateTextService.getTextToTranslate();
expect(textAndAvailability).toEqual(expectedTextAndAvailability);
});
});
}); | TranslateTextService.init('1', 'en', () => {});
$httpBackend.flush();
| random_line_split |
view_utils.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {assertDataInRange, assertDefined, assertGreaterThan, assertLessThan} from '../../util/assert';
import {LContainer, TYPE} from '../interfaces/container';
import {LContext, MONKEY_PATCH_KEY_NAME} from '../interfaces/context';
import {ComponentDef, DirectiveDef} from '../interfaces/definition';
import {TNode, TNodeFlags} from '../interfaces/node';
import {RNode} from '../interfaces/renderer';
import {StylingContext} from '../interfaces/styling';
import {FLAGS, HEADER_OFFSET, HOST, LView, LViewFlags, PARENT, PREORDER_HOOK_FLAGS, TData, TVIEW} from '../interfaces/view';
/**
* For efficiency reasons we often put several different data types (`RNode`, `LView`, `LContainer`,
* `StylingContext`) in same location in `LView`. This is because we don't want to pre-allocate
* space for it because the storage is sparse. This file contains utilities for dealing with such
* data types.
*
* How do we know what is stored at a given location in `LView`.
* - `Array.isArray(value) === false` => `RNode` (The normal storage value)
* - `Array.isArray(value) === true` => then the `value[0]` represents the wrapped value.
* - `typeof value[TYPE] === 'object'` => `LView`
* - This happens when we have a component at a given location
* - `typeof value[TYPE] === 'number'` => `StylingContext`
* - This happens when we have style/class binding at a given location.
* - `typeof value[TYPE] === true` => `LContainer`
* - This happens when we have `LContainer` binding at a given location.
*
*
* NOTE: it is assumed that `Array.isArray` and `typeof` operations are very efficient.
*/
/**
* Returns `RNode`.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function unwrapRNode(value: RNode | LView | LContainer | StylingContext): RNode {
while (Array.isArray(value)) {
value = value[HOST] as any;
}
return value as RNode;
}
/**
* Returns `LView` or `null` if not found.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function unwrapLView(value: RNode | LView | LContainer | StylingContext): LView|null {
while (Array.isArray(value)) {
// This check is same as `isLView()` but we don't call at as we don't want to call
// `Array.isArray()` twice and give JITer more work for inlining.
if (typeof value[TYPE] === 'object') return value as LView;
value = value[HOST] as any;
}
return null;
}
/**
* Returns `LContainer` or `null` if not found.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function unwrapLContainer(value: RNode | LView | LContainer | StylingContext): LContainer|
null {
while (Array.isArray(value)) {
// This check is same as `isLContainer()` but we don't call at as we don't want to call
// `Array.isArray()` twice and give JITer more work for inlining.
if (value[TYPE] === true) return value as LContainer;
value = value[HOST] as any;
}
return null;
}
/**
* Returns `StylingContext` or `null` if not found.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function | (value: RNode | LView | LContainer | StylingContext):
StylingContext|null {
while (Array.isArray(value)) {
// This check is same as `isStylingContext()` but we don't call at as we don't want to call
// `Array.isArray()` twice and give JITer more work for inlining.
if (typeof value[TYPE] === 'number') return value as StylingContext;
value = value[HOST] as any;
}
return null;
}
/**
* True if `value` is `LView`.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function isLView(value: RNode | LView | LContainer | StylingContext | {} | null):
value is LView {
return Array.isArray(value) && typeof value[TYPE] === 'object';
}
/**
* True if `value` is `LContainer`.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function isLContainer(value: RNode | LView | LContainer | StylingContext | {} | null):
value is LContainer {
return Array.isArray(value) && value[TYPE] === true;
}
/**
* True if `value` is `StylingContext`.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function isStylingContext(value: RNode | LView | LContainer | StylingContext | {} | null):
value is StylingContext {
return Array.isArray(value) && typeof value[TYPE] === 'number';
}
/**
* Retrieves an element value from the provided `viewData`, by unwrapping
* from any containers, component views, or style contexts.
*/
export function getNativeByIndex(index: number, lView: LView): RNode {
return unwrapRNode(lView[index + HEADER_OFFSET]);
}
export function getNativeByTNode(tNode: TNode, hostView: LView): RNode {
return unwrapRNode(hostView[tNode.index]);
}
/**
* A helper function that returns `true` if a given `TNode` has any matching directives.
*/
export function hasDirectives(tNode: TNode): boolean {
return tNode.directiveEnd > tNode.directiveStart;
}
export function getTNode(index: number, view: LView): TNode {
ngDevMode && assertGreaterThan(index, -1, 'wrong index for TNode');
ngDevMode && assertLessThan(index, view[TVIEW].data.length, 'wrong index for TNode');
return view[TVIEW].data[index + HEADER_OFFSET] as TNode;
}
/** Retrieves a value from any `LView` or `TData`. */
export function loadInternal<T>(view: LView | TData, index: number): T {
ngDevMode && assertDataInRange(view, index + HEADER_OFFSET);
return view[index + HEADER_OFFSET];
}
export function getComponentViewByIndex(nodeIndex: number, hostView: LView): LView {
// Could be an LView or an LContainer. If LContainer, unwrap to find LView.
const slotValue = hostView[nodeIndex];
const lView = isLView(slotValue) ? slotValue : slotValue[HOST];
return lView;
}
export function isContentQueryHost(tNode: TNode): boolean {
return (tNode.flags & TNodeFlags.hasContentQuery) !== 0;
}
export function isComponent(tNode: TNode): boolean {
return (tNode.flags & TNodeFlags.isComponent) === TNodeFlags.isComponent;
}
export function isComponentDef<T>(def: DirectiveDef<T>): def is ComponentDef<T> {
return (def as ComponentDef<T>).template !== null;
}
export function isRootView(target: LView): boolean {
return (target[FLAGS] & LViewFlags.IsRoot) !== 0;
}
/**
* Returns the monkey-patch value data present on the target (which could be
* a component, directive or a DOM node).
*/
export function readPatchedData(target: any): LView|LContext|null {
ngDevMode && assertDefined(target, 'Target expected');
return target[MONKEY_PATCH_KEY_NAME];
}
export function readPatchedLView(target: any): LView|null {
const value = readPatchedData(target);
if (value) {
return Array.isArray(value) ? value : (value as LContext).lView;
}
return null;
}
/**
* Returns a boolean for whether the view is attached to the change detection tree.
*
* Note: This determines whether a view should be checked, not whether it's inserted
* into a container. For that, you'll want `viewAttachedToContainer` below.
*/
export function viewAttachedToChangeDetector(view: LView): boolean {
return (view[FLAGS] & LViewFlags.Attached) === LViewFlags.Attached;
}
/** Returns a boolean for whether the view is attached to a container. */
export function viewAttachedToContainer(view: LView): boolean {
return isLContainer(view[PARENT]);
}
/**
* Resets the pre-order hook flags of the view.
* @param lView the LView on which the flags are reset
*/
export function resetPreOrderHookFlags(lView: LView) {
lView[PREORDER_HOOK_FLAGS] = 0;
}
| unwrapStylingContext | identifier_name |
view_utils.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {assertDataInRange, assertDefined, assertGreaterThan, assertLessThan} from '../../util/assert';
import {LContainer, TYPE} from '../interfaces/container';
import {LContext, MONKEY_PATCH_KEY_NAME} from '../interfaces/context';
import {ComponentDef, DirectiveDef} from '../interfaces/definition';
import {TNode, TNodeFlags} from '../interfaces/node';
import {RNode} from '../interfaces/renderer';
import {StylingContext} from '../interfaces/styling';
import {FLAGS, HEADER_OFFSET, HOST, LView, LViewFlags, PARENT, PREORDER_HOOK_FLAGS, TData, TVIEW} from '../interfaces/view';
/**
* For efficiency reasons we often put several different data types (`RNode`, `LView`, `LContainer`,
* `StylingContext`) in same location in `LView`. This is because we don't want to pre-allocate
* space for it because the storage is sparse. This file contains utilities for dealing with such
* data types.
*
* How do we know what is stored at a given location in `LView`.
* - `Array.isArray(value) === false` => `RNode` (The normal storage value)
* - `Array.isArray(value) === true` => then the `value[0]` represents the wrapped value.
* - `typeof value[TYPE] === 'object'` => `LView`
* - This happens when we have a component at a given location
* - `typeof value[TYPE] === 'number'` => `StylingContext`
* - This happens when we have style/class binding at a given location.
* - `typeof value[TYPE] === true` => `LContainer`
* - This happens when we have `LContainer` binding at a given location.
*
*
* NOTE: it is assumed that `Array.isArray` and `typeof` operations are very efficient.
*/
/**
* Returns `RNode`.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function unwrapRNode(value: RNode | LView | LContainer | StylingContext): RNode {
while (Array.isArray(value)) {
value = value[HOST] as any;
}
return value as RNode;
}
/**
* Returns `LView` or `null` if not found.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function unwrapLView(value: RNode | LView | LContainer | StylingContext): LView|null {
while (Array.isArray(value)) {
// This check is same as `isLView()` but we don't call at as we don't want to call
// `Array.isArray()` twice and give JITer more work for inlining.
if (typeof value[TYPE] === 'object') return value as LView;
value = value[HOST] as any;
}
return null;
}
/**
* Returns `LContainer` or `null` if not found.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function unwrapLContainer(value: RNode | LView | LContainer | StylingContext): LContainer|
null {
while (Array.isArray(value)) {
// This check is same as `isLContainer()` but we don't call at as we don't want to call
// `Array.isArray()` twice and give JITer more work for inlining.
if (value[TYPE] === true) return value as LContainer;
value = value[HOST] as any;
}
return null;
}
/**
* Returns `StylingContext` or `null` if not found.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function unwrapStylingContext(value: RNode | LView | LContainer | StylingContext):
StylingContext|null {
while (Array.isArray(value)) {
// This check is same as `isStylingContext()` but we don't call at as we don't want to call
// `Array.isArray()` twice and give JITer more work for inlining.
if (typeof value[TYPE] === 'number') return value as StylingContext;
value = value[HOST] as any;
}
return null;
}
/**
* True if `value` is `LView`.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function isLView(value: RNode | LView | LContainer | StylingContext | {} | null):
value is LView {
return Array.isArray(value) && typeof value[TYPE] === 'object';
}
/**
* True if `value` is `LContainer`.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function isLContainer(value: RNode | LView | LContainer | StylingContext | {} | null):
value is LContainer {
return Array.isArray(value) && value[TYPE] === true;
}
/**
* True if `value` is `StylingContext`.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function isStylingContext(value: RNode | LView | LContainer | StylingContext | {} | null):
value is StylingContext {
return Array.isArray(value) && typeof value[TYPE] === 'number';
}
/**
* Retrieves an element value from the provided `viewData`, by unwrapping
* from any containers, component views, or style contexts.
*/
export function getNativeByIndex(index: number, lView: LView): RNode {
return unwrapRNode(lView[index + HEADER_OFFSET]);
}
export function getNativeByTNode(tNode: TNode, hostView: LView): RNode {
return unwrapRNode(hostView[tNode.index]);
}
/**
* A helper function that returns `true` if a given `TNode` has any matching directives.
*/
export function hasDirectives(tNode: TNode): boolean {
return tNode.directiveEnd > tNode.directiveStart;
}
export function getTNode(index: number, view: LView): TNode {
ngDevMode && assertGreaterThan(index, -1, 'wrong index for TNode');
ngDevMode && assertLessThan(index, view[TVIEW].data.length, 'wrong index for TNode');
return view[TVIEW].data[index + HEADER_OFFSET] as TNode;
}
/** Retrieves a value from any `LView` or `TData`. */
export function loadInternal<T>(view: LView | TData, index: number): T {
ngDevMode && assertDataInRange(view, index + HEADER_OFFSET);
return view[index + HEADER_OFFSET];
}
export function getComponentViewByIndex(nodeIndex: number, hostView: LView): LView |
export function isContentQueryHost(tNode: TNode): boolean {
return (tNode.flags & TNodeFlags.hasContentQuery) !== 0;
}
export function isComponent(tNode: TNode): boolean {
return (tNode.flags & TNodeFlags.isComponent) === TNodeFlags.isComponent;
}
export function isComponentDef<T>(def: DirectiveDef<T>): def is ComponentDef<T> {
return (def as ComponentDef<T>).template !== null;
}
export function isRootView(target: LView): boolean {
return (target[FLAGS] & LViewFlags.IsRoot) !== 0;
}
/**
* Returns the monkey-patch value data present on the target (which could be
* a component, directive or a DOM node).
*/
export function readPatchedData(target: any): LView|LContext|null {
ngDevMode && assertDefined(target, 'Target expected');
return target[MONKEY_PATCH_KEY_NAME];
}
export function readPatchedLView(target: any): LView|null {
const value = readPatchedData(target);
if (value) {
return Array.isArray(value) ? value : (value as LContext).lView;
}
return null;
}
/**
* Returns a boolean for whether the view is attached to the change detection tree.
*
* Note: This determines whether a view should be checked, not whether it's inserted
* into a container. For that, you'll want `viewAttachedToContainer` below.
*/
export function viewAttachedToChangeDetector(view: LView): boolean {
return (view[FLAGS] & LViewFlags.Attached) === LViewFlags.Attached;
}
/** Returns a boolean for whether the view is attached to a container. */
export function viewAttachedToContainer(view: LView): boolean {
return isLContainer(view[PARENT]);
}
/**
* Resets the pre-order hook flags of the view.
* @param lView the LView on which the flags are reset
*/
export function resetPreOrderHookFlags(lView: LView) {
lView[PREORDER_HOOK_FLAGS] = 0;
}
| {
// Could be an LView or an LContainer. If LContainer, unwrap to find LView.
const slotValue = hostView[nodeIndex];
const lView = isLView(slotValue) ? slotValue : slotValue[HOST];
return lView;
} | identifier_body |
view_utils.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {assertDataInRange, assertDefined, assertGreaterThan, assertLessThan} from '../../util/assert';
import {LContainer, TYPE} from '../interfaces/container';
import {LContext, MONKEY_PATCH_KEY_NAME} from '../interfaces/context';
import {ComponentDef, DirectiveDef} from '../interfaces/definition';
import {TNode, TNodeFlags} from '../interfaces/node';
import {RNode} from '../interfaces/renderer';
import {StylingContext} from '../interfaces/styling';
import {FLAGS, HEADER_OFFSET, HOST, LView, LViewFlags, PARENT, PREORDER_HOOK_FLAGS, TData, TVIEW} from '../interfaces/view';
/**
* For efficiency reasons we often put several different data types (`RNode`, `LView`, `LContainer`,
* `StylingContext`) in same location in `LView`. This is because we don't want to pre-allocate
* space for it because the storage is sparse. This file contains utilities for dealing with such
* data types.
*
* How do we know what is stored at a given location in `LView`.
* - `Array.isArray(value) === false` => `RNode` (The normal storage value)
* - `Array.isArray(value) === true` => then the `value[0]` represents the wrapped value.
* - `typeof value[TYPE] === 'object'` => `LView`
* - This happens when we have a component at a given location
* - `typeof value[TYPE] === 'number'` => `StylingContext`
* - This happens when we have style/class binding at a given location.
* - `typeof value[TYPE] === true` => `LContainer`
* - This happens when we have `LContainer` binding at a given location.
*
*
* NOTE: it is assumed that `Array.isArray` and `typeof` operations are very efficient.
*/
/**
* Returns `RNode`.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function unwrapRNode(value: RNode | LView | LContainer | StylingContext): RNode {
while (Array.isArray(value)) {
value = value[HOST] as any;
}
return value as RNode;
}
/**
* Returns `LView` or `null` if not found.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function unwrapLView(value: RNode | LView | LContainer | StylingContext): LView|null {
while (Array.isArray(value)) {
// This check is same as `isLView()` but we don't call at as we don't want to call
// `Array.isArray()` twice and give JITer more work for inlining.
if (typeof value[TYPE] === 'object') return value as LView;
value = value[HOST] as any;
}
return null;
}
/**
* Returns `LContainer` or `null` if not found.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function unwrapLContainer(value: RNode | LView | LContainer | StylingContext): LContainer|
null {
while (Array.isArray(value)) {
// This check is same as `isLContainer()` but we don't call at as we don't want to call
// `Array.isArray()` twice and give JITer more work for inlining.
if (value[TYPE] === true) return value as LContainer;
value = value[HOST] as any;
}
return null;
}
/**
* Returns `StylingContext` or `null` if not found.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function unwrapStylingContext(value: RNode | LView | LContainer | StylingContext):
StylingContext|null {
while (Array.isArray(value)) {
// This check is same as `isStylingContext()` but we don't call at as we don't want to call
// `Array.isArray()` twice and give JITer more work for inlining.
if (typeof value[TYPE] === 'number') return value as StylingContext;
value = value[HOST] as any;
}
return null;
}
/**
* True if `value` is `LView`.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function isLView(value: RNode | LView | LContainer | StylingContext | {} | null):
value is LView {
return Array.isArray(value) && typeof value[TYPE] === 'object';
}
/**
* True if `value` is `LContainer`.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function isLContainer(value: RNode | LView | LContainer | StylingContext | {} | null):
value is LContainer {
return Array.isArray(value) && value[TYPE] === true;
}
/**
* True if `value` is `StylingContext`.
* @param value wrapped value of `RNode`, `LView`, `LContainer`, `StylingContext`
*/
export function isStylingContext(value: RNode | LView | LContainer | StylingContext | {} | null):
value is StylingContext {
return Array.isArray(value) && typeof value[TYPE] === 'number';
}
/**
* Retrieves an element value from the provided `viewData`, by unwrapping
* from any containers, component views, or style contexts.
*/
export function getNativeByIndex(index: number, lView: LView): RNode {
return unwrapRNode(lView[index + HEADER_OFFSET]);
}
export function getNativeByTNode(tNode: TNode, hostView: LView): RNode {
return unwrapRNode(hostView[tNode.index]);
}
/**
* A helper function that returns `true` if a given `TNode` has any matching directives.
*/
export function hasDirectives(tNode: TNode): boolean {
return tNode.directiveEnd > tNode.directiveStart;
}
export function getTNode(index: number, view: LView): TNode {
ngDevMode && assertGreaterThan(index, -1, 'wrong index for TNode');
ngDevMode && assertLessThan(index, view[TVIEW].data.length, 'wrong index for TNode');
return view[TVIEW].data[index + HEADER_OFFSET] as TNode;
}
/** Retrieves a value from any `LView` or `TData`. */
export function loadInternal<T>(view: LView | TData, index: number): T {
ngDevMode && assertDataInRange(view, index + HEADER_OFFSET);
return view[index + HEADER_OFFSET];
}
export function getComponentViewByIndex(nodeIndex: number, hostView: LView): LView {
// Could be an LView or an LContainer. If LContainer, unwrap to find LView.
const slotValue = hostView[nodeIndex];
const lView = isLView(slotValue) ? slotValue : slotValue[HOST];
return lView;
}
export function isContentQueryHost(tNode: TNode): boolean {
return (tNode.flags & TNodeFlags.hasContentQuery) !== 0;
}
export function isComponent(tNode: TNode): boolean {
return (tNode.flags & TNodeFlags.isComponent) === TNodeFlags.isComponent;
}
export function isComponentDef<T>(def: DirectiveDef<T>): def is ComponentDef<T> {
return (def as ComponentDef<T>).template !== null;
}
export function isRootView(target: LView): boolean {
return (target[FLAGS] & LViewFlags.IsRoot) !== 0;
} | * Returns the monkey-patch value data present on the target (which could be
* a component, directive or a DOM node).
*/
export function readPatchedData(target: any): LView|LContext|null {
ngDevMode && assertDefined(target, 'Target expected');
return target[MONKEY_PATCH_KEY_NAME];
}
export function readPatchedLView(target: any): LView|null {
const value = readPatchedData(target);
if (value) {
return Array.isArray(value) ? value : (value as LContext).lView;
}
return null;
}
/**
* Returns a boolean for whether the view is attached to the change detection tree.
*
* Note: This determines whether a view should be checked, not whether it's inserted
* into a container. For that, you'll want `viewAttachedToContainer` below.
*/
export function viewAttachedToChangeDetector(view: LView): boolean {
return (view[FLAGS] & LViewFlags.Attached) === LViewFlags.Attached;
}
/** Returns a boolean for whether the view is attached to a container. */
export function viewAttachedToContainer(view: LView): boolean {
return isLContainer(view[PARENT]);
}
/**
* Resets the pre-order hook flags of the view.
* @param lView the LView on which the flags are reset
*/
export function resetPreOrderHookFlags(lView: LView) {
lView[PREORDER_HOOK_FLAGS] = 0;
} |
/** | random_line_split |
emulators.py | # encoding: utf-8
import os
def emulator_rom_launch_command(emulator, rom):
"""Generates a command string that will launch `rom` with `emulator` (using
the format provided by the user). The return value of this function should
be suitable to use as the `Exe` field of a Steam shortcut"""
# Normalizing the strings is just removing any leading/trailing quotes.
# The beautiful thing is that strip does nothing if it doesnt contain quotes,
# so normalizing it then adding quotes should do what I want 100% of the time
normalize = lambda s: s.strip("\"")
add_quotes = lambda s: "\"%s\"" % s
# We don't know if the user put quotes around the emulator location. If
# so, we dont want to add another pair and screw things up.
#
# The user didnt give us the ROM information, but screw it, I already
# have some code to add quotes to a string, might as well use it.
quoted_location = add_quotes(normalize(emulator.location))
quoted_rom = add_quotes(normalize(rom.path))
# The format string contains a bunch of specifies that users can use to
# substitute values in at runtime. Right now the only supported values are:
# %l - The location of the emulator (to avoid sync bugs)
# %r - The location of the ROM (so the emulator knows what to launch)
# %fn - The ROM filename without its extension (for emulators that utilize separete configuration files)
#
# More may be added in the future, but for now this is what we support
return (
emulator.format
.replace("%l", quoted_location)
.replace("%r", quoted_rom)
.replace("%fn", os.path.splitext(os.path.basename(rom.path))[0])
)
def | (emulator):
"""Returns the directory which stores the emulator. The return value of this
function should be suitable to use as the 'StartDir' field of a Steam
shortcut"""
return os.path.dirname(emulator.location)
| emulator_startdir | identifier_name |
emulators.py | # encoding: utf-8
import os
def emulator_rom_launch_command(emulator, rom):
|
def emulator_startdir(emulator):
"""Returns the directory which stores the emulator. The return value of this
function should be suitable to use as the 'StartDir' field of a Steam
shortcut"""
return os.path.dirname(emulator.location)
| """Generates a command string that will launch `rom` with `emulator` (using
the format provided by the user). The return value of this function should
be suitable to use as the `Exe` field of a Steam shortcut"""
# Normalizing the strings is just removing any leading/trailing quotes.
# The beautiful thing is that strip does nothing if it doesnt contain quotes,
# so normalizing it then adding quotes should do what I want 100% of the time
normalize = lambda s: s.strip("\"")
add_quotes = lambda s: "\"%s\"" % s
# We don't know if the user put quotes around the emulator location. If
# so, we dont want to add another pair and screw things up.
#
# The user didnt give us the ROM information, but screw it, I already
# have some code to add quotes to a string, might as well use it.
quoted_location = add_quotes(normalize(emulator.location))
quoted_rom = add_quotes(normalize(rom.path))
# The format string contains a bunch of specifies that users can use to
# substitute values in at runtime. Right now the only supported values are:
# %l - The location of the emulator (to avoid sync bugs)
# %r - The location of the ROM (so the emulator knows what to launch)
# %fn - The ROM filename without its extension (for emulators that utilize separete configuration files)
#
# More may be added in the future, but for now this is what we support
return (
emulator.format
.replace("%l", quoted_location)
.replace("%r", quoted_rom)
.replace("%fn", os.path.splitext(os.path.basename(rom.path))[0])
) | identifier_body |
emulators.py | # encoding: utf-8
import os
def emulator_rom_launch_command(emulator, rom):
"""Generates a command string that will launch `rom` with `emulator` (using
the format provided by the user). The return value of this function should
be suitable to use as the `Exe` field of a Steam shortcut"""
# Normalizing the strings is just removing any leading/trailing quotes.
# The beautiful thing is that strip does nothing if it doesnt contain quotes,
# so normalizing it then adding quotes should do what I want 100% of the time
normalize = lambda s: s.strip("\"")
add_quotes = lambda s: "\"%s\"" % s
# We don't know if the user put quotes around the emulator location. If
# so, we dont want to add another pair and screw things up.
#
# The user didnt give us the ROM information, but screw it, I already
# have some code to add quotes to a string, might as well use it.
quoted_location = add_quotes(normalize(emulator.location))
quoted_rom = add_quotes(normalize(rom.path))
# The format string contains a bunch of specifies that users can use to | # substitute values in at runtime. Right now the only supported values are:
# %l - The location of the emulator (to avoid sync bugs)
# %r - The location of the ROM (so the emulator knows what to launch)
# %fn - The ROM filename without its extension (for emulators that utilize separete configuration files)
#
# More may be added in the future, but for now this is what we support
return (
emulator.format
.replace("%l", quoted_location)
.replace("%r", quoted_rom)
.replace("%fn", os.path.splitext(os.path.basename(rom.path))[0])
)
def emulator_startdir(emulator):
"""Returns the directory which stores the emulator. The return value of this
function should be suitable to use as the 'StartDir' field of a Steam
shortcut"""
return os.path.dirname(emulator.location) | random_line_split | |
variables_12.js | var searchData=
[
['sample',['Sample',['../a04155.html#aee7b23bef7e0cdd87974d3c9d36e9d73',1,'SAMPLELIST']]],
['sample_5fiteration_5f',['sample_iteration_',['../a04551.html#aef5b9a97853a4d8a409b451b445bbd0a',1,'tesseract::LSTMRecognizer']]],
['samplecount',['SampleCount',['../a04123.html#a67f65514626f5d21844400c52fcfea4f',1,'BUCKETS::SampleCount()'],['../a04135.html#a71cac0608666e1abfdfb3ab40e26b249',1,'sample::SampleCount()'],['../a04859.html#aab481329945e65c4aeee79b145e4de51',1,'LABELEDLISTNODE::SampleCount()']]],
['samplesize',['SampleSize',['../a04151.html#ac106d560d67893d8c05c31da22dc0869',1,'CLUSTERER']]],
['save_5falt_5fchoices',['save_alt_choices',['../a05075.html#acb91792cc7855324b0a6f495defe3218',1,'tesseract::Wordrec']]],
['save_5fdoc_5fwords',['save_doc_words',['../a04403.html#a00bc956997b48bd63a56915d331f8add',1,'tesseract::Dict']]],
['scale_5f',['scale_',['../a02399.html#ae72a89d863ac231b9cfbab4c6004ad3d',1,'tesseract::PageIterator::scale_()'],['../a02483.html#abf0aec155d23b51c245c4163582cd82c',1,'tesseract::ImageThresholder::scale_()']]],
['scaled_5fyres_5f',['scaled_yres_',['../a02399.html#a43f0c51fac0f0e60139487d5868812a3',1,'tesseract::PageIterator']]],
['scaling_5ffactor',['SCALING_FACTOR',['../a04471.html#ab4bfd9dc2dd26b3dcb8ce27dc919a87f',1,'com::google::scrollview::ui::SVWindow']]],
['sconfidence',['sconfidence',['../a02383.html#ae39a2c718b010ed27eb7f820546da6d6',1,'OSBestResult']]],
['score',['score',['../a02563.html#a24c0132ad02b9864259fac5ab9a63d8e',1,'tesseract::ScoredFont::score()'],['../a04603.html#a5602bae92b5d5498840a0dbb49dd5b25',1,'tesseract::RecodeNode::score()']]],
['scratch_5fspace_5f',['scratch_space_',['../a04551.html#a4e3ffb8df501ac1988cfc3751c5adfbc',1,'tesseract::LSTMRecognizer']]],
['script_5f',['script_',['../a04935.html#a2a1442cc1af416358ae510c8502c1de1',1,'tesseract::Validator']]],
['script_5fid',['script_id',['../a02383.html#a0647ea3dc4a399a472c48ab1eacd31e1',1,'OSBestResult::script_id()'],['../a05031.html#a6f5df6285567c2959c3b8303792ac2f0',1,'tesseract::LMConsistencyInfo::script_id()']]],
['scripts_5fna',['scripts_na',['../a02387.html#af612cc25d8432678d5b87a5911544792',1,'OSResults']]],
['seam_5farray',['seam_array',['../a02659.html#a3d0de828a382a88581dafb5a304141f1',1,'WERD_RES']]],
['search_5f',['search_',['../a04551.html#aa12337ff92e33b3549ef5d9a49c9babb',1,'tesseract::LSTMRecognizer']]],
['segment_5fadjust_5fdebug',['segment_adjust_debug',['../a05075.html#a2d79e213378c782ac3de5e92ae2ee41c',1,'tesseract::Wordrec']]],
['segment_5fnonalphabetic_5fscript',['segment_nonalphabetic_script',['../a04403.html#a7ba94a461d09ad96991ccdf43b1b53e0',1,'tesseract::Dict']]],
['segment_5fpenalty_5fdict_5fcase_5fbad',['segment_penalty_dict_case_bad',['../a04403.html#a3aa39d0dfa6e33481953461fdece4d38',1,'tesseract::Dict']]],
['segment_5fpenalty_5fdict_5fcase_5fok',['segment_penalty_dict_case_ok',['../a04403.html#a1fa994ce1b5b86d436eb2ab3f2ffbe1b',1,'tesseract::Dict']]],
['segment_5fpenalty_5fdict_5ffrequent_5fword',['segment_penalty_dict_frequent_word',['../a04403.html#a6c5895e0ed2eae93f360b36efd78dfe5',1,'tesseract::Dict']]],
['segment_5fpenalty_5fdict_5fnonword',['segment_penalty_dict_nonword',['../a04403.html#a4a192a4266de4fecbf296101d60ace2b',1,'tesseract::Dict']]],
['segment_5fpenalty_5fgarbage',['segment_penalty_garbage',['../a04403.html#a683a1781be97f4376fa1c01bded08e66',1,'tesseract::Dict']]],
['segsearch_5fdebug_5flevel',['segsearch_debug_level',['../a05075.html#a30bd78320448ef425666157d2ff672db',1,'tesseract::Wordrec']]],
['segsearch_5fmax_5fchar_5fwh_5fratio',['segsearch_max_char_wh_ratio',['../a05075.html#a8fa843163c3e8785a957e422b9256d35',1,'tesseract::Wordrec']]],
['segsearch_5fmax_5ffutile_5fclassifications',['segsearch_max_futile_classifications',['../a05075.html#a7d7641f6361f78664fb541f5ce33fe1b',1,'tesseract::Wordrec']]],
['segsearch_5fmax_5fpain_5fpoints',['segsearch_max_pain_points',['../a05075.html#a0a0cfc9fb11d9ecf6961d0cc655f9070',1,'tesseract::Wordrec']]],
['selectfailed',['SELECTFAILED',['../a00527.html#acb152f0afa6871eda23b333b0023c6dc',1,'fileerr.h']]],
['serialise_5flinks',['SERIALISE_LINKS',['../a00557.html#a9efe4a42d25d2d35f3d234485e96f47a',1,'lsterr.h']]],
['server_5fport',['SERVER_PORT',['../a04435.html#a43b2942a0675f6f9dcc57217d55f9cbc',1,'com::google::scrollview::ScrollView']]],
['shape1',['shape1',['../a04267.html#a1d5a17739850a65f1c9367eeb3878a98',1,'tesseract::ShapeDist']]],
['shape2',['shape2',['../a04267.html#ad73ba476dc29d61c40a08512f9698138',1,'tesseract::ShapeDist']]],
['shape_5fcost',['shape_cost',['../a05019.html#a0185040dea9a2fadac41feb45729e444',1,'tesseract::AssociateStats']]],
['shape_5fid',['shape_id',['../a04319.html#a988673a089e6c60dfe067c3626437ebd',1,'tesseract::ShapeRating']]],
['shape_5ftable_5f',['shape_table_',['../a04111.html#a471b3e8f03ddc5281c167b37d8ddc5b5',1,'tesseract::Classify']]],
['shaped_5fw_5f',['shaped_w_',['../a02347.html#a6de2b2f60bec1231ebebee433ed900f7',1,'tesseract::IntSimdMatrix']]],
['shortname',['ShortName',['../a04287.html#a11853141d8e7d72bdb9bc320001fa3d0',1,'FEATURE_DESC_STRUCT']]],
['significant',['Significant',['../a04147.html#a5d94887aaa0acd7e4a0c04c420af5670',1,'PROTOTYPE']]],
['size',['size',['../a02575.html#a2f474cedd75889b35a8b5f401eab3c01',1,'tesseract::FontSet']]],
['size_5fallocated_5f',['size_allocated_',['../a02607.html#ae7e5179f795bae2765e51cab5b7e4943',1,'GENERIC_2D_ARRAY']]],
['size_5freserved_5f',['size_reserved_',['../a02307.html#a4a02eb2a4ed31e8454cd8ae06eb8d7c5',1,'GenericVector']]],
['size_5fused_5f',['size_used_',['../a02307.html#a99185b084a6ace7536818ce2f17b11fb',1,'GenericVector']]],
['slope',['Slope',['../a04275.html#a75770cf1960863e6937f2ca578ede6ef',1,'MFEDGEPT']]],
['small_5fblobs',['small_blobs',['../a02499.html#ad16b1c18225b44c50674ca46841b0f5f',1,'TO_BLOCK']]],
['small_5fcaps',['small_caps',['../a02659.html#ac14d59010b792d434b39e991b0c8a962',1,'WERD_RES']]],
['source_5ft_5f',['source_t_',['../a04483.html#ae1b56782fd7594e0bd65963f6134a6f8',1,'tesseract::FullyConnected']]],
['space_5fabove_5f',['space_above_',['../a04815.html#aae33c28056468e9da3b8b652e3cf0c94',1,'tesseract::StructuredTable']]],
['space_5fbelow_5f',['space_below_',['../a04815.html#a5d0168255bb07d28f1e30276d3fb751c',1,'tesseract::StructuredTable']]],
['space_5fcertainty',['space_certainty',['../a02659.html#a35d1ebe6dbdc0fe2943b67ea7b69117f',1,'WERD_RES']]],
['space_5fleft_5f',['space_left_',['../a04815.html#a369e1cab57c1cf20f454d31bc2d9b69c',1,'tesseract::StructuredTable']]],
['space_5fright_5f',['space_right_',['../a04815.html#a950d1abfe05a5582cc71617d3e689c80',1,'tesseract::StructuredTable']]],
['space_5fsize',['space_size',['../a02495.html#af15ca7af4b650c4089de86ecf8b6f6b2',1,'TO_ROW::space_size()'],['../a02499.html#ab2dbae642d172fae61f5adba3a4dc480',1,'TO_BLOCK::space_size()']]],
['space_5fthreshold',['space_threshold',['../a02495.html#a8115cceab7336a622ce9db23f86e488a',1,'TO_ROW']]],
['spacing',['spacing',['../a02495.html#a54a1cf2bfa1478d255a8b79f51ca8e59',1,'TO_ROW']]],
['spacing_5fvec',['spacing_vec',['../a02571.html#ade637da68278d3bff4bae5eba7634169',1,'tesseract::FontInfo']]],
['sparse_5fsize_5f',['sparse_size_',['../a02843.html#ae3ff257a84297cf8c67ee4bb27e54194',1,'tesseract::IndexMap']]],
['speckle_5flarge_5fmax_5fsize',['speckle_large_max_size',['../a04111.html#a49b9210610b2eb4c1b2bceb8b7814797',1,'tesseract::Classify']]],
['speckle_5frating_5fpenalty',['speckle_rating_penalty',['../a04111.html#a01a9b767032f48360fb57ba58efe00dd',1,'tesseract::Classify']]],
['spherical',['Spherical',['../a04143.html#ad883f0258d034e4fd77ce450c3e058d2',1,'FLOATUNION']]],
['src_5foutline',['src_outline',['../a02507.html#a6c94887c0e59312611b9cbc9edf9d5fb',1,'EDGEPT']]],
['stack_5f',['stack_',['../a04599.html#a0c99262aaa769cfd313c3cf5c0a97876',1,'tesseract::Plumbing']]],
['stall_5fiteration_5f',['stall_iteration_',['../a04555.html#a1384068ebb6b9c2c5fb7b73bf2ad9c54',1,'tesseract::LSTMTrainer']]],
['start',['start',['../a02511.html#a6eddfefc04de79fe19712d90925de2fb',1,'TESSLINE::start()'],['../a04763.html#ab73dd2ebe4d7766121e62353da385ae2',1,'C_OUTLINE_FRAG::start()']]], | ['startdelta',['StartDelta',['../a04215.html#a2ff3e161c404e6647da48c724fecaf41',1,'TABLE_FILLER']]],
['step_5fcount',['step_count',['../a02507.html#a34a59945d65b7db4f0f8aa11a3cfe8c0',1,'EDGEPT']]],
['stepcount',['stepcount',['../a04763.html#a5619d323d56f236d28852959d8898769',1,'C_OUTLINE_FRAG']]],
['stepdir',['stepdir',['../a02539.html#adea4a1f04a9e940ac794b63cbed9f627',1,'CRACKEDGE']]],
['steps',['steps',['../a04763.html#aa3d89d6ff984029b96313eb1ffedf171',1,'C_OUTLINE_FRAG']]],
['stepx',['stepx',['../a02539.html#a03c0a210ff23807dfe4db58cac4cbf44',1,'CRACKEDGE']]],
['stepy',['stepy',['../a02539.html#aae0c7fcaf4f55516548ff42a495b6d98',1,'CRACKEDGE']]],
['still_5flinked',['STILL_LINKED',['../a00557.html#a79ccb5823fc9ffa52f94155cedb17796',1,'lsterr.h']]],
['stillrunning',['stillRunning',['../a00158.html#adcfaa97ce4970c7cbfdd833c705e75d8',1,'pgedit.cpp']]],
['stopper_5fallowable_5fcharacter_5fbadness',['stopper_allowable_character_badness',['../a04403.html#a105fa57ee5545f36fc0f1baf89d2d3af',1,'tesseract::Dict']]],
['stopper_5fcertainty_5fper_5fchar',['stopper_certainty_per_char',['../a04403.html#aa996712ed754700ede71a883d4211019',1,'tesseract::Dict']]],
['stopper_5fdebug_5flevel',['stopper_debug_level',['../a04403.html#aae528f9618755dee3ab974f06f1794ea',1,'tesseract::Dict']]],
['stopper_5fno_5facceptable_5fchoices',['stopper_no_acceptable_choices',['../a04403.html#a444f71f744f92cfcd58dea0a2255e044',1,'tesseract::Dict']]],
['stopper_5fnondict_5fcertainty_5fbase',['stopper_nondict_certainty_base',['../a04403.html#a793fa27917d4728c9d07b667a039c1e5',1,'tesseract::Dict']]],
['stopper_5fphase2_5fcertainty_5frejection_5foffset',['stopper_phase2_certainty_rejection_offset',['../a04403.html#a73282067f0bb86fcaf689b9202bb6702',1,'tesseract::Dict']]],
['stopper_5fsmallword_5fsize',['stopper_smallword_size',['../a04403.html#af37cc8127c3872088d93aabaf3453e25',1,'tesseract::Dict']]],
['str',['str',['../a02667.html#a5dee966c19cad16f4eba3efc879f51b3',1,'tesseract::ParamsTrainingHypothesis']]],
['stream_5ffilelist',['stream_filelist',['../a00005.html#ad341e2a0385debc2342a18dfa9e5b3ec',1,'baseapi.cpp']]],
['string_5fparams',['string_params',['../a02895.html#a446a70d7d7c85af255d053ae54c5df8a',1,'tesseract::ParamsVectors']]],
['strip_5funrenderable_5fwords_5f',['strip_unrenderable_words_',['../a04907.html#abad42e9d49c5d76a07419085cfbc6f8d',1,'tesseract::StringRenderer']]],
['style',['Style',['../a04147.html#aad81f08b6e171698866b445c0b3d4b35',1,'PROTOTYPE']]],
['sub_5ftrainer_5f',['sub_trainer_',['../a04555.html#a83b014eb1f67c01296b1685e537f4f48',1,'tesseract::LSTMTrainer']]],
['subscript_5fmax_5fy_5ftop',['subscript_max_y_top',['../a02479.html#a6d45a1e0e32a0952e4d9bdecbc074650',1,'tesseract::Tesseract']]],
['sum_5ffeature_5fevidence_5f',['sum_feature_evidence_',['../a04203.html#ab308dd0dfb19adb21841cd771470de26',1,'ScratchEvidence']]],
['sum_5fof_5fsamples',['sum_of_samples',['../a05059.html#accb64bf2ff469735b75ec5100a7be3b6',1,'MEASUREMENT']]],
['sum_5fof_5fsquares',['sum_of_squares',['../a05059.html#a948e70eb70109e56d7ea77f671496ec5',1,'MEASUREMENT']]],
['superscript_5fbettered_5fcertainty',['superscript_bettered_certainty',['../a02479.html#a1afe871da2bd4a0edbd933675fb8c197',1,'tesseract::Tesseract']]],
['superscript_5fdebug',['superscript_debug',['../a02479.html#a8cf75373c38d6dd6cbc067d83de375ff',1,'tesseract::Tesseract']]],
['superscript_5fmin_5fy_5fbottom',['superscript_min_y_bottom',['../a02479.html#a5340c5f169b77fd0fc2c80487cfabb4f',1,'tesseract::Tesseract']]],
['superscript_5fscaledown_5fratio',['superscript_scaledown_ratio',['../a02479.html#ab267ebdd77645dc24bff3e7f276761bf',1,'tesseract::Tesseract']]],
['superscript_5fworse_5fcertainty',['superscript_worse_certainty',['../a02479.html#a1b3c330b0bf35cdce460f2cd744395c9',1,'tesseract::Tesseract']]],
['surface_5f',['surface_',['../a04907.html#aa91ab1059208b0d0490399cfca0d9bde',1,'tesseract::StringRenderer']]],
['suspect_5faccept_5frating',['suspect_accept_rating',['../a02479.html#a68431bcaa838b2dfe4fb5e76d9e40c7d',1,'tesseract::Tesseract']]],
['suspect_5fconstrain_5f1il',['suspect_constrain_1Il',['../a02479.html#a14dea85bdc59f17b92df2640d8c314e1',1,'tesseract::Tesseract']]],
['suspect_5flevel',['suspect_level',['../a02479.html#aa58f48fb367af8722afcf8ef4abb9354',1,'tesseract::Tesseract']]],
['suspect_5frating_5fper_5fch',['suspect_rating_per_ch',['../a02479.html#a3249cdbe54d31c142385047272c858b4',1,'tesseract::Tesseract']]],
['suspect_5fshort_5fwords',['suspect_short_words',['../a02479.html#a479ae1f088e7bc92b7f16b4340e7567b',1,'tesseract::Tesseract']]],
['suspect_5fspace_5flevel',['suspect_space_level',['../a02479.html#a2e8f788ddfd45bab1e1519f64c793f3c',1,'tesseract::Tesseract']]],
['svet_5fclick',['SVET_CLICK',['../a04431.html#ac3e0d2c401b640288c63b5670d1bf21f',1,'com::google::scrollview::events::SVEventType']]],
['svet_5fdestroy',['SVET_DESTROY',['../a04431.html#ac5f6843331c24507b3d8dd6e2cb37d8e',1,'com::google::scrollview::events::SVEventType']]],
['svet_5fexit',['SVET_EXIT',['../a04431.html#a4d60ed89f72fd8103a04e7dd4652334e',1,'com::google::scrollview::events::SVEventType']]],
['svet_5fhover',['SVET_HOVER',['../a04431.html#a35fb43480deca21bab915141ce977961',1,'com::google::scrollview::events::SVEventType']]],
['svet_5finput',['SVET_INPUT',['../a04431.html#a2eca708979b21c9ceadace51242c1802',1,'com::google::scrollview::events::SVEventType']]],
['svet_5fmotion',['SVET_MOTION',['../a04431.html#a668cdd791ad9dbcc13341f3d128232fd',1,'com::google::scrollview::events::SVEventType']]],
['svet_5fmouse',['SVET_MOUSE',['../a04431.html#a357896cf69f10ee77eaba41445a09e51',1,'com::google::scrollview::events::SVEventType']]],
['svet_5fpopup',['SVET_POPUP',['../a04431.html#a3b4e95cfba6c081a1a78fda75c2676b5',1,'com::google::scrollview::events::SVEventType']]],
['svet_5fselection',['SVET_SELECTION',['../a04431.html#a145589fcfc800530e6d57c69f7e7f18d',1,'com::google::scrollview::events::SVEventType']]],
['svpumenu',['svPuMenu',['../a04471.html#a139a5a8fb2c4b7924c08bd50a6ce7901',1,'com::google::scrollview::ui::SVWindow']]],
['switch',['Switch',['../a04215.html#aa9c4aa7eda49284aa105296e138ff379',1,'TABLE_FILLER']]]
]; | ['start_5fbox_5f',['start_box_',['../a04907.html#a340698db98509f605d57c88efcb62a9a',1,'tesseract::StringRenderer']]],
['start_5fof_5fdawg',['start_of_dawg',['../a04603.html#addd4c725934785d09cdc821098dfe79e',1,'tesseract::RecodeNode']]],
['start_5fof_5fword',['start_of_word',['../a04603.html#a0ba756b9c78f3c3dc04797f86678b5ac',1,'tesseract::RecodeNode']]],
['start_5fstep',['start_step',['../a02507.html#a0c73291ef68cb65a10f973ff98a10424',1,'EDGEPT']]], | random_line_split |
sectors.rs | use rand::{seq, ChaChaRng, SeedableRng};
use rayon::prelude::*;
use std::{
collections::HashMap,
sync::atomic::{AtomicBool, Ordering},
time::Instant,
usize::MAX,
};
use config::GameConfig;
use entities::Faction;
use entities::Sector;
use utils::Point;
/// Used for generating sectors.
pub struct SectorGen {}
impl SectorGen {
/// Create a new sector generator.
pub fn new() -> SectorGen {
SectorGen {}
}
/// Split the systems in to a set number of clusters using K-means.
pub fn generate(&self, config: &GameConfig, system_locations: Vec<Point>) -> Vec<Sector> {
// Measure time for generation.
let now = Instant::now();
info!("Simulating expansion for initial sectors...");
let seed: &[_] = &[config.map_seed as u32];
let mut rng: ChaChaRng = ChaChaRng::from_seed(seed);
// Setup initial centroids
let mut centroids =
seq::sample_iter(&mut rng, system_locations.iter(), config.number_of_sectors)
.unwrap()
.into_iter() | .into_iter()
.map(|point| (point, 0))
.collect();
// Run K means until convergence, i.e until no reassignments
let mut has_assigned = true;
while has_assigned {
let wrapped_assigned = AtomicBool::new(false);
// Assign to closest centroid
cluster_map
.par_iter_mut()
.for_each(|(system_location, cluster_id)| {
let mut closest_cluster = *cluster_id;
let mut closest_distance = system_location.distance(¢roids[*cluster_id]);
for (i, centroid) in centroids.iter().enumerate() {
let distance = system_location.distance(centroid);
if distance < closest_distance {
wrapped_assigned.store(true, Ordering::Relaxed);
closest_cluster = i;
closest_distance = distance;
}
}
*cluster_id = closest_cluster;
});
has_assigned = wrapped_assigned.load(Ordering::Relaxed);
// Calculate new centroids
centroids
//.par_iter_mut()
.iter_mut()
.enumerate()
.for_each(|(id, centroid)| {
let mut count = 0.;
let mut new_centroid = Point::origin();
for (system_location, _) in cluster_map.iter().filter(|&(_, c_id)| *c_id == id)
{
new_centroid += *system_location;
count += 1.;
}
new_centroid *= 1. / count;
*centroid = new_centroid;
});
}
// Setup cluster vectors
let mut sector_vecs =
(0..config.number_of_sectors).fold(Vec::<Vec<Point>>::new(), |mut sectors, _| {
sectors.push(vec![]);
sectors
});
// Map systems to final cluster
for (system_location, id) in cluster_map {
sector_vecs[id].push(system_location);
}
// Create sector for each cluster
let sectors = sector_vecs
.into_iter()
.map(|system_locations| {
let sector_seed: &[_] = &[system_locations.len() as u32];
let mut faction_rng: ChaChaRng = SeedableRng::from_seed(sector_seed);
Sector {
system_locations,
faction: Faction::random_faction(&mut faction_rng),
}
})
.collect::<Vec<Sector>>();
info!(
"Mapped galaxy into {} sectors of {} systems, avg size: {},
max size {}, min size {}, taking {} ms \n
Sectors include: {} Cartel, {} Empire, {} Federation, {} Independent",
sectors.len(),
sectors
.iter()
.fold(0, |acc, sec| acc + sec.system_locations.len()),
sectors
.iter()
.fold(0, |acc, sec| acc + sec.system_locations.len())
/ sectors.len(),
sectors
.iter()
.fold(0, |acc, sec| acc.max(sec.system_locations.len())),
sectors
.iter()
.fold(MAX, |acc, sec| acc.min(sec.system_locations.len())),
((now.elapsed().as_secs() * 1_000) + u64::from(now.elapsed().subsec_millis())),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Cartel => 1,
_ => 0,
}),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Empire => 1,
_ => 0,
}),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Federation => 1,
_ => 0,
}),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Independent => 1,
_ => 0,
})
);
sectors
}
} | .cloned()
.collect::<Vec<_>>();
// System to cluster_id mapping
let mut cluster_map: HashMap<Point, usize> = system_locations | random_line_split |
sectors.rs | use rand::{seq, ChaChaRng, SeedableRng};
use rayon::prelude::*;
use std::{
collections::HashMap,
sync::atomic::{AtomicBool, Ordering},
time::Instant,
usize::MAX,
};
use config::GameConfig;
use entities::Faction;
use entities::Sector;
use utils::Point;
/// Used for generating sectors.
pub struct SectorGen {}
impl SectorGen {
/// Create a new sector generator.
pub fn new() -> SectorGen {
SectorGen {}
}
/// Split the systems in to a set number of clusters using K-means.
pub fn | (&self, config: &GameConfig, system_locations: Vec<Point>) -> Vec<Sector> {
// Measure time for generation.
let now = Instant::now();
info!("Simulating expansion for initial sectors...");
let seed: &[_] = &[config.map_seed as u32];
let mut rng: ChaChaRng = ChaChaRng::from_seed(seed);
// Setup initial centroids
let mut centroids =
seq::sample_iter(&mut rng, system_locations.iter(), config.number_of_sectors)
.unwrap()
.into_iter()
.cloned()
.collect::<Vec<_>>();
// System to cluster_id mapping
let mut cluster_map: HashMap<Point, usize> = system_locations
.into_iter()
.map(|point| (point, 0))
.collect();
// Run K means until convergence, i.e until no reassignments
let mut has_assigned = true;
while has_assigned {
let wrapped_assigned = AtomicBool::new(false);
// Assign to closest centroid
cluster_map
.par_iter_mut()
.for_each(|(system_location, cluster_id)| {
let mut closest_cluster = *cluster_id;
let mut closest_distance = system_location.distance(¢roids[*cluster_id]);
for (i, centroid) in centroids.iter().enumerate() {
let distance = system_location.distance(centroid);
if distance < closest_distance {
wrapped_assigned.store(true, Ordering::Relaxed);
closest_cluster = i;
closest_distance = distance;
}
}
*cluster_id = closest_cluster;
});
has_assigned = wrapped_assigned.load(Ordering::Relaxed);
// Calculate new centroids
centroids
//.par_iter_mut()
.iter_mut()
.enumerate()
.for_each(|(id, centroid)| {
let mut count = 0.;
let mut new_centroid = Point::origin();
for (system_location, _) in cluster_map.iter().filter(|&(_, c_id)| *c_id == id)
{
new_centroid += *system_location;
count += 1.;
}
new_centroid *= 1. / count;
*centroid = new_centroid;
});
}
// Setup cluster vectors
let mut sector_vecs =
(0..config.number_of_sectors).fold(Vec::<Vec<Point>>::new(), |mut sectors, _| {
sectors.push(vec![]);
sectors
});
// Map systems to final cluster
for (system_location, id) in cluster_map {
sector_vecs[id].push(system_location);
}
// Create sector for each cluster
let sectors = sector_vecs
.into_iter()
.map(|system_locations| {
let sector_seed: &[_] = &[system_locations.len() as u32];
let mut faction_rng: ChaChaRng = SeedableRng::from_seed(sector_seed);
Sector {
system_locations,
faction: Faction::random_faction(&mut faction_rng),
}
})
.collect::<Vec<Sector>>();
info!(
"Mapped galaxy into {} sectors of {} systems, avg size: {},
max size {}, min size {}, taking {} ms \n
Sectors include: {} Cartel, {} Empire, {} Federation, {} Independent",
sectors.len(),
sectors
.iter()
.fold(0, |acc, sec| acc + sec.system_locations.len()),
sectors
.iter()
.fold(0, |acc, sec| acc + sec.system_locations.len())
/ sectors.len(),
sectors
.iter()
.fold(0, |acc, sec| acc.max(sec.system_locations.len())),
sectors
.iter()
.fold(MAX, |acc, sec| acc.min(sec.system_locations.len())),
((now.elapsed().as_secs() * 1_000) + u64::from(now.elapsed().subsec_millis())),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Cartel => 1,
_ => 0,
}),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Empire => 1,
_ => 0,
}),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Federation => 1,
_ => 0,
}),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Independent => 1,
_ => 0,
})
);
sectors
}
}
| generate | identifier_name |
sectors.rs | use rand::{seq, ChaChaRng, SeedableRng};
use rayon::prelude::*;
use std::{
collections::HashMap,
sync::atomic::{AtomicBool, Ordering},
time::Instant,
usize::MAX,
};
use config::GameConfig;
use entities::Faction;
use entities::Sector;
use utils::Point;
/// Used for generating sectors.
pub struct SectorGen {}
impl SectorGen {
/// Create a new sector generator.
pub fn new() -> SectorGen |
/// Split the systems in to a set number of clusters using K-means.
pub fn generate(&self, config: &GameConfig, system_locations: Vec<Point>) -> Vec<Sector> {
// Measure time for generation.
let now = Instant::now();
info!("Simulating expansion for initial sectors...");
let seed: &[_] = &[config.map_seed as u32];
let mut rng: ChaChaRng = ChaChaRng::from_seed(seed);
// Setup initial centroids
let mut centroids =
seq::sample_iter(&mut rng, system_locations.iter(), config.number_of_sectors)
.unwrap()
.into_iter()
.cloned()
.collect::<Vec<_>>();
// System to cluster_id mapping
let mut cluster_map: HashMap<Point, usize> = system_locations
.into_iter()
.map(|point| (point, 0))
.collect();
// Run K means until convergence, i.e until no reassignments
let mut has_assigned = true;
while has_assigned {
let wrapped_assigned = AtomicBool::new(false);
// Assign to closest centroid
cluster_map
.par_iter_mut()
.for_each(|(system_location, cluster_id)| {
let mut closest_cluster = *cluster_id;
let mut closest_distance = system_location.distance(¢roids[*cluster_id]);
for (i, centroid) in centroids.iter().enumerate() {
let distance = system_location.distance(centroid);
if distance < closest_distance {
wrapped_assigned.store(true, Ordering::Relaxed);
closest_cluster = i;
closest_distance = distance;
}
}
*cluster_id = closest_cluster;
});
has_assigned = wrapped_assigned.load(Ordering::Relaxed);
// Calculate new centroids
centroids
//.par_iter_mut()
.iter_mut()
.enumerate()
.for_each(|(id, centroid)| {
let mut count = 0.;
let mut new_centroid = Point::origin();
for (system_location, _) in cluster_map.iter().filter(|&(_, c_id)| *c_id == id)
{
new_centroid += *system_location;
count += 1.;
}
new_centroid *= 1. / count;
*centroid = new_centroid;
});
}
// Setup cluster vectors
let mut sector_vecs =
(0..config.number_of_sectors).fold(Vec::<Vec<Point>>::new(), |mut sectors, _| {
sectors.push(vec![]);
sectors
});
// Map systems to final cluster
for (system_location, id) in cluster_map {
sector_vecs[id].push(system_location);
}
// Create sector for each cluster
let sectors = sector_vecs
.into_iter()
.map(|system_locations| {
let sector_seed: &[_] = &[system_locations.len() as u32];
let mut faction_rng: ChaChaRng = SeedableRng::from_seed(sector_seed);
Sector {
system_locations,
faction: Faction::random_faction(&mut faction_rng),
}
})
.collect::<Vec<Sector>>();
info!(
"Mapped galaxy into {} sectors of {} systems, avg size: {},
max size {}, min size {}, taking {} ms \n
Sectors include: {} Cartel, {} Empire, {} Federation, {} Independent",
sectors.len(),
sectors
.iter()
.fold(0, |acc, sec| acc + sec.system_locations.len()),
sectors
.iter()
.fold(0, |acc, sec| acc + sec.system_locations.len())
/ sectors.len(),
sectors
.iter()
.fold(0, |acc, sec| acc.max(sec.system_locations.len())),
sectors
.iter()
.fold(MAX, |acc, sec| acc.min(sec.system_locations.len())),
((now.elapsed().as_secs() * 1_000) + u64::from(now.elapsed().subsec_millis())),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Cartel => 1,
_ => 0,
}),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Empire => 1,
_ => 0,
}),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Federation => 1,
_ => 0,
}),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Independent => 1,
_ => 0,
})
);
sectors
}
}
| {
SectorGen {}
} | identifier_body |
sectors.rs | use rand::{seq, ChaChaRng, SeedableRng};
use rayon::prelude::*;
use std::{
collections::HashMap,
sync::atomic::{AtomicBool, Ordering},
time::Instant,
usize::MAX,
};
use config::GameConfig;
use entities::Faction;
use entities::Sector;
use utils::Point;
/// Used for generating sectors.
pub struct SectorGen {}
impl SectorGen {
/// Create a new sector generator.
pub fn new() -> SectorGen {
SectorGen {}
}
/// Split the systems in to a set number of clusters using K-means.
pub fn generate(&self, config: &GameConfig, system_locations: Vec<Point>) -> Vec<Sector> {
// Measure time for generation.
let now = Instant::now();
info!("Simulating expansion for initial sectors...");
let seed: &[_] = &[config.map_seed as u32];
let mut rng: ChaChaRng = ChaChaRng::from_seed(seed);
// Setup initial centroids
let mut centroids =
seq::sample_iter(&mut rng, system_locations.iter(), config.number_of_sectors)
.unwrap()
.into_iter()
.cloned()
.collect::<Vec<_>>();
// System to cluster_id mapping
let mut cluster_map: HashMap<Point, usize> = system_locations
.into_iter()
.map(|point| (point, 0))
.collect();
// Run K means until convergence, i.e until no reassignments
let mut has_assigned = true;
while has_assigned {
let wrapped_assigned = AtomicBool::new(false);
// Assign to closest centroid
cluster_map
.par_iter_mut()
.for_each(|(system_location, cluster_id)| {
let mut closest_cluster = *cluster_id;
let mut closest_distance = system_location.distance(¢roids[*cluster_id]);
for (i, centroid) in centroids.iter().enumerate() {
let distance = system_location.distance(centroid);
if distance < closest_distance |
}
*cluster_id = closest_cluster;
});
has_assigned = wrapped_assigned.load(Ordering::Relaxed);
// Calculate new centroids
centroids
//.par_iter_mut()
.iter_mut()
.enumerate()
.for_each(|(id, centroid)| {
let mut count = 0.;
let mut new_centroid = Point::origin();
for (system_location, _) in cluster_map.iter().filter(|&(_, c_id)| *c_id == id)
{
new_centroid += *system_location;
count += 1.;
}
new_centroid *= 1. / count;
*centroid = new_centroid;
});
}
// Setup cluster vectors
let mut sector_vecs =
(0..config.number_of_sectors).fold(Vec::<Vec<Point>>::new(), |mut sectors, _| {
sectors.push(vec![]);
sectors
});
// Map systems to final cluster
for (system_location, id) in cluster_map {
sector_vecs[id].push(system_location);
}
// Create sector for each cluster
let sectors = sector_vecs
.into_iter()
.map(|system_locations| {
let sector_seed: &[_] = &[system_locations.len() as u32];
let mut faction_rng: ChaChaRng = SeedableRng::from_seed(sector_seed);
Sector {
system_locations,
faction: Faction::random_faction(&mut faction_rng),
}
})
.collect::<Vec<Sector>>();
info!(
"Mapped galaxy into {} sectors of {} systems, avg size: {},
max size {}, min size {}, taking {} ms \n
Sectors include: {} Cartel, {} Empire, {} Federation, {} Independent",
sectors.len(),
sectors
.iter()
.fold(0, |acc, sec| acc + sec.system_locations.len()),
sectors
.iter()
.fold(0, |acc, sec| acc + sec.system_locations.len())
/ sectors.len(),
sectors
.iter()
.fold(0, |acc, sec| acc.max(sec.system_locations.len())),
sectors
.iter()
.fold(MAX, |acc, sec| acc.min(sec.system_locations.len())),
((now.elapsed().as_secs() * 1_000) + u64::from(now.elapsed().subsec_millis())),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Cartel => 1,
_ => 0,
}),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Empire => 1,
_ => 0,
}),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Federation => 1,
_ => 0,
}),
sectors.iter().fold(0, |acc, sec| acc
+ match sec.faction {
Faction::Independent => 1,
_ => 0,
})
);
sectors
}
}
| {
wrapped_assigned.store(true, Ordering::Relaxed);
closest_cluster = i;
closest_distance = distance;
} | conditional_block |
course.py | # -*- coding: utf-8 -*-
#
# This file is part of INGInious. See the LICENSE and the COPYRIGHTS files for
# more information about the licensing of this file.
""" Course page """
import web
from inginious.frontend.pages.utils import INGIniousPage
class CoursePage(INGIniousPage):
""" Course page """
def get_course(self, courseid):
""" Return the course """
try:
course = self.course_factory.get_course(courseid)
except:
raise web.notfound()
return course
def POST(self, courseid): # pylint: disable=arguments-differ
""" POST request """
course = self.get_course(courseid)
user_input = web.input()
if "unregister" in user_input and course.allow_unregister():
self.user_manager.course_unregister_user(course, self.user_manager.session_username())
raise web.seeother(self.app.get_homepath() + '/mycourses')
return self.show_page(course)
def GET(self, courseid): # pylint: disable=arguments-differ
|
def show_page(self, course, current_page=0, current_tag=""):
""" Prepares and shows the course page """
username = self.user_manager.session_username()
if not self.user_manager.course_is_open_to_user(course, lti=False):
return self.template_helper.get_renderer().course_unavailable()
tasks = course.get_tasks()
last_submissions = self.submission_manager.get_user_last_submissions(5, {"courseid": course.get_id(),
"taskid": {"$in": list(tasks.keys())}})
for submission in last_submissions:
submission["taskname"] = tasks[submission['taskid']].get_name_or_id(self.user_manager.session_language())
tasks_data = {}
user_tasks = self.database.user_tasks.find(
{"username": username, "courseid": course.get_id(), "taskid": {"$in": list(tasks.keys())}})
is_admin = self.user_manager.has_staff_rights_on_course(course, username)
tasks_score = [0.0, 0.0]
for taskid, task in tasks.items():
tasks_data[taskid] = {"visible": task.get_accessible_time().after_start() or is_admin, "succeeded": False,
"grade": 0.0}
tasks_score[1] += task.get_grading_weight() if tasks_data[taskid]["visible"] else 0
for user_task in user_tasks:
tasks_data[user_task["taskid"]]["succeeded"] = user_task["succeeded"]
tasks_data[user_task["taskid"]]["grade"] = user_task["grade"]
weighted_score = user_task["grade"] * tasks[user_task["taskid"]].get_grading_weight()
tasks_score[0] += weighted_score if tasks_data[user_task["taskid"]]["visible"] else 0
course_grade = round(tasks_score[0] / tasks_score[1]) if tasks_score[1] > 0 else 0
tag_list = course.get_all_tags_names_as_list(is_admin, self.user_manager.session_language())
user_info = self.database.users.find_one({"username": username})
# Filter tasks with the tag in case the tasks are filtered
if not current_tag:
filtered_tasks = tasks
else:
filtered_tasks = {task_id: task for task_id, task in tasks.items() if
current_tag in map(lambda x: x.get_name(), task.get_tags()[2] + task.get_tags()[0])}
# Manage tasks pagination
page_limit = 20
total_tasks = len(filtered_tasks)
pages = total_tasks // page_limit
if (total_tasks % page_limit) != 0 or pages == 0:
pages += 1
if (page_limit * current_page + page_limit) < total_tasks:
page_tasks_ids = list(filtered_tasks.keys())[page_limit * current_page:
page_limit * current_page + page_limit]
else:
page_tasks_ids = list(filtered_tasks.keys())[page_limit * current_page:]
filtered_tasks = {task_id: tasks_data[task_id] for task_id, __ in filtered_tasks.items() if
task_id in page_tasks_ids}
return self.template_helper.get_renderer().course(user_info, course, last_submissions, tasks,
filtered_tasks, course_grade, tag_list, pages,
current_page + 1, current_tag)
| """ GET request """
course = self.get_course(courseid)
user_input = web.input()
page = int(user_input.get("page", 1)) - 1
tag = user_input.get("tag", "")
return self.show_page(course, page, tag) | identifier_body |
course.py | # -*- coding: utf-8 -*-
#
# This file is part of INGInious. See the LICENSE and the COPYRIGHTS files for
# more information about the licensing of this file.
""" Course page """
import web
from inginious.frontend.pages.utils import INGIniousPage
class CoursePage(INGIniousPage):
""" Course page """
def get_course(self, courseid):
""" Return the course """
try:
course = self.course_factory.get_course(courseid)
except:
raise web.notfound()
return course
def POST(self, courseid): # pylint: disable=arguments-differ
""" POST request """
course = self.get_course(courseid)
user_input = web.input()
if "unregister" in user_input and course.allow_unregister():
self.user_manager.course_unregister_user(course, self.user_manager.session_username())
raise web.seeother(self.app.get_homepath() + '/mycourses')
return self.show_page(course)
def GET(self, courseid): # pylint: disable=arguments-differ
""" GET request """
course = self.get_course(courseid)
user_input = web.input()
page = int(user_input.get("page", 1)) - 1
tag = user_input.get("tag", "")
return self.show_page(course, page, tag)
def show_page(self, course, current_page=0, current_tag=""):
""" Prepares and shows the course page """
username = self.user_manager.session_username()
if not self.user_manager.course_is_open_to_user(course, lti=False):
|
tasks = course.get_tasks()
last_submissions = self.submission_manager.get_user_last_submissions(5, {"courseid": course.get_id(),
"taskid": {"$in": list(tasks.keys())}})
for submission in last_submissions:
submission["taskname"] = tasks[submission['taskid']].get_name_or_id(self.user_manager.session_language())
tasks_data = {}
user_tasks = self.database.user_tasks.find(
{"username": username, "courseid": course.get_id(), "taskid": {"$in": list(tasks.keys())}})
is_admin = self.user_manager.has_staff_rights_on_course(course, username)
tasks_score = [0.0, 0.0]
for taskid, task in tasks.items():
tasks_data[taskid] = {"visible": task.get_accessible_time().after_start() or is_admin, "succeeded": False,
"grade": 0.0}
tasks_score[1] += task.get_grading_weight() if tasks_data[taskid]["visible"] else 0
for user_task in user_tasks:
tasks_data[user_task["taskid"]]["succeeded"] = user_task["succeeded"]
tasks_data[user_task["taskid"]]["grade"] = user_task["grade"]
weighted_score = user_task["grade"] * tasks[user_task["taskid"]].get_grading_weight()
tasks_score[0] += weighted_score if tasks_data[user_task["taskid"]]["visible"] else 0
course_grade = round(tasks_score[0] / tasks_score[1]) if tasks_score[1] > 0 else 0
tag_list = course.get_all_tags_names_as_list(is_admin, self.user_manager.session_language())
user_info = self.database.users.find_one({"username": username})
# Filter tasks with the tag in case the tasks are filtered
if not current_tag:
filtered_tasks = tasks
else:
filtered_tasks = {task_id: task for task_id, task in tasks.items() if
current_tag in map(lambda x: x.get_name(), task.get_tags()[2] + task.get_tags()[0])}
# Manage tasks pagination
page_limit = 20
total_tasks = len(filtered_tasks)
pages = total_tasks // page_limit
if (total_tasks % page_limit) != 0 or pages == 0:
pages += 1
if (page_limit * current_page + page_limit) < total_tasks:
page_tasks_ids = list(filtered_tasks.keys())[page_limit * current_page:
page_limit * current_page + page_limit]
else:
page_tasks_ids = list(filtered_tasks.keys())[page_limit * current_page:]
filtered_tasks = {task_id: tasks_data[task_id] for task_id, __ in filtered_tasks.items() if
task_id in page_tasks_ids}
return self.template_helper.get_renderer().course(user_info, course, last_submissions, tasks,
filtered_tasks, course_grade, tag_list, pages,
current_page + 1, current_tag)
| return self.template_helper.get_renderer().course_unavailable() | conditional_block |
course.py | # -*- coding: utf-8 -*-
#
# This file is part of INGInious. See the LICENSE and the COPYRIGHTS files for
# more information about the licensing of this file.
""" Course page """
import web
from inginious.frontend.pages.utils import INGIniousPage
class CoursePage(INGIniousPage):
""" Course page """
def get_course(self, courseid):
""" Return the course """
try:
course = self.course_factory.get_course(courseid)
except:
raise web.notfound()
return course
def POST(self, courseid): # pylint: disable=arguments-differ
""" POST request """
course = self.get_course(courseid)
user_input = web.input()
if "unregister" in user_input and course.allow_unregister():
self.user_manager.course_unregister_user(course, self.user_manager.session_username())
raise web.seeother(self.app.get_homepath() + '/mycourses')
return self.show_page(course)
def GET(self, courseid): # pylint: disable=arguments-differ
""" GET request """
course = self.get_course(courseid)
user_input = web.input()
page = int(user_input.get("page", 1)) - 1
tag = user_input.get("tag", "")
return self.show_page(course, page, tag)
def | (self, course, current_page=0, current_tag=""):
""" Prepares and shows the course page """
username = self.user_manager.session_username()
if not self.user_manager.course_is_open_to_user(course, lti=False):
return self.template_helper.get_renderer().course_unavailable()
tasks = course.get_tasks()
last_submissions = self.submission_manager.get_user_last_submissions(5, {"courseid": course.get_id(),
"taskid": {"$in": list(tasks.keys())}})
for submission in last_submissions:
submission["taskname"] = tasks[submission['taskid']].get_name_or_id(self.user_manager.session_language())
tasks_data = {}
user_tasks = self.database.user_tasks.find(
{"username": username, "courseid": course.get_id(), "taskid": {"$in": list(tasks.keys())}})
is_admin = self.user_manager.has_staff_rights_on_course(course, username)
tasks_score = [0.0, 0.0]
for taskid, task in tasks.items():
tasks_data[taskid] = {"visible": task.get_accessible_time().after_start() or is_admin, "succeeded": False,
"grade": 0.0}
tasks_score[1] += task.get_grading_weight() if tasks_data[taskid]["visible"] else 0
for user_task in user_tasks:
tasks_data[user_task["taskid"]]["succeeded"] = user_task["succeeded"]
tasks_data[user_task["taskid"]]["grade"] = user_task["grade"]
weighted_score = user_task["grade"] * tasks[user_task["taskid"]].get_grading_weight()
tasks_score[0] += weighted_score if tasks_data[user_task["taskid"]]["visible"] else 0
course_grade = round(tasks_score[0] / tasks_score[1]) if tasks_score[1] > 0 else 0
tag_list = course.get_all_tags_names_as_list(is_admin, self.user_manager.session_language())
user_info = self.database.users.find_one({"username": username})
# Filter tasks with the tag in case the tasks are filtered
if not current_tag:
filtered_tasks = tasks
else:
filtered_tasks = {task_id: task for task_id, task in tasks.items() if
current_tag in map(lambda x: x.get_name(), task.get_tags()[2] + task.get_tags()[0])}
# Manage tasks pagination
page_limit = 20
total_tasks = len(filtered_tasks)
pages = total_tasks // page_limit
if (total_tasks % page_limit) != 0 or pages == 0:
pages += 1
if (page_limit * current_page + page_limit) < total_tasks:
page_tasks_ids = list(filtered_tasks.keys())[page_limit * current_page:
page_limit * current_page + page_limit]
else:
page_tasks_ids = list(filtered_tasks.keys())[page_limit * current_page:]
filtered_tasks = {task_id: tasks_data[task_id] for task_id, __ in filtered_tasks.items() if
task_id in page_tasks_ids}
return self.template_helper.get_renderer().course(user_info, course, last_submissions, tasks,
filtered_tasks, course_grade, tag_list, pages,
current_page + 1, current_tag)
| show_page | identifier_name |
course.py | # -*- coding: utf-8 -*-
#
# This file is part of INGInious. See the LICENSE and the COPYRIGHTS files for
# more information about the licensing of this file.
""" Course page """
import web
from inginious.frontend.pages.utils import INGIniousPage
class CoursePage(INGIniousPage):
""" Course page """
def get_course(self, courseid):
""" Return the course """
try:
course = self.course_factory.get_course(courseid)
except:
raise web.notfound()
return course
def POST(self, courseid): # pylint: disable=arguments-differ
""" POST request """
course = self.get_course(courseid)
user_input = web.input()
if "unregister" in user_input and course.allow_unregister():
self.user_manager.course_unregister_user(course, self.user_manager.session_username())
raise web.seeother(self.app.get_homepath() + '/mycourses')
return self.show_page(course)
def GET(self, courseid): # pylint: disable=arguments-differ
""" GET request """
course = self.get_course(courseid)
user_input = web.input()
page = int(user_input.get("page", 1)) - 1
tag = user_input.get("tag", "")
return self.show_page(course, page, tag)
def show_page(self, course, current_page=0, current_tag=""):
""" Prepares and shows the course page """
username = self.user_manager.session_username()
if not self.user_manager.course_is_open_to_user(course, lti=False):
return self.template_helper.get_renderer().course_unavailable()
tasks = course.get_tasks()
last_submissions = self.submission_manager.get_user_last_submissions(5, {"courseid": course.get_id(),
"taskid": {"$in": list(tasks.keys())}})
for submission in last_submissions:
submission["taskname"] = tasks[submission['taskid']].get_name_or_id(self.user_manager.session_language())
tasks_data = {}
user_tasks = self.database.user_tasks.find(
{"username": username, "courseid": course.get_id(), "taskid": {"$in": list(tasks.keys())}})
is_admin = self.user_manager.has_staff_rights_on_course(course, username)
tasks_score = [0.0, 0.0]
for taskid, task in tasks.items():
tasks_data[taskid] = {"visible": task.get_accessible_time().after_start() or is_admin, "succeeded": False,
"grade": 0.0} | for user_task in user_tasks:
tasks_data[user_task["taskid"]]["succeeded"] = user_task["succeeded"]
tasks_data[user_task["taskid"]]["grade"] = user_task["grade"]
weighted_score = user_task["grade"] * tasks[user_task["taskid"]].get_grading_weight()
tasks_score[0] += weighted_score if tasks_data[user_task["taskid"]]["visible"] else 0
course_grade = round(tasks_score[0] / tasks_score[1]) if tasks_score[1] > 0 else 0
tag_list = course.get_all_tags_names_as_list(is_admin, self.user_manager.session_language())
user_info = self.database.users.find_one({"username": username})
# Filter tasks with the tag in case the tasks are filtered
if not current_tag:
filtered_tasks = tasks
else:
filtered_tasks = {task_id: task for task_id, task in tasks.items() if
current_tag in map(lambda x: x.get_name(), task.get_tags()[2] + task.get_tags()[0])}
# Manage tasks pagination
page_limit = 20
total_tasks = len(filtered_tasks)
pages = total_tasks // page_limit
if (total_tasks % page_limit) != 0 or pages == 0:
pages += 1
if (page_limit * current_page + page_limit) < total_tasks:
page_tasks_ids = list(filtered_tasks.keys())[page_limit * current_page:
page_limit * current_page + page_limit]
else:
page_tasks_ids = list(filtered_tasks.keys())[page_limit * current_page:]
filtered_tasks = {task_id: tasks_data[task_id] for task_id, __ in filtered_tasks.items() if
task_id in page_tasks_ids}
return self.template_helper.get_renderer().course(user_info, course, last_submissions, tasks,
filtered_tasks, course_grade, tag_list, pages,
current_page + 1, current_tag) | tasks_score[1] += task.get_grading_weight() if tasks_data[taskid]["visible"] else 0
| random_line_split |
app.js | /*
* Copyright 2016 e-UCM (http://www.e-ucm.es/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* This project has received funding from the European Union’s Horizon
* 2020 research and innovation programme under grant agreement No 644187.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0 (link is external)
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// Declare app level module which depends on filters, and services
angular.module('myApp', [
'ngRoute', 'toolbarApp', 'signupApp', 'loginApp', 'loginPluginApp', 'classApp', 'participantsApp', 'classesApp', 'activitiesApp',
'activityApp', 'gameApp', 'analysisApp', 'kibanaApp', 'gamesApp', 'activityApp', 'analyticsApp', 'devVisualizatorApp',
'services', 'xeditable', 'env-vars', 'ui.router', 'blockUI'
]).run(function (editableOptions, $localStorage, $cookies) {
editableOptions.theme = 'bs3';
if ($localStorage.user) {
$cookies.put('rageUserCookie', $localStorage.user.token, {
path: '/'
});
}
}).filter('prettyDateId', function () {
return function (_id) {
if (_id) {
return $.format.prettyDate(new Date(parseInt(_id.slice(0, 8), 16) * 1000));
}
};
}).filter('prettyDate', function () {
return function (date) {
if (date) {
return $.format.prettyDate(new Date(date));
}
};
}).filter('list', function () {
return function (list) {
if (!list || list.length === 0) {
return 'Empty list';
}
var result = '';
list.forEach(function (v) {
result += v + ', ';
});
return result;
};
}).filter('object2array', function () {
return function (input) {
var out = [];
for (var i in input) {
out.push(input[i]);
}
return out;
};
}).factory('httpRequestInterceptor', ['$localStorage',
function ($localStorage) {
return {
request: function (config) {
config.headers.Accept = 'application/json';
if ($localStorage.user) {
config.headers.Authorization = 'Bearer ' + $localStorage.user.token;
}
return config;
}
};
}
]).config(['$routeProvider', '$httpProvider', '$locationProvider', '$stateProvider', 'blockUIConfig',
function ($routeProvider, $httpProvider, $locationProvider, $stateProvider, blockUIConfig) {
$httpProvider.interceptors.push('httpRequestInterceptor');
$locationProvider.html5Mode({enabled: true, requireBase: false});
$stateProvider.state({
name: 'default',
url: '/',
templateUrl: 'view/home'
});
$stateProvider.state({
name: 'home',
url: '/home',
templateUrl: 'view/home' | });
$stateProvider.state({
name: 'login',
url: '/login',
templateUrl: 'view/login'
});
$stateProvider.state({
name: 'signup',
url: '/signup',
templateUrl: 'view/signup'
});
$stateProvider.state({
name: 'class',
url: '/class',
templateUrl: 'view/classactivity'
});
$stateProvider.state({
name: 'data',
url: '/data',
templateUrl: 'view/data'
});
$stateProvider.state({
name: 'game',
url: '/game',
templateUrl: 'view/gameactivity'
});
blockUIConfig.autoBlock = false;
blockUIConfig.message = 'Please wait...';
}
]).controller('AppCtrl', ['$rootScope', '$scope', '$location', '$http', '$timeout', '$localStorage', '$window',
'Games', 'Classes', 'Activities', 'Versions', 'Analysis', 'Role', 'CONSTANTS', 'QueryParams',
function ($rootScope, $scope, $location, $http, $timeout, $localStorage,
$window, Games, Classes, Activities, Versions, Analysis, Role, CONSTANTS, QueryParams) {
$scope.$storage = $localStorage;
$scope.DOCS = CONSTANTS.DOCS;
// Role determination
$scope.isUser = function () {
return Role.isUser();
};
$scope.isAdmin = function () {
return Role.isAdmin();
};
$scope.isStudent = function () {
return Role.isStudent();
};
$scope.isTeacher = function () {
return Role.isTeacher();
};
$scope.isOfflineActivity = function () {
return $scope.isOfflineActivityParam($scope.selectedActivity);
};
$scope.isOnlineActivity = function () {
return $scope.isOnlineActivityParam($scope.selectedActivity);
};
$scope.isOfflineActivityParam = function (activity) {
return activity && activity.offline;
};
$scope.isOnlineActivityParam = function (activity) {
return activity && !activity.offline;
};
$scope.isDeveloper = function () {
return Role.isDeveloper();
};
$scope.goToClass = function(c) {
$scope.$emit('selectClass', { class: c});
};
$scope.goToGame = function(game) {
$scope.$emit('selectGame', { game: game});
};
$scope.goToActivity = function(activity) {
$scope.$emit('selectActivity', { activity: activity});
};
var checkLogin = function() {
$scope.username = $scope.isUser() ? $scope.$storage.user.username : '';
};
checkLogin();
$scope.$on('login', checkLogin);
$scope.href = function (href) {
$window.location.href = href;
};
$scope.logout = function () {
$http.delete(CONSTANTS.APIPATH + '/logout').success(function () {
delete $scope.$storage.user;
$timeout(function () {
$location.url('login');
}, 50);
}).error(function (data, status) {
delete $scope.$storage.user;
console.error('Error on get /logout ' + JSON.stringify(data) + ', status: ' + status);
});
};
$scope.testIndex = 'default';
$scope.statementSubmitted = false;
$scope.submitStatementsFile = function () {
$scope.loadingDashboard = true;
$scope.statementsFile.contents = JSON.parse($scope.statementsFile.contents);
if ($scope.statementsFile.contents) {
$http.post(CONSTANTS.PROXY + '/activities/test/' + $scope.selectedGame._id, $scope.statementsFile.contents)
.success(function (data) {
$scope.testIndex = data.id;
$scope.statementSubmitted = true;
$scope.generateTestVisualization();
$scope.loadingDashboard = false;
}).error(function (data, status) {
$scope.statementSubmitted = true;
$scope.generateTestVisualization();
console.error('Error on post /activities/test/' + $scope.selectedGame._id + ' ' + JSON.stringify(data) + ', status: ' + status);
$scope.loadingDashboard = false;
});
}
};
if (!$scope.selectedConfigView) {
$scope.selectedConfigView = 'stormAnalysis';
}
$scope.getActiveClass = function (id) {
if (id === $scope.selectedConfigView) {
return 'active';
}
return null;
};
$scope.templateButtonMsg = function (opened) {
if (opened) {
return 'Hide default JSON';
}
return 'Show JSON';
};
$scope.$on('selectGame', function (event, params) {
if (params.game) {
$scope.selectedGame = params.game;
Versions.forGame({gameId: params.game._id}).$promise.then(function(versions) {
$scope.selectedVersion = versions[0];
if (Role.isDeveloper()) {
$location.url('data');
} else {
$location.url('game');
}
$location.search('game', params.game._id);
$location.search('version', $scope.selectedVersion._id);
});
}
});
$scope.$on('selectClass', function (event, params) {
if (params.class) {
$scope.selectedClass = params.class;
$location.url('class');
$location.search('class', params.class._id);
}
});
$scope.$on('selectActivity', function (event, params) {
if (params.activity) {
$scope.selectedActivity = params.activity;
$scope.selectedClass = Classes.get({classId: params.activity.classId});
$scope.selectedVersion = Versions.get({gameId: gameId, versionId: params.activity.versionId});
$scope.selectedGame = Games.get({gameId: params.activity.gameId});
$location.url('data');
$location.search('activity', params.activity._id);
}
});
$scope.developer = {
name: ''
};
// Load
if ($scope.isUser()) {
var gameId = QueryParams.getQueryParam('game');
if (gameId) {
$scope.selectedGame = Games.get({gameId: gameId});
}
var versionId = QueryParams.getQueryParam('version');
if (gameId && versionId) {
$scope.selectedVersion = Versions.get({gameId: gameId, versionId: versionId});
}
var classId = QueryParams.getQueryParam('class');
if (classId) {
$scope.selectedClass = Classes.get({classId: classId});
}
var activityId = QueryParams.getQueryParam('activity');
if (activityId) {
Activities.get({activityId: activityId}).$promise.then(function(activity) {
$scope.selectedActivity = activity;
$scope.selectedClass = Classes.get({classId: activity.classId});
$scope.selectedVersion = Versions.get({gameId: gameId, versionId: activity.versionId});
$scope.selectedGame = Games.get({gameId: activity.gameId});
});
}
} else if (!$window.location.pathname.endsWith('loginbyplugin')) {
$location.url('login');
}
}
]); | random_line_split | |
app.js | /*
* Copyright 2016 e-UCM (http://www.e-ucm.es/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* This project has received funding from the European Union’s Horizon
* 2020 research and innovation programme under grant agreement No 644187.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0 (link is external)
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// Declare app level module which depends on filters, and services
angular.module('myApp', [
'ngRoute', 'toolbarApp', 'signupApp', 'loginApp', 'loginPluginApp', 'classApp', 'participantsApp', 'classesApp', 'activitiesApp',
'activityApp', 'gameApp', 'analysisApp', 'kibanaApp', 'gamesApp', 'activityApp', 'analyticsApp', 'devVisualizatorApp',
'services', 'xeditable', 'env-vars', 'ui.router', 'blockUI'
]).run(function (editableOptions, $localStorage, $cookies) {
editableOptions.theme = 'bs3';
if ($localStorage.user) {
$cookies.put('rageUserCookie', $localStorage.user.token, {
path: '/'
});
}
}).filter('prettyDateId', function () {
return function (_id) {
if (_id) {
return $.format.prettyDate(new Date(parseInt(_id.slice(0, 8), 16) * 1000));
}
};
}).filter('prettyDate', function () {
return function (date) {
if (date) {
return $.format.prettyDate(new Date(date));
}
};
}).filter('list', function () {
return function (list) {
if (!list || list.length === 0) {
return 'Empty list';
}
var result = '';
list.forEach(function (v) {
result += v + ', ';
});
return result;
};
}).filter('object2array', function () {
return function (input) {
var out = [];
for (var i in input) {
out.push(input[i]);
}
return out;
};
}).factory('httpRequestInterceptor', ['$localStorage',
function ($localStorage) {
return {
request: function (config) {
config.headers.Accept = 'application/json';
if ($localStorage.user) {
config.headers.Authorization = 'Bearer ' + $localStorage.user.token;
}
return config;
}
};
}
]).config(['$routeProvider', '$httpProvider', '$locationProvider', '$stateProvider', 'blockUIConfig',
function ($routeProvider, $httpProvider, $locationProvider, $stateProvider, blockUIConfig) {
$httpProvider.interceptors.push('httpRequestInterceptor');
$locationProvider.html5Mode({enabled: true, requireBase: false});
$stateProvider.state({
name: 'default',
url: '/',
templateUrl: 'view/home'
});
$stateProvider.state({
name: 'home',
url: '/home',
templateUrl: 'view/home'
});
$stateProvider.state({
name: 'login',
url: '/login',
templateUrl: 'view/login'
});
$stateProvider.state({
name: 'signup',
url: '/signup',
templateUrl: 'view/signup'
});
$stateProvider.state({
name: 'class',
url: '/class',
templateUrl: 'view/classactivity'
});
$stateProvider.state({
name: 'data',
url: '/data',
templateUrl: 'view/data'
});
$stateProvider.state({
name: 'game',
url: '/game',
templateUrl: 'view/gameactivity'
});
blockUIConfig.autoBlock = false;
blockUIConfig.message = 'Please wait...';
}
]).controller('AppCtrl', ['$rootScope', '$scope', '$location', '$http', '$timeout', '$localStorage', '$window',
'Games', 'Classes', 'Activities', 'Versions', 'Analysis', 'Role', 'CONSTANTS', 'QueryParams',
function ($rootScope, $scope, $location, $http, $timeout, $localStorage,
$window, Games, Classes, Activities, Versions, Analysis, Role, CONSTANTS, QueryParams) {
$scope.$storage = $localStorage;
$scope.DOCS = CONSTANTS.DOCS;
// Role determination
$scope.isUser = function () {
return Role.isUser();
};
$scope.isAdmin = function () {
return Role.isAdmin();
};
$scope.isStudent = function () {
return Role.isStudent();
};
$scope.isTeacher = function () {
return Role.isTeacher();
};
$scope.isOfflineActivity = function () {
return $scope.isOfflineActivityParam($scope.selectedActivity);
};
$scope.isOnlineActivity = function () {
return $scope.isOnlineActivityParam($scope.selectedActivity);
};
$scope.isOfflineActivityParam = function (activity) {
return activity && activity.offline;
};
$scope.isOnlineActivityParam = function (activity) {
return activity && !activity.offline;
};
$scope.isDeveloper = function () {
return Role.isDeveloper();
};
$scope.goToClass = function(c) {
$scope.$emit('selectClass', { class: c});
};
$scope.goToGame = function(game) {
$scope.$emit('selectGame', { game: game});
};
$scope.goToActivity = function(activity) {
$scope.$emit('selectActivity', { activity: activity});
};
var checkLogin = function() {
$scope.username = $scope.isUser() ? $scope.$storage.user.username : '';
};
checkLogin();
$scope.$on('login', checkLogin);
$scope.href = function (href) {
$window.location.href = href;
};
$scope.logout = function () {
$http.delete(CONSTANTS.APIPATH + '/logout').success(function () {
delete $scope.$storage.user;
$timeout(function () {
$location.url('login');
}, 50);
}).error(function (data, status) {
delete $scope.$storage.user;
console.error('Error on get /logout ' + JSON.stringify(data) + ', status: ' + status);
});
};
$scope.testIndex = 'default';
$scope.statementSubmitted = false;
$scope.submitStatementsFile = function () {
$scope.loadingDashboard = true;
$scope.statementsFile.contents = JSON.parse($scope.statementsFile.contents);
if ($scope.statementsFile.contents) {
$http.post(CONSTANTS.PROXY + '/activities/test/' + $scope.selectedGame._id, $scope.statementsFile.contents)
.success(function (data) {
$scope.testIndex = data.id;
$scope.statementSubmitted = true;
$scope.generateTestVisualization();
$scope.loadingDashboard = false;
}).error(function (data, status) {
$scope.statementSubmitted = true;
$scope.generateTestVisualization();
console.error('Error on post /activities/test/' + $scope.selectedGame._id + ' ' + JSON.stringify(data) + ', status: ' + status);
$scope.loadingDashboard = false;
});
}
};
if (!$scope.selectedConfigView) {
$scope.selectedConfigView = 'stormAnalysis';
}
$scope.getActiveClass = function (id) {
if (id === $scope.selectedConfigView) {
return 'active';
}
return null;
};
$scope.templateButtonMsg = function (opened) {
if (opened) {
return 'Hide default JSON';
}
return 'Show JSON';
};
$scope.$on('selectGame', function (event, params) {
if (params.game) {
$scope.selectedGame = params.game;
Versions.forGame({gameId: params.game._id}).$promise.then(function(versions) {
$scope.selectedVersion = versions[0];
if (Role.isDeveloper()) {
$location.url('data');
} else {
| $location.search('game', params.game._id);
$location.search('version', $scope.selectedVersion._id);
});
}
});
$scope.$on('selectClass', function (event, params) {
if (params.class) {
$scope.selectedClass = params.class;
$location.url('class');
$location.search('class', params.class._id);
}
});
$scope.$on('selectActivity', function (event, params) {
if (params.activity) {
$scope.selectedActivity = params.activity;
$scope.selectedClass = Classes.get({classId: params.activity.classId});
$scope.selectedVersion = Versions.get({gameId: gameId, versionId: params.activity.versionId});
$scope.selectedGame = Games.get({gameId: params.activity.gameId});
$location.url('data');
$location.search('activity', params.activity._id);
}
});
$scope.developer = {
name: ''
};
// Load
if ($scope.isUser()) {
var gameId = QueryParams.getQueryParam('game');
if (gameId) {
$scope.selectedGame = Games.get({gameId: gameId});
}
var versionId = QueryParams.getQueryParam('version');
if (gameId && versionId) {
$scope.selectedVersion = Versions.get({gameId: gameId, versionId: versionId});
}
var classId = QueryParams.getQueryParam('class');
if (classId) {
$scope.selectedClass = Classes.get({classId: classId});
}
var activityId = QueryParams.getQueryParam('activity');
if (activityId) {
Activities.get({activityId: activityId}).$promise.then(function(activity) {
$scope.selectedActivity = activity;
$scope.selectedClass = Classes.get({classId: activity.classId});
$scope.selectedVersion = Versions.get({gameId: gameId, versionId: activity.versionId});
$scope.selectedGame = Games.get({gameId: activity.gameId});
});
}
} else if (!$window.location.pathname.endsWith('loginbyplugin')) {
$location.url('login');
}
}
]); | $location.url('game');
}
| conditional_block |
menu.e2e.ts | describe('Menu', () => {
beforeEach(() => {
cy.visit('e2e/standalone.html');
});
it('should have valid items count', () => {
cy.get('.menu-content')
.find('li')
.should('have.length', 34);
});
it('should sync active menu items while scroll', () => {
cy.contains('h1', 'Introduction')
.scrollIntoView()
.get('[role=menuitem].active')
.should('have.text', 'Introduction');
cy.contains('h2', 'Add a new pet to the store')
.scrollIntoView()
.wait(100)
.get('[role=menuitem].active')
.children()
.last()
.should('have.text', 'Add a new pet to the store')
.should('be.visible');
});
it('should sync active menu items while scroll back and scroll again', () => {
cy.contains('h2', 'Add a new pet to the store')
.scrollIntoView()
.wait(100)
.get('[role=menuitem].active')
.children()
.last()
.should('have.text', 'Add a new pet to the store')
.should('be.visible');
cy.contains('h1', 'Swagger Petstore')
.scrollIntoView()
.wait(100)
| .wait(100)
.get('[role=menuitem].active')
.should('have.text', 'Introduction');
cy.url().should('include', '#section/Introduction');
});
it('should update URL hash when clicking on menu items', () => {
cy.contains('[role=menuitem].-depth1', 'pet').click({ force: true });
cy.location('hash').should('equal', '#tag/pet');
cy.contains('[role=menuitem]', 'Find pet by ID').click({ force: true });
cy.location('hash').should('equal', '#operation/getPetById');
});
it('should deactivate tag when other is activated', () => {
const petItem = () => cy.contains('[role=menuitem].-depth1', 'pet');
petItem()
.click({ force: true })
.should('have.class', 'active');
cy.contains('[role=menuitem].-depth1', 'store').click({ force: true });
petItem().should('not.have.class', 'active');
});
}); | cy.contains('h1', 'Introduction')
.scrollIntoView() | random_line_split |
custom_build.rs | use std::collections::{HashMap, BTreeSet};
use std::fs;
use std::io::prelude::*;
use std::path::PathBuf;
use std::str;
use std::sync::{Mutex, Arc};
use core::{PackageId, PackageSet};
use util::{CargoResult, human, Human};
use util::{internal, ChainError, profile, paths};
use util::Freshness;
use super::job::Work;
use super::{fingerprint, process, Kind, Context, Unit};
use super::CommandType;
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug, Hash)]
pub struct BuildOutput {
/// Paths to pass to rustc with the `-L` flag
pub library_paths: Vec<PathBuf>,
/// Names and link kinds of libraries, suitable for the `-l` flag
pub library_links: Vec<String>,
/// Various `--cfg` flags to pass to the compiler
pub cfgs: Vec<String>,
/// Metadata to pass to the immediate dependencies
pub metadata: Vec<(String, String)>,
}
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
pub struct BuildState {
pub outputs: Mutex<BuildMap>,
}
#[derive(Default)]
pub struct BuildScripts {
pub to_link: BTreeSet<(PackageId, Kind)>,
pub plugins: BTreeSet<PackageId>,
}
/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare(cx: &mut Context, unit: &Unit)
-> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}",
unit.pkg, unit.target.name()));
let key = (unit.pkg.package_id().clone(), unit.kind);
let overridden = cx.build_state.outputs.lock().unwrap().contains_key(&key);
let (work_dirty, work_fresh) = if overridden {
(Work::new(|_| Ok(())), Work::new(|_| Ok(())))
} else {
try!(build_work(cx, unit))
};
// Now that we've prep'd our work, build the work needed to manage the
// fingerprint and then start returning that upwards.
let (freshness, dirty, fresh) =
try!(fingerprint::prepare_build_cmd(cx, unit));
Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
}
fn build_work(cx: &mut Context, unit: &Unit) -> CargoResult<(Work, Work)> {
let (script_output, build_output) = {
(cx.layout(unit.pkg, Kind::Host).build(unit.pkg),
cx.layout(unit.pkg, unit.kind).build_out(unit.pkg))
};
// Building the command to execute
let to_exec = script_output.join(unit.target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set with this the build script's profile but rather the
// package's library profile.
let profile = cx.lib_profile(unit.pkg.package_id());
let to_exec = to_exec.into_os_string();
let mut p = try!(super::process(CommandType::Host(to_exec), unit.pkg, cx));
p.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET", &match unit.kind {
Kind::Host => &cx.config.rustc_info().host[..],
Kind::Target => cx.target_triple(),
})
.env("DEBUG", &profile.debuginfo.to_string())
.env("OPT_LEVEL", &profile.opt_level.to_string())
.env("PROFILE", if cx.build_config.release {"release"} else {"debug"})
.env("HOST", &cx.config.rustc_info().host);
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
if let Some(features) = cx.resolve.features(unit.pkg.package_id()) {
for feat in features.iter() {
p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
}
}
// Gather the set of native dependencies that this package has along with
// some other variables to close over.
//
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
cx.dep_run_custom_build(unit).iter().filter_map(|unit| {
if unit.profile.run_custom_build {
Some((unit.pkg.manifest().links().unwrap().to_string(),
unit.pkg.package_id().clone()))
} else {
None
}
}).collect::<Vec<_>>()
};
let pkg_name = unit.pkg.to_string();
let build_state = cx.build_state.clone();
let id = unit.pkg.package_id().clone();
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
let build_scripts = super::load_build_deps(cx, unit);
let kind = unit.kind;
try!(fs::create_dir_all(&cx.layout(unit.pkg, Kind::Host).build(unit.pkg)));
try!(fs::create_dir_all(&cx.layout(unit.pkg, unit.kind).build(unit.pkg)));
let exec_engine = cx.exec_engine.clone();
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
//
// Note that this has to do some extra work just before running the command
// to determine extra environment variables and such.
let dirty = Work::new(move |desc_tx| {
// Make sure that OUT_DIR exists.
//
// If we have an old build directory, then just move it into place,
// otherwise create it!
if fs::metadata(&build_output).is_err() {
try!(fs::create_dir(&build_output).chain_error(|| {
internal("failed to create script output directory for \
build command")
}));
}
// For all our native lib dependencies, pick up their metadata to pass
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
{
let build_state = build_state.outputs.lock().unwrap();
for (name, id) in lib_deps {
let key = (id.clone(), kind);
let state = try!(build_state.get(&key).chain_error(|| {
internal(format!("failed to locate build state for env \
vars: {}/{:?}", id, kind))
}));
let data = &state.metadata;
for &(ref key, ref value) in data.iter() {
p.env(&format!("DEP_{}_{}", super::envify(&name),
super::envify(key)), value);
}
}
if let Some(build_scripts) = build_scripts {
try!(super::add_plugin_deps(&mut p, &build_state,
&build_scripts));
}
}
// And now finally, run the build command itself!
desc_tx.send(p.to_string()).ok();
let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| {
e.desc = format!("failed to run custom build command for `{}`\n{}",
pkg_name, e.desc);
Human(e)
}));
try!(paths::write(&build_output.parent().unwrap().join("output"),
&output.stdout));
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
//
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
let output = try!(str::from_utf8(&output.stdout).map_err(|_| {
human("build script output was not valid utf-8")
}));
let parsed_output = try!(BuildOutput::parse(output, &pkg_name));
build_state.insert(id, kind, parsed_output);
Ok(())
});
// Now that we've prepared our work-to-do, we need to prepare the fresh work
// itself to run when we actually end up just discarding what we calculated
// above.
let fresh = Work::new(move |_tx| {
let (id, pkg_name, build_state, build_output) = all;
let contents = try!(paths::read(&build_output.parent().unwrap()
.join("output")));
let output = try!(BuildOutput::parse(&contents, &pkg_name));
build_state.insert(id, kind, output);
Ok(())
});
Ok((dirty, fresh))
}
impl BuildState {
pub fn new(config: &super::BuildConfig,
packages: &PackageSet) -> BuildState {
let mut sources = HashMap::new();
for package in packages.iter() {
match package.manifest().links() {
Some(links) => {
sources.insert(links.to_string(),
package.package_id().clone());
}
None => {}
}
}
let mut outputs = HashMap::new();
let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
// If no package is using the library named `name`, then this is
// just an override that we ignore.
if let Some(id) = sources.get(name) {
outputs.insert((id.clone(), kind), output.clone());
}
}
BuildState { outputs: Mutex::new(outputs) }
}
fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {
self.outputs.lock().unwrap().insert((id, kind), output);
}
}
impl BuildOutput {
// Parses the output of a script.
// The `pkg_name` is used for error messages.
pub fn parse(input: &str, pkg_name: &str) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut cfgs = Vec::new();
let mut metadata = Vec::new();
let whence = format!("build script of `{}`", pkg_name);
for line in input.lines() {
let mut iter = line.splitn(2, ':');
if iter.next() != Some("cargo") {
// skip this line since it doesn't start with "cargo:"
continue;
}
let data = match iter.next() {
Some(val) => val,
None => continue
};
// getting the `key=value` part of the line
let mut iter = data.splitn(2, '=');
let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
(Some(a), Some(b)) => (a, b.trim_right()),
// line started with `cargo:` but didn't match `key=value`
_ => bail!("Wrong output in {}: `{}`", whence, line),
};
match key {
"rustc-flags" => {
let (libs, links) = try!(
BuildOutput::parse_rustc_flags(value, &whence)
);
library_links.extend(links.into_iter());
library_paths.extend(libs.into_iter());
}
"rustc-link-lib" => library_links.push(value.to_string()),
"rustc-link-search" => library_paths.push(PathBuf::from(value)),
"rustc-cfg" => cfgs.push(value.to_string()),
_ => metadata.push((key.to_string(), value.to_string())),
}
}
Ok(BuildOutput {
library_paths: library_paths,
library_links: library_links,
cfgs: cfgs,
metadata: metadata,
})
}
pub fn parse_rustc_flags(value: &str, whence: &str)
-> CargoResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());
loop {
let flag = match flags_iter.next() {
Some(f) => f,
None => break
};
if flag != "-l" && flag != "-L" {
bail!("Only `-l` and `-L` flags are allowed in {}: `{}`",
whence, value)
}
let value = match flags_iter.next() {
Some(v) => v,
None => bail!("Flag in rustc-flags has no value in {}: `{}`",
whence, value)
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
_ => bail!("only -l and -L flags are allowed")
};
}
Ok((library_paths, library_links))
}
}
/// Compute the `build_scripts` map in the `Context` which tracks what build
/// scripts each package depends on.
///
/// The global `build_scripts` map lists for all (package, kind) tuples what set
/// of packages' build script outputs must be considered. For example this lists
/// all dependencies' `-L` flags which need to be propagated transitively.
///
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
units: &[Unit<'b>]) {
let mut ret = HashMap::new();
for unit in units {
build(&mut ret, cx, unit);
}
cx.build_scripts.extend(ret.into_iter().map(|(k, v)| {
(k, Arc::new(v))
}));
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>,
cx: &Context<'b, 'cfg>,
unit: &Unit<'b>)
-> &'a BuildScripts |
}
| {
// Do a quick pre-flight check to see if we've already calculated the
// set of dependencies.
if out.contains_key(unit) {
return &out[unit]
}
let mut to_link = BTreeSet::new();
let mut plugins = BTreeSet::new();
if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
to_link.insert((unit.pkg.package_id().clone(), unit.kind));
}
for unit in cx.dep_targets(unit).iter() {
let dep_scripts = build(out, cx, unit);
if unit.target.for_host() {
plugins.extend(dep_scripts.to_link.iter()
.map(|p| &p.0).cloned());
} else if unit.target.linkable() {
to_link.extend(dep_scripts.to_link.iter().cloned());
}
}
let prev = out.entry(*unit).or_insert(BuildScripts::default());
prev.to_link.extend(to_link);
prev.plugins.extend(plugins);
return prev
} | identifier_body |
custom_build.rs | use std::collections::{HashMap, BTreeSet};
use std::fs;
use std::io::prelude::*;
use std::path::PathBuf;
use std::str;
use std::sync::{Mutex, Arc};
use core::{PackageId, PackageSet};
use util::{CargoResult, human, Human};
use util::{internal, ChainError, profile, paths};
use util::Freshness;
use super::job::Work;
use super::{fingerprint, process, Kind, Context, Unit};
use super::CommandType;
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug, Hash)]
pub struct BuildOutput {
/// Paths to pass to rustc with the `-L` flag
pub library_paths: Vec<PathBuf>,
/// Names and link kinds of libraries, suitable for the `-l` flag
pub library_links: Vec<String>,
/// Various `--cfg` flags to pass to the compiler
pub cfgs: Vec<String>,
/// Metadata to pass to the immediate dependencies
pub metadata: Vec<(String, String)>,
}
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
pub struct BuildState {
pub outputs: Mutex<BuildMap>,
}
#[derive(Default)]
pub struct BuildScripts {
pub to_link: BTreeSet<(PackageId, Kind)>,
pub plugins: BTreeSet<PackageId>,
}
/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare(cx: &mut Context, unit: &Unit)
-> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}",
unit.pkg, unit.target.name()));
let key = (unit.pkg.package_id().clone(), unit.kind);
let overridden = cx.build_state.outputs.lock().unwrap().contains_key(&key);
let (work_dirty, work_fresh) = if overridden {
(Work::new(|_| Ok(())), Work::new(|_| Ok(())))
} else {
try!(build_work(cx, unit))
};
// Now that we've prep'd our work, build the work needed to manage the
// fingerprint and then start returning that upwards.
let (freshness, dirty, fresh) =
try!(fingerprint::prepare_build_cmd(cx, unit));
Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
}
fn build_work(cx: &mut Context, unit: &Unit) -> CargoResult<(Work, Work)> {
let (script_output, build_output) = {
(cx.layout(unit.pkg, Kind::Host).build(unit.pkg),
cx.layout(unit.pkg, unit.kind).build_out(unit.pkg))
};
// Building the command to execute
let to_exec = script_output.join(unit.target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set with this the build script's profile but rather the
// package's library profile.
let profile = cx.lib_profile(unit.pkg.package_id());
let to_exec = to_exec.into_os_string();
let mut p = try!(super::process(CommandType::Host(to_exec), unit.pkg, cx));
p.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET", &match unit.kind {
Kind::Host => &cx.config.rustc_info().host[..],
Kind::Target => cx.target_triple(),
})
.env("DEBUG", &profile.debuginfo.to_string())
.env("OPT_LEVEL", &profile.opt_level.to_string())
.env("PROFILE", if cx.build_config.release {"release"} else {"debug"})
.env("HOST", &cx.config.rustc_info().host);
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
if let Some(features) = cx.resolve.features(unit.pkg.package_id()) {
for feat in features.iter() {
p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
}
}
// Gather the set of native dependencies that this package has along with
// some other variables to close over.
//
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
cx.dep_run_custom_build(unit).iter().filter_map(|unit| {
if unit.profile.run_custom_build {
Some((unit.pkg.manifest().links().unwrap().to_string(),
unit.pkg.package_id().clone()))
} else {
None
}
}).collect::<Vec<_>>()
};
let pkg_name = unit.pkg.to_string();
let build_state = cx.build_state.clone();
let id = unit.pkg.package_id().clone();
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
let build_scripts = super::load_build_deps(cx, unit);
let kind = unit.kind;
try!(fs::create_dir_all(&cx.layout(unit.pkg, Kind::Host).build(unit.pkg)));
try!(fs::create_dir_all(&cx.layout(unit.pkg, unit.kind).build(unit.pkg)));
let exec_engine = cx.exec_engine.clone();
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
//
// Note that this has to do some extra work just before running the command
// to determine extra environment variables and such.
let dirty = Work::new(move |desc_tx| {
// Make sure that OUT_DIR exists.
//
// If we have an old build directory, then just move it into place,
// otherwise create it!
if fs::metadata(&build_output).is_err() {
try!(fs::create_dir(&build_output).chain_error(|| {
internal("failed to create script output directory for \
build command")
}));
}
// For all our native lib dependencies, pick up their metadata to pass
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
{
let build_state = build_state.outputs.lock().unwrap();
for (name, id) in lib_deps {
let key = (id.clone(), kind);
let state = try!(build_state.get(&key).chain_error(|| {
internal(format!("failed to locate build state for env \
vars: {}/{:?}", id, kind))
}));
let data = &state.metadata;
for &(ref key, ref value) in data.iter() {
p.env(&format!("DEP_{}_{}", super::envify(&name),
super::envify(key)), value);
}
}
if let Some(build_scripts) = build_scripts {
try!(super::add_plugin_deps(&mut p, &build_state,
&build_scripts));
}
}
// And now finally, run the build command itself!
desc_tx.send(p.to_string()).ok();
let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| {
e.desc = format!("failed to run custom build command for `{}`\n{}",
pkg_name, e.desc);
Human(e)
}));
try!(paths::write(&build_output.parent().unwrap().join("output"),
&output.stdout));
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
//
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
let output = try!(str::from_utf8(&output.stdout).map_err(|_| {
human("build script output was not valid utf-8")
}));
let parsed_output = try!(BuildOutput::parse(output, &pkg_name));
build_state.insert(id, kind, parsed_output);
Ok(())
});
// Now that we've prepared our work-to-do, we need to prepare the fresh work
// itself to run when we actually end up just discarding what we calculated
// above.
let fresh = Work::new(move |_tx| {
let (id, pkg_name, build_state, build_output) = all;
let contents = try!(paths::read(&build_output.parent().unwrap()
.join("output")));
let output = try!(BuildOutput::parse(&contents, &pkg_name));
build_state.insert(id, kind, output);
Ok(())
});
Ok((dirty, fresh))
}
impl BuildState {
pub fn new(config: &super::BuildConfig,
packages: &PackageSet) -> BuildState {
let mut sources = HashMap::new();
for package in packages.iter() {
match package.manifest().links() {
Some(links) => {
sources.insert(links.to_string(),
package.package_id().clone());
}
None => {}
}
}
let mut outputs = HashMap::new();
let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
// If no package is using the library named `name`, then this is
// just an override that we ignore.
if let Some(id) = sources.get(name) {
outputs.insert((id.clone(), kind), output.clone());
}
}
BuildState { outputs: Mutex::new(outputs) }
}
fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {
self.outputs.lock().unwrap().insert((id, kind), output);
}
}
impl BuildOutput {
// Parses the output of a script.
// The `pkg_name` is used for error messages.
pub fn parse(input: &str, pkg_name: &str) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut cfgs = Vec::new();
let mut metadata = Vec::new();
let whence = format!("build script of `{}`", pkg_name);
for line in input.lines() {
let mut iter = line.splitn(2, ':');
if iter.next() != Some("cargo") |
let data = match iter.next() {
Some(val) => val,
None => continue
};
// getting the `key=value` part of the line
let mut iter = data.splitn(2, '=');
let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
(Some(a), Some(b)) => (a, b.trim_right()),
// line started with `cargo:` but didn't match `key=value`
_ => bail!("Wrong output in {}: `{}`", whence, line),
};
match key {
"rustc-flags" => {
let (libs, links) = try!(
BuildOutput::parse_rustc_flags(value, &whence)
);
library_links.extend(links.into_iter());
library_paths.extend(libs.into_iter());
}
"rustc-link-lib" => library_links.push(value.to_string()),
"rustc-link-search" => library_paths.push(PathBuf::from(value)),
"rustc-cfg" => cfgs.push(value.to_string()),
_ => metadata.push((key.to_string(), value.to_string())),
}
}
Ok(BuildOutput {
library_paths: library_paths,
library_links: library_links,
cfgs: cfgs,
metadata: metadata,
})
}
pub fn parse_rustc_flags(value: &str, whence: &str)
-> CargoResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());
loop {
let flag = match flags_iter.next() {
Some(f) => f,
None => break
};
if flag != "-l" && flag != "-L" {
bail!("Only `-l` and `-L` flags are allowed in {}: `{}`",
whence, value)
}
let value = match flags_iter.next() {
Some(v) => v,
None => bail!("Flag in rustc-flags has no value in {}: `{}`",
whence, value)
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
_ => bail!("only -l and -L flags are allowed")
};
}
Ok((library_paths, library_links))
}
}
/// Compute the `build_scripts` map in the `Context` which tracks what build
/// scripts each package depends on.
///
/// The global `build_scripts` map lists for all (package, kind) tuples what set
/// of packages' build script outputs must be considered. For example this lists
/// all dependencies' `-L` flags which need to be propagated transitively.
///
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
units: &[Unit<'b>]) {
let mut ret = HashMap::new();
for unit in units {
build(&mut ret, cx, unit);
}
cx.build_scripts.extend(ret.into_iter().map(|(k, v)| {
(k, Arc::new(v))
}));
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>,
cx: &Context<'b, 'cfg>,
unit: &Unit<'b>)
-> &'a BuildScripts {
// Do a quick pre-flight check to see if we've already calculated the
// set of dependencies.
if out.contains_key(unit) {
return &out[unit]
}
let mut to_link = BTreeSet::new();
let mut plugins = BTreeSet::new();
if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
to_link.insert((unit.pkg.package_id().clone(), unit.kind));
}
for unit in cx.dep_targets(unit).iter() {
let dep_scripts = build(out, cx, unit);
if unit.target.for_host() {
plugins.extend(dep_scripts.to_link.iter()
.map(|p| &p.0).cloned());
} else if unit.target.linkable() {
to_link.extend(dep_scripts.to_link.iter().cloned());
}
}
let prev = out.entry(*unit).or_insert(BuildScripts::default());
prev.to_link.extend(to_link);
prev.plugins.extend(plugins);
return prev
}
}
| {
// skip this line since it doesn't start with "cargo:"
continue;
} | conditional_block |
custom_build.rs | use std::collections::{HashMap, BTreeSet};
use std::fs;
use std::io::prelude::*;
use std::path::PathBuf;
use std::str;
use std::sync::{Mutex, Arc};
use core::{PackageId, PackageSet};
use util::{CargoResult, human, Human};
use util::{internal, ChainError, profile, paths};
use util::Freshness;
use super::job::Work;
use super::{fingerprint, process, Kind, Context, Unit};
use super::CommandType;
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug, Hash)]
pub struct BuildOutput {
/// Paths to pass to rustc with the `-L` flag
pub library_paths: Vec<PathBuf>,
/// Names and link kinds of libraries, suitable for the `-l` flag
pub library_links: Vec<String>,
/// Various `--cfg` flags to pass to the compiler
pub cfgs: Vec<String>,
/// Metadata to pass to the immediate dependencies
pub metadata: Vec<(String, String)>,
}
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
pub struct BuildState {
pub outputs: Mutex<BuildMap>,
}
#[derive(Default)]
pub struct BuildScripts {
pub to_link: BTreeSet<(PackageId, Kind)>,
pub plugins: BTreeSet<PackageId>,
}
/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare(cx: &mut Context, unit: &Unit)
-> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}",
unit.pkg, unit.target.name()));
let key = (unit.pkg.package_id().clone(), unit.kind);
let overridden = cx.build_state.outputs.lock().unwrap().contains_key(&key);
let (work_dirty, work_fresh) = if overridden {
(Work::new(|_| Ok(())), Work::new(|_| Ok(())))
} else {
try!(build_work(cx, unit))
};
// Now that we've prep'd our work, build the work needed to manage the
// fingerprint and then start returning that upwards.
let (freshness, dirty, fresh) =
try!(fingerprint::prepare_build_cmd(cx, unit));
Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
}
fn build_work(cx: &mut Context, unit: &Unit) -> CargoResult<(Work, Work)> {
let (script_output, build_output) = {
(cx.layout(unit.pkg, Kind::Host).build(unit.pkg),
cx.layout(unit.pkg, unit.kind).build_out(unit.pkg))
};
// Building the command to execute
let to_exec = script_output.join(unit.target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set with this the build script's profile but rather the
// package's library profile.
let profile = cx.lib_profile(unit.pkg.package_id());
let to_exec = to_exec.into_os_string();
let mut p = try!(super::process(CommandType::Host(to_exec), unit.pkg, cx));
p.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET", &match unit.kind {
Kind::Host => &cx.config.rustc_info().host[..],
Kind::Target => cx.target_triple(),
})
.env("DEBUG", &profile.debuginfo.to_string())
.env("OPT_LEVEL", &profile.opt_level.to_string())
.env("PROFILE", if cx.build_config.release {"release"} else {"debug"})
.env("HOST", &cx.config.rustc_info().host);
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
if let Some(features) = cx.resolve.features(unit.pkg.package_id()) {
for feat in features.iter() {
p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
}
}
// Gather the set of native dependencies that this package has along with
// some other variables to close over.
//
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
cx.dep_run_custom_build(unit).iter().filter_map(|unit| {
if unit.profile.run_custom_build {
Some((unit.pkg.manifest().links().unwrap().to_string(),
unit.pkg.package_id().clone()))
} else {
None
}
}).collect::<Vec<_>>()
};
let pkg_name = unit.pkg.to_string();
let build_state = cx.build_state.clone();
let id = unit.pkg.package_id().clone();
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
let build_scripts = super::load_build_deps(cx, unit);
let kind = unit.kind;
try!(fs::create_dir_all(&cx.layout(unit.pkg, Kind::Host).build(unit.pkg)));
try!(fs::create_dir_all(&cx.layout(unit.pkg, unit.kind).build(unit.pkg)));
let exec_engine = cx.exec_engine.clone();
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
//
// Note that this has to do some extra work just before running the command
// to determine extra environment variables and such.
let dirty = Work::new(move |desc_tx| {
// Make sure that OUT_DIR exists.
//
// If we have an old build directory, then just move it into place,
// otherwise create it!
if fs::metadata(&build_output).is_err() {
try!(fs::create_dir(&build_output).chain_error(|| {
internal("failed to create script output directory for \
build command")
}));
}
// For all our native lib dependencies, pick up their metadata to pass
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
{
let build_state = build_state.outputs.lock().unwrap();
for (name, id) in lib_deps {
let key = (id.clone(), kind);
let state = try!(build_state.get(&key).chain_error(|| {
internal(format!("failed to locate build state for env \
vars: {}/{:?}", id, kind))
}));
let data = &state.metadata;
for &(ref key, ref value) in data.iter() {
p.env(&format!("DEP_{}_{}", super::envify(&name),
super::envify(key)), value);
}
}
if let Some(build_scripts) = build_scripts {
try!(super::add_plugin_deps(&mut p, &build_state,
&build_scripts));
}
}
// And now finally, run the build command itself!
desc_tx.send(p.to_string()).ok();
let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| {
e.desc = format!("failed to run custom build command for `{}`\n{}",
pkg_name, e.desc);
Human(e)
}));
try!(paths::write(&build_output.parent().unwrap().join("output"),
&output.stdout));
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
//
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
let output = try!(str::from_utf8(&output.stdout).map_err(|_| {
human("build script output was not valid utf-8")
}));
let parsed_output = try!(BuildOutput::parse(output, &pkg_name));
build_state.insert(id, kind, parsed_output);
Ok(())
});
// Now that we've prepared our work-to-do, we need to prepare the fresh work
// itself to run when we actually end up just discarding what we calculated
// above.
let fresh = Work::new(move |_tx| {
let (id, pkg_name, build_state, build_output) = all;
let contents = try!(paths::read(&build_output.parent().unwrap()
.join("output")));
let output = try!(BuildOutput::parse(&contents, &pkg_name));
build_state.insert(id, kind, output);
Ok(())
});
Ok((dirty, fresh))
}
impl BuildState {
pub fn | (config: &super::BuildConfig,
packages: &PackageSet) -> BuildState {
let mut sources = HashMap::new();
for package in packages.iter() {
match package.manifest().links() {
Some(links) => {
sources.insert(links.to_string(),
package.package_id().clone());
}
None => {}
}
}
let mut outputs = HashMap::new();
let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
// If no package is using the library named `name`, then this is
// just an override that we ignore.
if let Some(id) = sources.get(name) {
outputs.insert((id.clone(), kind), output.clone());
}
}
BuildState { outputs: Mutex::new(outputs) }
}
fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {
self.outputs.lock().unwrap().insert((id, kind), output);
}
}
impl BuildOutput {
// Parses the output of a script.
// The `pkg_name` is used for error messages.
pub fn parse(input: &str, pkg_name: &str) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut cfgs = Vec::new();
let mut metadata = Vec::new();
let whence = format!("build script of `{}`", pkg_name);
for line in input.lines() {
let mut iter = line.splitn(2, ':');
if iter.next() != Some("cargo") {
// skip this line since it doesn't start with "cargo:"
continue;
}
let data = match iter.next() {
Some(val) => val,
None => continue
};
// getting the `key=value` part of the line
let mut iter = data.splitn(2, '=');
let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
(Some(a), Some(b)) => (a, b.trim_right()),
// line started with `cargo:` but didn't match `key=value`
_ => bail!("Wrong output in {}: `{}`", whence, line),
};
match key {
"rustc-flags" => {
let (libs, links) = try!(
BuildOutput::parse_rustc_flags(value, &whence)
);
library_links.extend(links.into_iter());
library_paths.extend(libs.into_iter());
}
"rustc-link-lib" => library_links.push(value.to_string()),
"rustc-link-search" => library_paths.push(PathBuf::from(value)),
"rustc-cfg" => cfgs.push(value.to_string()),
_ => metadata.push((key.to_string(), value.to_string())),
}
}
Ok(BuildOutput {
library_paths: library_paths,
library_links: library_links,
cfgs: cfgs,
metadata: metadata,
})
}
pub fn parse_rustc_flags(value: &str, whence: &str)
-> CargoResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());
loop {
let flag = match flags_iter.next() {
Some(f) => f,
None => break
};
if flag != "-l" && flag != "-L" {
bail!("Only `-l` and `-L` flags are allowed in {}: `{}`",
whence, value)
}
let value = match flags_iter.next() {
Some(v) => v,
None => bail!("Flag in rustc-flags has no value in {}: `{}`",
whence, value)
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
_ => bail!("only -l and -L flags are allowed")
};
}
Ok((library_paths, library_links))
}
}
/// Compute the `build_scripts` map in the `Context` which tracks what build
/// scripts each package depends on.
///
/// The global `build_scripts` map lists for all (package, kind) tuples what set
/// of packages' build script outputs must be considered. For example this lists
/// all dependencies' `-L` flags which need to be propagated transitively.
///
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
units: &[Unit<'b>]) {
let mut ret = HashMap::new();
for unit in units {
build(&mut ret, cx, unit);
}
cx.build_scripts.extend(ret.into_iter().map(|(k, v)| {
(k, Arc::new(v))
}));
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>,
cx: &Context<'b, 'cfg>,
unit: &Unit<'b>)
-> &'a BuildScripts {
// Do a quick pre-flight check to see if we've already calculated the
// set of dependencies.
if out.contains_key(unit) {
return &out[unit]
}
let mut to_link = BTreeSet::new();
let mut plugins = BTreeSet::new();
if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
to_link.insert((unit.pkg.package_id().clone(), unit.kind));
}
for unit in cx.dep_targets(unit).iter() {
let dep_scripts = build(out, cx, unit);
if unit.target.for_host() {
plugins.extend(dep_scripts.to_link.iter()
.map(|p| &p.0).cloned());
} else if unit.target.linkable() {
to_link.extend(dep_scripts.to_link.iter().cloned());
}
}
let prev = out.entry(*unit).or_insert(BuildScripts::default());
prev.to_link.extend(to_link);
prev.plugins.extend(plugins);
return prev
}
}
| new | identifier_name |
custom_build.rs | use std::collections::{HashMap, BTreeSet};
use std::fs;
use std::io::prelude::*;
use std::path::PathBuf;
use std::str;
use std::sync::{Mutex, Arc};
use core::{PackageId, PackageSet};
use util::{CargoResult, human, Human};
use util::{internal, ChainError, profile, paths};
use util::Freshness;
use super::job::Work;
use super::{fingerprint, process, Kind, Context, Unit};
use super::CommandType;
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug, Hash)]
pub struct BuildOutput {
/// Paths to pass to rustc with the `-L` flag
pub library_paths: Vec<PathBuf>,
/// Names and link kinds of libraries, suitable for the `-l` flag
pub library_links: Vec<String>,
/// Various `--cfg` flags to pass to the compiler
pub cfgs: Vec<String>,
/// Metadata to pass to the immediate dependencies
pub metadata: Vec<(String, String)>,
}
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
pub struct BuildState {
pub outputs: Mutex<BuildMap>,
}
#[derive(Default)]
pub struct BuildScripts {
pub to_link: BTreeSet<(PackageId, Kind)>,
pub plugins: BTreeSet<PackageId>,
}
/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare(cx: &mut Context, unit: &Unit)
-> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}",
unit.pkg, unit.target.name()));
let key = (unit.pkg.package_id().clone(), unit.kind);
let overridden = cx.build_state.outputs.lock().unwrap().contains_key(&key);
let (work_dirty, work_fresh) = if overridden {
(Work::new(|_| Ok(())), Work::new(|_| Ok(())))
} else {
try!(build_work(cx, unit))
};
// Now that we've prep'd our work, build the work needed to manage the
// fingerprint and then start returning that upwards.
let (freshness, dirty, fresh) =
try!(fingerprint::prepare_build_cmd(cx, unit));
Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
}
fn build_work(cx: &mut Context, unit: &Unit) -> CargoResult<(Work, Work)> {
let (script_output, build_output) = {
(cx.layout(unit.pkg, Kind::Host).build(unit.pkg),
cx.layout(unit.pkg, unit.kind).build_out(unit.pkg))
};
// Building the command to execute
let to_exec = script_output.join(unit.target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set with this the build script's profile but rather the
// package's library profile.
let profile = cx.lib_profile(unit.pkg.package_id());
let to_exec = to_exec.into_os_string();
let mut p = try!(super::process(CommandType::Host(to_exec), unit.pkg, cx));
p.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET", &match unit.kind {
Kind::Host => &cx.config.rustc_info().host[..],
Kind::Target => cx.target_triple(),
})
.env("DEBUG", &profile.debuginfo.to_string())
.env("OPT_LEVEL", &profile.opt_level.to_string())
.env("PROFILE", if cx.build_config.release {"release"} else {"debug"})
.env("HOST", &cx.config.rustc_info().host);
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
if let Some(features) = cx.resolve.features(unit.pkg.package_id()) {
for feat in features.iter() {
p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
}
}
// Gather the set of native dependencies that this package has along with
// some other variables to close over.
//
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
cx.dep_run_custom_build(unit).iter().filter_map(|unit| {
if unit.profile.run_custom_build {
Some((unit.pkg.manifest().links().unwrap().to_string(),
unit.pkg.package_id().clone()))
} else {
None
}
}).collect::<Vec<_>>()
};
let pkg_name = unit.pkg.to_string();
let build_state = cx.build_state.clone();
let id = unit.pkg.package_id().clone();
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
let build_scripts = super::load_build_deps(cx, unit);
let kind = unit.kind;
try!(fs::create_dir_all(&cx.layout(unit.pkg, Kind::Host).build(unit.pkg)));
try!(fs::create_dir_all(&cx.layout(unit.pkg, unit.kind).build(unit.pkg)));
let exec_engine = cx.exec_engine.clone();
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
//
// Note that this has to do some extra work just before running the command
// to determine extra environment variables and such.
let dirty = Work::new(move |desc_tx| {
// Make sure that OUT_DIR exists.
//
// If we have an old build directory, then just move it into place,
// otherwise create it!
if fs::metadata(&build_output).is_err() {
try!(fs::create_dir(&build_output).chain_error(|| {
internal("failed to create script output directory for \
build command")
}));
}
// For all our native lib dependencies, pick up their metadata to pass
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
{
let build_state = build_state.outputs.lock().unwrap();
for (name, id) in lib_deps {
let key = (id.clone(), kind);
let state = try!(build_state.get(&key).chain_error(|| {
internal(format!("failed to locate build state for env \
vars: {}/{:?}", id, kind))
}));
let data = &state.metadata;
for &(ref key, ref value) in data.iter() {
p.env(&format!("DEP_{}_{}", super::envify(&name),
super::envify(key)), value);
}
}
if let Some(build_scripts) = build_scripts {
try!(super::add_plugin_deps(&mut p, &build_state,
&build_scripts));
}
}
// And now finally, run the build command itself!
desc_tx.send(p.to_string()).ok();
let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| {
e.desc = format!("failed to run custom build command for `{}`\n{}",
pkg_name, e.desc);
Human(e)
}));
try!(paths::write(&build_output.parent().unwrap().join("output"),
&output.stdout));
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
//
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
let output = try!(str::from_utf8(&output.stdout).map_err(|_| {
human("build script output was not valid utf-8")
}));
let parsed_output = try!(BuildOutput::parse(output, &pkg_name));
build_state.insert(id, kind, parsed_output);
Ok(())
});
// Now that we've prepared our work-to-do, we need to prepare the fresh work
// itself to run when we actually end up just discarding what we calculated
// above.
let fresh = Work::new(move |_tx| {
let (id, pkg_name, build_state, build_output) = all;
let contents = try!(paths::read(&build_output.parent().unwrap()
.join("output")));
let output = try!(BuildOutput::parse(&contents, &pkg_name));
build_state.insert(id, kind, output);
Ok(())
});
Ok((dirty, fresh))
}
impl BuildState {
pub fn new(config: &super::BuildConfig,
packages: &PackageSet) -> BuildState {
let mut sources = HashMap::new();
for package in packages.iter() {
match package.manifest().links() {
Some(links) => {
sources.insert(links.to_string(),
package.package_id().clone());
}
None => {}
}
}
let mut outputs = HashMap::new();
let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
// If no package is using the library named `name`, then this is
// just an override that we ignore.
if let Some(id) = sources.get(name) {
outputs.insert((id.clone(), kind), output.clone());
}
}
BuildState { outputs: Mutex::new(outputs) }
}
fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {
self.outputs.lock().unwrap().insert((id, kind), output);
}
}
impl BuildOutput {
// Parses the output of a script.
// The `pkg_name` is used for error messages.
pub fn parse(input: &str, pkg_name: &str) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut cfgs = Vec::new();
let mut metadata = Vec::new();
let whence = format!("build script of `{}`", pkg_name);
for line in input.lines() {
let mut iter = line.splitn(2, ':');
if iter.next() != Some("cargo") {
// skip this line since it doesn't start with "cargo:"
continue;
}
let data = match iter.next() {
Some(val) => val,
None => continue
};
// getting the `key=value` part of the line
let mut iter = data.splitn(2, '='); | // line started with `cargo:` but didn't match `key=value`
_ => bail!("Wrong output in {}: `{}`", whence, line),
};
match key {
"rustc-flags" => {
let (libs, links) = try!(
BuildOutput::parse_rustc_flags(value, &whence)
);
library_links.extend(links.into_iter());
library_paths.extend(libs.into_iter());
}
"rustc-link-lib" => library_links.push(value.to_string()),
"rustc-link-search" => library_paths.push(PathBuf::from(value)),
"rustc-cfg" => cfgs.push(value.to_string()),
_ => metadata.push((key.to_string(), value.to_string())),
}
}
Ok(BuildOutput {
library_paths: library_paths,
library_links: library_links,
cfgs: cfgs,
metadata: metadata,
})
}
pub fn parse_rustc_flags(value: &str, whence: &str)
-> CargoResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());
loop {
let flag = match flags_iter.next() {
Some(f) => f,
None => break
};
if flag != "-l" && flag != "-L" {
bail!("Only `-l` and `-L` flags are allowed in {}: `{}`",
whence, value)
}
let value = match flags_iter.next() {
Some(v) => v,
None => bail!("Flag in rustc-flags has no value in {}: `{}`",
whence, value)
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
_ => bail!("only -l and -L flags are allowed")
};
}
Ok((library_paths, library_links))
}
}
/// Compute the `build_scripts` map in the `Context` which tracks what build
/// scripts each package depends on.
///
/// The global `build_scripts` map lists for all (package, kind) tuples what set
/// of packages' build script outputs must be considered. For example this lists
/// all dependencies' `-L` flags which need to be propagated transitively.
///
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
units: &[Unit<'b>]) {
let mut ret = HashMap::new();
for unit in units {
build(&mut ret, cx, unit);
}
cx.build_scripts.extend(ret.into_iter().map(|(k, v)| {
(k, Arc::new(v))
}));
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>,
cx: &Context<'b, 'cfg>,
unit: &Unit<'b>)
-> &'a BuildScripts {
// Do a quick pre-flight check to see if we've already calculated the
// set of dependencies.
if out.contains_key(unit) {
return &out[unit]
}
let mut to_link = BTreeSet::new();
let mut plugins = BTreeSet::new();
if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
to_link.insert((unit.pkg.package_id().clone(), unit.kind));
}
for unit in cx.dep_targets(unit).iter() {
let dep_scripts = build(out, cx, unit);
if unit.target.for_host() {
plugins.extend(dep_scripts.to_link.iter()
.map(|p| &p.0).cloned());
} else if unit.target.linkable() {
to_link.extend(dep_scripts.to_link.iter().cloned());
}
}
let prev = out.entry(*unit).or_insert(BuildScripts::default());
prev.to_link.extend(to_link);
prev.plugins.extend(plugins);
return prev
}
} | let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
(Some(a), Some(b)) => (a, b.trim_right()), | random_line_split |
flex-item.directive.ts | import {Directive, Input, HostBinding, ElementRef} from "@angular/core";
/**
* A directive to control flex items layout properties.
*/
@Directive({
selector: '[flex]'
})
export class FlexItemDirective{
/**
* Controls the flex-basis property.
* @type {string} size value (px, vh, vp, em, %, etc...)
*/
@HostBinding('style.flex-basis')
@HostBinding('style.-webkit-flex-basis')
@Input('flex')
basis: string = 'auto';
/**
* Controls the flex-grow property.
* @type {number} positive integer.
*/
@HostBinding('style.flex-grow')
@HostBinding('style.-webkit-flex-grow')
@Input()
grow: number = 0;
/**
* Controls the flex-shrink property.
* @type {number} positive integer
*/
@HostBinding('style.flex-shrink')
@HostBinding('style.-webkit-flex-shrink')
@Input()
shrink: number = 1;
/**
* Controls the flex align-self property.
*/
@HostBinding('style.align-self') |
constructor(private el: ElementRef) {}
@Input("gravity")
set gravity(value: string) {
switch (value){
case 'start':
this._gravity = 'flex-start';
break;
case 'center':
this._gravity = 'center';
break;
case 'end':
this._gravity = 'flex-end';
break;
case 'fill':
this._gravity = 'stretch';
break;
case 'baseline':
this._gravity = 'baseline';
break;
default:
this._gravity = 'inherit';
break;
}
}
} | private _gravity: string = 'inherit'; | random_line_split |
flex-item.directive.ts | import {Directive, Input, HostBinding, ElementRef} from "@angular/core";
/**
* A directive to control flex items layout properties.
*/
@Directive({
selector: '[flex]'
})
export class | {
/**
* Controls the flex-basis property.
* @type {string} size value (px, vh, vp, em, %, etc...)
*/
@HostBinding('style.flex-basis')
@HostBinding('style.-webkit-flex-basis')
@Input('flex')
basis: string = 'auto';
/**
* Controls the flex-grow property.
* @type {number} positive integer.
*/
@HostBinding('style.flex-grow')
@HostBinding('style.-webkit-flex-grow')
@Input()
grow: number = 0;
/**
* Controls the flex-shrink property.
* @type {number} positive integer
*/
@HostBinding('style.flex-shrink')
@HostBinding('style.-webkit-flex-shrink')
@Input()
shrink: number = 1;
/**
* Controls the flex align-self property.
*/
@HostBinding('style.align-self')
private _gravity: string = 'inherit';
constructor(private el: ElementRef) {}
@Input("gravity")
set gravity(value: string) {
switch (value){
case 'start':
this._gravity = 'flex-start';
break;
case 'center':
this._gravity = 'center';
break;
case 'end':
this._gravity = 'flex-end';
break;
case 'fill':
this._gravity = 'stretch';
break;
case 'baseline':
this._gravity = 'baseline';
break;
default:
this._gravity = 'inherit';
break;
}
}
}
| FlexItemDirective | identifier_name |
time.rs | //! Utilities for mapping between human-usable time units and BAPS3's
//! preferred time units.
/// Enum of available time units.
///
/// This does not contain every possible time unit anyone may want to use with
/// a BAPS3 client, but covers the main possibilities.
///
/// Each unit specified in terms of its equivalent in microseconds, which is
/// the unit used 'over the wire' when talking to BAPS3.
#[derive(Copy)]
pub enum | {
/// Hours (1 hour = 60 minutes)
Hours,
/// Minutes (1 minute = 60 seconds).
Minutes,
/// Seconds (1 second = 1,000 milliseconds).
Seconds,
/// Milliseconds (1 millisecond = 1,000 microseconds).
Milliseconds,
/// Microseconds (the BAPS3 base unit).
Microseconds
}
impl TimeUnit {
/// Returns the suffix of the given unit.
///
/// This is mainly for use in human-readable times.
pub fn suffix(&self) -> &'static str {
match *self {
TimeUnit::Hours => "h",
TimeUnit::Minutes => "m",
TimeUnit::Seconds => "s",
TimeUnit::Milliseconds => "ms",
TimeUnit::Microseconds => "us"
}
}
/// Returns the equivalent of `n` of the given unit in microseconds.
pub fn as_micros(&self, n: u64) -> u64 {
match *self {
TimeUnit::Hours => n * 1000 * 1000 * 60 * 60,
TimeUnit::Minutes => n * 1000 * 1000 * 60,
TimeUnit::Seconds => n * 1000 * 1000,
TimeUnit::Milliseconds => n * 1000,
TimeUnit::Microseconds => n
}
}
/// Returns the equivalent of `n` microseconds in the given unit.
///
/// As the return value is an integer, there may be some rounding down.
///
/// # Examples
///
/// 1 million microseconds is equivalent to 1 second:
///
/// ```rust
/// use baps3_cli::time::TimeUnit;
/// assert_eq!(TimeUnit::Seconds.from_micros(1000000), 1)
/// ```
///
/// Translating one hour of time to microseconds and back is the identity:
///
/// ```rust
/// use baps3_cli::time::TimeUnit;
/// let hour_us = TimeUnit::Hours.as_micros(1);
/// assert_eq!(TimeUnit::Hours.from_micros(hour_us), 1)
/// ```
pub fn from_micros(&self, n: u64) -> u64 {
match *self {
TimeUnit::Hours => n / 1000 / 1000 / 60 / 60,
TimeUnit::Minutes => n / 1000 / 1000 / 60,
TimeUnit::Seconds => n / 1000 / 1000,
TimeUnit::Milliseconds => n / 1000,
TimeUnit::Microseconds => n
}
}
/// Multiplexes a series of unit flags into a TimeUnit.
/// Larger units take precedence.
pub fn from_flags(h: bool, m: bool, s: bool, ms: bool) -> TimeUnit {
if h { TimeUnit::Hours }
else if m { TimeUnit::Minutes }
else if s { TimeUnit::Seconds }
else if ms { TimeUnit::Milliseconds }
else { TimeUnit::Microseconds }
}
} | TimeUnit | identifier_name |
time.rs | //! Utilities for mapping between human-usable time units and BAPS3's
//! preferred time units.
/// Enum of available time units.
///
/// This does not contain every possible time unit anyone may want to use with
/// a BAPS3 client, but covers the main possibilities.
///
/// Each unit specified in terms of its equivalent in microseconds, which is
/// the unit used 'over the wire' when talking to BAPS3.
#[derive(Copy)]
pub enum TimeUnit {
/// Hours (1 hour = 60 minutes)
Hours,
/// Minutes (1 minute = 60 seconds).
Minutes,
/// Seconds (1 second = 1,000 milliseconds).
Seconds,
/// Milliseconds (1 millisecond = 1,000 microseconds).
Milliseconds,
/// Microseconds (the BAPS3 base unit).
Microseconds
}
impl TimeUnit {
/// Returns the suffix of the given unit.
///
/// This is mainly for use in human-readable times.
pub fn suffix(&self) -> &'static str {
match *self {
TimeUnit::Hours => "h",
TimeUnit::Minutes => "m",
TimeUnit::Seconds => "s",
TimeUnit::Milliseconds => "ms",
TimeUnit::Microseconds => "us"
}
}
/// Returns the equivalent of `n` of the given unit in microseconds.
pub fn as_micros(&self, n: u64) -> u64 {
match *self {
TimeUnit::Hours => n * 1000 * 1000 * 60 * 60,
TimeUnit::Minutes => n * 1000 * 1000 * 60,
TimeUnit::Seconds => n * 1000 * 1000,
TimeUnit::Milliseconds => n * 1000,
TimeUnit::Microseconds => n
}
}
/// Returns the equivalent of `n` microseconds in the given unit.
///
/// As the return value is an integer, there may be some rounding down.
///
/// # Examples
///
/// 1 million microseconds is equivalent to 1 second:
///
/// ```rust
/// use baps3_cli::time::TimeUnit;
/// assert_eq!(TimeUnit::Seconds.from_micros(1000000), 1)
/// ```
///
/// Translating one hour of time to microseconds and back is the identity:
///
/// ```rust
/// use baps3_cli::time::TimeUnit;
/// let hour_us = TimeUnit::Hours.as_micros(1);
/// assert_eq!(TimeUnit::Hours.from_micros(hour_us), 1)
/// ```
pub fn from_micros(&self, n: u64) -> u64 {
match *self {
TimeUnit::Hours => n / 1000 / 1000 / 60 / 60,
TimeUnit::Minutes => n / 1000 / 1000 / 60,
TimeUnit::Seconds => n / 1000 / 1000,
TimeUnit::Milliseconds => n / 1000,
TimeUnit::Microseconds => n
}
} |
/// Multiplexes a series of unit flags into a TimeUnit.
/// Larger units take precedence.
pub fn from_flags(h: bool, m: bool, s: bool, ms: bool) -> TimeUnit {
if h { TimeUnit::Hours }
else if m { TimeUnit::Minutes }
else if s { TimeUnit::Seconds }
else if ms { TimeUnit::Milliseconds }
else { TimeUnit::Microseconds }
}
} | random_line_split | |
time.rs | //! Utilities for mapping between human-usable time units and BAPS3's
//! preferred time units.
/// Enum of available time units.
///
/// This does not contain every possible time unit anyone may want to use with
/// a BAPS3 client, but covers the main possibilities.
///
/// Each unit specified in terms of its equivalent in microseconds, which is
/// the unit used 'over the wire' when talking to BAPS3.
#[derive(Copy)]
pub enum TimeUnit {
/// Hours (1 hour = 60 minutes)
Hours,
/// Minutes (1 minute = 60 seconds).
Minutes,
/// Seconds (1 second = 1,000 milliseconds).
Seconds,
/// Milliseconds (1 millisecond = 1,000 microseconds).
Milliseconds,
/// Microseconds (the BAPS3 base unit).
Microseconds
}
impl TimeUnit {
/// Returns the suffix of the given unit.
///
/// This is mainly for use in human-readable times.
pub fn suffix(&self) -> &'static str {
match *self {
TimeUnit::Hours => "h",
TimeUnit::Minutes => "m",
TimeUnit::Seconds => "s",
TimeUnit::Milliseconds => "ms",
TimeUnit::Microseconds => "us"
}
}
/// Returns the equivalent of `n` of the given unit in microseconds.
pub fn as_micros(&self, n: u64) -> u64 {
match *self {
TimeUnit::Hours => n * 1000 * 1000 * 60 * 60,
TimeUnit::Minutes => n * 1000 * 1000 * 60,
TimeUnit::Seconds => n * 1000 * 1000,
TimeUnit::Milliseconds => n * 1000,
TimeUnit::Microseconds => n
}
}
/// Returns the equivalent of `n` microseconds in the given unit.
///
/// As the return value is an integer, there may be some rounding down.
///
/// # Examples
///
/// 1 million microseconds is equivalent to 1 second:
///
/// ```rust
/// use baps3_cli::time::TimeUnit;
/// assert_eq!(TimeUnit::Seconds.from_micros(1000000), 1)
/// ```
///
/// Translating one hour of time to microseconds and back is the identity:
///
/// ```rust
/// use baps3_cli::time::TimeUnit;
/// let hour_us = TimeUnit::Hours.as_micros(1);
/// assert_eq!(TimeUnit::Hours.from_micros(hour_us), 1)
/// ```
pub fn from_micros(&self, n: u64) -> u64 {
match *self {
TimeUnit::Hours => n / 1000 / 1000 / 60 / 60,
TimeUnit::Minutes => n / 1000 / 1000 / 60,
TimeUnit::Seconds => n / 1000 / 1000,
TimeUnit::Milliseconds => n / 1000,
TimeUnit::Microseconds => n
}
}
/// Multiplexes a series of unit flags into a TimeUnit.
/// Larger units take precedence.
pub fn from_flags(h: bool, m: bool, s: bool, ms: bool) -> TimeUnit |
} | {
if h { TimeUnit::Hours }
else if m { TimeUnit::Minutes }
else if s { TimeUnit::Seconds }
else if ms { TimeUnit::Milliseconds }
else { TimeUnit::Microseconds }
} | identifier_body |
time.rs | //! Utilities for mapping between human-usable time units and BAPS3's
//! preferred time units.
/// Enum of available time units.
///
/// This does not contain every possible time unit anyone may want to use with
/// a BAPS3 client, but covers the main possibilities.
///
/// Each unit specified in terms of its equivalent in microseconds, which is
/// the unit used 'over the wire' when talking to BAPS3.
#[derive(Copy)]
pub enum TimeUnit {
/// Hours (1 hour = 60 minutes)
Hours,
/// Minutes (1 minute = 60 seconds).
Minutes,
/// Seconds (1 second = 1,000 milliseconds).
Seconds,
/// Milliseconds (1 millisecond = 1,000 microseconds).
Milliseconds,
/// Microseconds (the BAPS3 base unit).
Microseconds
}
impl TimeUnit {
/// Returns the suffix of the given unit.
///
/// This is mainly for use in human-readable times.
pub fn suffix(&self) -> &'static str {
match *self {
TimeUnit::Hours => "h",
TimeUnit::Minutes => "m",
TimeUnit::Seconds => "s",
TimeUnit::Milliseconds => "ms",
TimeUnit::Microseconds => "us"
}
}
/// Returns the equivalent of `n` of the given unit in microseconds.
pub fn as_micros(&self, n: u64) -> u64 {
match *self {
TimeUnit::Hours => n * 1000 * 1000 * 60 * 60,
TimeUnit::Minutes => n * 1000 * 1000 * 60,
TimeUnit::Seconds => n * 1000 * 1000,
TimeUnit::Milliseconds => n * 1000,
TimeUnit::Microseconds => n
}
}
/// Returns the equivalent of `n` microseconds in the given unit.
///
/// As the return value is an integer, there may be some rounding down.
///
/// # Examples
///
/// 1 million microseconds is equivalent to 1 second:
///
/// ```rust
/// use baps3_cli::time::TimeUnit;
/// assert_eq!(TimeUnit::Seconds.from_micros(1000000), 1)
/// ```
///
/// Translating one hour of time to microseconds and back is the identity:
///
/// ```rust
/// use baps3_cli::time::TimeUnit;
/// let hour_us = TimeUnit::Hours.as_micros(1);
/// assert_eq!(TimeUnit::Hours.from_micros(hour_us), 1)
/// ```
pub fn from_micros(&self, n: u64) -> u64 {
match *self {
TimeUnit::Hours => n / 1000 / 1000 / 60 / 60,
TimeUnit::Minutes => n / 1000 / 1000 / 60,
TimeUnit::Seconds => n / 1000 / 1000,
TimeUnit::Milliseconds => n / 1000,
TimeUnit::Microseconds => n
}
}
/// Multiplexes a series of unit flags into a TimeUnit.
/// Larger units take precedence.
pub fn from_flags(h: bool, m: bool, s: bool, ms: bool) -> TimeUnit {
if h { TimeUnit::Hours }
else if m { TimeUnit::Minutes }
else if s { TimeUnit::Seconds }
else if ms |
else { TimeUnit::Microseconds }
}
} | { TimeUnit::Milliseconds } | conditional_block |
mem.rs | // Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use kvproto::encryptionpb::EncryptedContent;
use super::metadata::*;
use crate::crypter::*;
use crate::{AesGcmCrypter, Error, Iv, Result};
/// An in-memory backend, it saves master key in memory.
pub(crate) struct MemAesGcmBackend {
pub key: Vec<u8>,
}
impl MemAesGcmBackend {
pub fn new(key: Vec<u8>) -> Result<MemAesGcmBackend> {
if key.len() != AesGcmCrypter::KEY_LEN {
return Err(box_err!(
"encryption method and key length mismatch, expect {} get {}",
AesGcmCrypter::KEY_LEN,
key.len()
));
}
Ok(MemAesGcmBackend { key })
}
pub fn encrypt_content(&self, plaintext: &[u8], iv: Iv) -> Result<EncryptedContent> {
let mut content = EncryptedContent::default();
content.mut_metadata().insert(
MetadataKey::Method.as_str().to_owned(),
MetadataMethod::Aes256Gcm.as_slice().to_vec(),
);
let iv_value = iv.as_slice().to_vec();
content
.mut_metadata()
.insert(MetadataKey::Iv.as_str().to_owned(), iv_value);
let (ciphertext, gcm_tag) = AesGcmCrypter::new(&self.key, iv).encrypt(plaintext)?;
content.set_content(ciphertext);
content.mut_metadata().insert(
MetadataKey::AesGcmTag.as_str().to_owned(),
gcm_tag.as_slice().to_owned(),
);
Ok(content)
}
// On decrypt failure, the rule is to return WrongMasterKey error in case it is possible that
// a wrong master key has been used, or other error otherwise.
pub fn decrypt_content(&self, content: &EncryptedContent) -> Result<Vec<u8>> {
let method = content
.get_metadata()
.get(MetadataKey::Method.as_str())
.ok_or_else(|| {
// Missing method in metadata. The metadata of the encrypted content is invalid or
// corrupted.
Error::Other(box_err!(
"metadata {} not found",
MetadataKey::Method.as_str()
))
})?;
if method.as_slice() != MetadataMethod::Aes256Gcm.as_slice() |
let key = &self.key;
let iv_value = content
.get_metadata()
.get(MetadataKey::Iv.as_str())
.ok_or_else(|| {
// IV is missing. The metadata of the encrypted content is invalid or corrupted.
Error::Other(box_err!("metadata {} not found", MetadataKey::Iv.as_str()))
})?;
let iv = Iv::from_slice(iv_value.as_slice())?;
let tag = content
.get_metadata()
.get(MetadataKey::AesGcmTag.as_str())
.ok_or_else(|| {
// Tag is missing. The metadata of the encrypted content is invalid or corrupted.
Error::Other(box_err!("gcm tag not found"))
})?;
let gcm_tag = AesGcmTag::from(tag.as_slice());
let ciphertext = content.get_content();
let plaintext = AesGcmCrypter::new(key, iv)
.decrypt(ciphertext, gcm_tag)
.map_err(|e|
// Decryption error, likely caused by mismatched tag. It could be the tag is
// corrupted, or the encrypted content is fake by an attacker, but more likely
// it is caused by a wrong master key being used.
Error::WrongMasterKey(box_err!("decrypt in GCM mode failed: {}", e)))?;
Ok(plaintext)
}
}
#[cfg(test)]
mod tests {
use hex::FromHex;
use matches::assert_matches;
use super::*;
#[test]
fn test_mem_backend_ase_256_gcm() {
// See more http://csrc.nist.gov/groups/STM/cavp/documents/mac/gcmtestvectors.zip
let pt = Vec::from_hex("25431587e9ecffc7c37f8d6d52a9bc3310651d46fb0e3bad2726c8f2db653749")
.unwrap();
let ct = Vec::from_hex("84e5f23f95648fa247cb28eef53abec947dbf05ac953734618111583840bd980")
.unwrap();
let key = Vec::from_hex("c3d99825f2181f4808acd2068eac7441a65bd428f14d2aab43fefc0129091139")
.unwrap();
let iv = Vec::from_hex("cafabd9672ca6c79a2fbdc22").unwrap();
let backend = MemAesGcmBackend::new(key).unwrap();
let iv = Iv::from_slice(iv.as_slice()).unwrap();
let encrypted_content = backend.encrypt_content(&pt, iv).unwrap();
assert_eq!(encrypted_content.get_content(), ct.as_slice());
let plaintext = backend.decrypt_content(&encrypted_content).unwrap();
assert_eq!(plaintext, pt);
}
#[test]
fn test_mem_backend_authenticate() {
let pt = vec![1u8, 2, 3];
let key = Vec::from_hex("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4")
.unwrap();
let backend = MemAesGcmBackend::new(key).unwrap();
let encrypted_content = backend.encrypt_content(&pt, Iv::new_gcm()).unwrap();
let plaintext = backend.decrypt_content(&encrypted_content).unwrap();
assert_eq!(plaintext, pt);
// Must fail is method not found.
let mut encrypted_content_missing_method = encrypted_content.clone();
encrypted_content_missing_method
.mut_metadata()
.remove(MetadataKey::Method.as_str());
assert_matches!(
backend
.decrypt_content(&encrypted_content_missing_method)
.unwrap_err(),
Error::Other(_)
);
// Must fail if method is not aes256-gcm.
let mut encrypted_content_invalid_method = encrypted_content.clone();
let mut invalid_suffix = b"_invalid".to_vec();
encrypted_content_invalid_method
.mut_metadata()
.get_mut(MetadataKey::Method.as_str())
.unwrap()
.append(&mut invalid_suffix);
assert_matches!(
backend
.decrypt_content(&encrypted_content_invalid_method)
.unwrap_err(),
Error::Other(_)
);
// Must fail if tag not found.
let mut encrypted_content_missing_tag = encrypted_content.clone();
encrypted_content_missing_tag
.mut_metadata()
.remove(MetadataKey::AesGcmTag.as_str());
assert_matches!(
backend
.decrypt_content(&encrypted_content_missing_tag)
.unwrap_err(),
Error::Other(_)
);
// Must fail with WrongMasterKey error due to mismatched tag.
let mut encrypted_content_mismatch_tag = encrypted_content;
encrypted_content_mismatch_tag
.mut_metadata()
.get_mut(MetadataKey::AesGcmTag.as_str())
.unwrap()[0] ^= 0b11111111u8;
assert_matches!(
backend
.decrypt_content(&encrypted_content_mismatch_tag)
.unwrap_err(),
Error::WrongMasterKey(_)
);
}
}
| {
// Currently we only support aes256-gcm. A different method could mean the encrypted
// content is written by a future version of TiKV, and we don't know how to handle it.
// Fail immediately instead of fallback to previous key.
return Err(Error::Other(box_err!(
"encryption method mismatch, expected {:?} vs actual {:?}",
MetadataMethod::Aes256Gcm.as_slice(),
method
)));
} | conditional_block |
mem.rs | // Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use kvproto::encryptionpb::EncryptedContent;
use super::metadata::*;
use crate::crypter::*;
use crate::{AesGcmCrypter, Error, Iv, Result};
/// An in-memory backend, it saves master key in memory.
pub(crate) struct MemAesGcmBackend {
pub key: Vec<u8>,
}
impl MemAesGcmBackend {
pub fn new(key: Vec<u8>) -> Result<MemAesGcmBackend> {
if key.len() != AesGcmCrypter::KEY_LEN {
return Err(box_err!(
"encryption method and key length mismatch, expect {} get {}",
AesGcmCrypter::KEY_LEN,
key.len()
));
}
Ok(MemAesGcmBackend { key })
}
pub fn encrypt_content(&self, plaintext: &[u8], iv: Iv) -> Result<EncryptedContent> {
let mut content = EncryptedContent::default();
content.mut_metadata().insert(
MetadataKey::Method.as_str().to_owned(),
MetadataMethod::Aes256Gcm.as_slice().to_vec(),
);
let iv_value = iv.as_slice().to_vec();
content
.mut_metadata()
.insert(MetadataKey::Iv.as_str().to_owned(), iv_value);
let (ciphertext, gcm_tag) = AesGcmCrypter::new(&self.key, iv).encrypt(plaintext)?;
content.set_content(ciphertext);
content.mut_metadata().insert(
MetadataKey::AesGcmTag.as_str().to_owned(),
gcm_tag.as_slice().to_owned(),
);
Ok(content)
}
// On decrypt failure, the rule is to return WrongMasterKey error in case it is possible that
// a wrong master key has been used, or other error otherwise.
pub fn decrypt_content(&self, content: &EncryptedContent) -> Result<Vec<u8>> {
let method = content
.get_metadata()
.get(MetadataKey::Method.as_str())
.ok_or_else(|| {
// Missing method in metadata. The metadata of the encrypted content is invalid or
// corrupted.
Error::Other(box_err!(
"metadata {} not found",
MetadataKey::Method.as_str()
))
})?;
if method.as_slice() != MetadataMethod::Aes256Gcm.as_slice() {
// Currently we only support aes256-gcm. A different method could mean the encrypted
// content is written by a future version of TiKV, and we don't know how to handle it.
// Fail immediately instead of fallback to previous key.
return Err(Error::Other(box_err!(
"encryption method mismatch, expected {:?} vs actual {:?}",
MetadataMethod::Aes256Gcm.as_slice(),
method
)));
}
let key = &self.key;
let iv_value = content
.get_metadata()
.get(MetadataKey::Iv.as_str())
.ok_or_else(|| {
// IV is missing. The metadata of the encrypted content is invalid or corrupted.
Error::Other(box_err!("metadata {} not found", MetadataKey::Iv.as_str()))
})?;
let iv = Iv::from_slice(iv_value.as_slice())?;
let tag = content
.get_metadata()
.get(MetadataKey::AesGcmTag.as_str())
.ok_or_else(|| {
// Tag is missing. The metadata of the encrypted content is invalid or corrupted.
Error::Other(box_err!("gcm tag not found"))
})?;
let gcm_tag = AesGcmTag::from(tag.as_slice());
let ciphertext = content.get_content();
let plaintext = AesGcmCrypter::new(key, iv)
.decrypt(ciphertext, gcm_tag)
.map_err(|e|
// Decryption error, likely caused by mismatched tag. It could be the tag is
// corrupted, or the encrypted content is fake by an attacker, but more likely
// it is caused by a wrong master key being used.
Error::WrongMasterKey(box_err!("decrypt in GCM mode failed: {}", e)))?;
Ok(plaintext)
}
}
#[cfg(test)]
mod tests {
use hex::FromHex;
use matches::assert_matches;
use super::*;
#[test]
fn test_mem_backend_ase_256_gcm() |
#[test]
fn test_mem_backend_authenticate() {
let pt = vec![1u8, 2, 3];
let key = Vec::from_hex("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4")
.unwrap();
let backend = MemAesGcmBackend::new(key).unwrap();
let encrypted_content = backend.encrypt_content(&pt, Iv::new_gcm()).unwrap();
let plaintext = backend.decrypt_content(&encrypted_content).unwrap();
assert_eq!(plaintext, pt);
// Must fail is method not found.
let mut encrypted_content_missing_method = encrypted_content.clone();
encrypted_content_missing_method
.mut_metadata()
.remove(MetadataKey::Method.as_str());
assert_matches!(
backend
.decrypt_content(&encrypted_content_missing_method)
.unwrap_err(),
Error::Other(_)
);
// Must fail if method is not aes256-gcm.
let mut encrypted_content_invalid_method = encrypted_content.clone();
let mut invalid_suffix = b"_invalid".to_vec();
encrypted_content_invalid_method
.mut_metadata()
.get_mut(MetadataKey::Method.as_str())
.unwrap()
.append(&mut invalid_suffix);
assert_matches!(
backend
.decrypt_content(&encrypted_content_invalid_method)
.unwrap_err(),
Error::Other(_)
);
// Must fail if tag not found.
let mut encrypted_content_missing_tag = encrypted_content.clone();
encrypted_content_missing_tag
.mut_metadata()
.remove(MetadataKey::AesGcmTag.as_str());
assert_matches!(
backend
.decrypt_content(&encrypted_content_missing_tag)
.unwrap_err(),
Error::Other(_)
);
// Must fail with WrongMasterKey error due to mismatched tag.
let mut encrypted_content_mismatch_tag = encrypted_content;
encrypted_content_mismatch_tag
.mut_metadata()
.get_mut(MetadataKey::AesGcmTag.as_str())
.unwrap()[0] ^= 0b11111111u8;
assert_matches!(
backend
.decrypt_content(&encrypted_content_mismatch_tag)
.unwrap_err(),
Error::WrongMasterKey(_)
);
}
}
| {
// See more http://csrc.nist.gov/groups/STM/cavp/documents/mac/gcmtestvectors.zip
let pt = Vec::from_hex("25431587e9ecffc7c37f8d6d52a9bc3310651d46fb0e3bad2726c8f2db653749")
.unwrap();
let ct = Vec::from_hex("84e5f23f95648fa247cb28eef53abec947dbf05ac953734618111583840bd980")
.unwrap();
let key = Vec::from_hex("c3d99825f2181f4808acd2068eac7441a65bd428f14d2aab43fefc0129091139")
.unwrap();
let iv = Vec::from_hex("cafabd9672ca6c79a2fbdc22").unwrap();
let backend = MemAesGcmBackend::new(key).unwrap();
let iv = Iv::from_slice(iv.as_slice()).unwrap();
let encrypted_content = backend.encrypt_content(&pt, iv).unwrap();
assert_eq!(encrypted_content.get_content(), ct.as_slice());
let plaintext = backend.decrypt_content(&encrypted_content).unwrap();
assert_eq!(plaintext, pt);
} | identifier_body |
mem.rs | // Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use kvproto::encryptionpb::EncryptedContent;
use super::metadata::*;
use crate::crypter::*;
use crate::{AesGcmCrypter, Error, Iv, Result};
/// An in-memory backend, it saves master key in memory.
pub(crate) struct MemAesGcmBackend {
pub key: Vec<u8>,
}
impl MemAesGcmBackend {
pub fn new(key: Vec<u8>) -> Result<MemAesGcmBackend> {
if key.len() != AesGcmCrypter::KEY_LEN {
return Err(box_err!(
"encryption method and key length mismatch, expect {} get {}",
AesGcmCrypter::KEY_LEN,
key.len()
));
}
Ok(MemAesGcmBackend { key })
}
pub fn encrypt_content(&self, plaintext: &[u8], iv: Iv) -> Result<EncryptedContent> {
let mut content = EncryptedContent::default();
content.mut_metadata().insert(
MetadataKey::Method.as_str().to_owned(),
MetadataMethod::Aes256Gcm.as_slice().to_vec(),
);
let iv_value = iv.as_slice().to_vec();
content
.mut_metadata()
.insert(MetadataKey::Iv.as_str().to_owned(), iv_value);
let (ciphertext, gcm_tag) = AesGcmCrypter::new(&self.key, iv).encrypt(plaintext)?;
content.set_content(ciphertext);
content.mut_metadata().insert(
MetadataKey::AesGcmTag.as_str().to_owned(),
gcm_tag.as_slice().to_owned(),
);
Ok(content)
}
// On decrypt failure, the rule is to return WrongMasterKey error in case it is possible that
// a wrong master key has been used, or other error otherwise.
pub fn decrypt_content(&self, content: &EncryptedContent) -> Result<Vec<u8>> {
let method = content
.get_metadata()
.get(MetadataKey::Method.as_str())
.ok_or_else(|| {
// Missing method in metadata. The metadata of the encrypted content is invalid or
// corrupted.
Error::Other(box_err!(
"metadata {} not found",
MetadataKey::Method.as_str()
))
})?;
if method.as_slice() != MetadataMethod::Aes256Gcm.as_slice() {
// Currently we only support aes256-gcm. A different method could mean the encrypted
// content is written by a future version of TiKV, and we don't know how to handle it.
// Fail immediately instead of fallback to previous key.
return Err(Error::Other(box_err!(
"encryption method mismatch, expected {:?} vs actual {:?}",
MetadataMethod::Aes256Gcm.as_slice(),
method
)));
}
let key = &self.key;
let iv_value = content
.get_metadata()
.get(MetadataKey::Iv.as_str())
.ok_or_else(|| {
// IV is missing. The metadata of the encrypted content is invalid or corrupted.
Error::Other(box_err!("metadata {} not found", MetadataKey::Iv.as_str()))
})?;
let iv = Iv::from_slice(iv_value.as_slice())?;
let tag = content
.get_metadata()
.get(MetadataKey::AesGcmTag.as_str())
.ok_or_else(|| { | let gcm_tag = AesGcmTag::from(tag.as_slice());
let ciphertext = content.get_content();
let plaintext = AesGcmCrypter::new(key, iv)
.decrypt(ciphertext, gcm_tag)
.map_err(|e|
// Decryption error, likely caused by mismatched tag. It could be the tag is
// corrupted, or the encrypted content is fake by an attacker, but more likely
// it is caused by a wrong master key being used.
Error::WrongMasterKey(box_err!("decrypt in GCM mode failed: {}", e)))?;
Ok(plaintext)
}
}
#[cfg(test)]
mod tests {
use hex::FromHex;
use matches::assert_matches;
use super::*;
#[test]
fn test_mem_backend_ase_256_gcm() {
// See more http://csrc.nist.gov/groups/STM/cavp/documents/mac/gcmtestvectors.zip
let pt = Vec::from_hex("25431587e9ecffc7c37f8d6d52a9bc3310651d46fb0e3bad2726c8f2db653749")
.unwrap();
let ct = Vec::from_hex("84e5f23f95648fa247cb28eef53abec947dbf05ac953734618111583840bd980")
.unwrap();
let key = Vec::from_hex("c3d99825f2181f4808acd2068eac7441a65bd428f14d2aab43fefc0129091139")
.unwrap();
let iv = Vec::from_hex("cafabd9672ca6c79a2fbdc22").unwrap();
let backend = MemAesGcmBackend::new(key).unwrap();
let iv = Iv::from_slice(iv.as_slice()).unwrap();
let encrypted_content = backend.encrypt_content(&pt, iv).unwrap();
assert_eq!(encrypted_content.get_content(), ct.as_slice());
let plaintext = backend.decrypt_content(&encrypted_content).unwrap();
assert_eq!(plaintext, pt);
}
#[test]
fn test_mem_backend_authenticate() {
let pt = vec![1u8, 2, 3];
let key = Vec::from_hex("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4")
.unwrap();
let backend = MemAesGcmBackend::new(key).unwrap();
let encrypted_content = backend.encrypt_content(&pt, Iv::new_gcm()).unwrap();
let plaintext = backend.decrypt_content(&encrypted_content).unwrap();
assert_eq!(plaintext, pt);
// Must fail is method not found.
let mut encrypted_content_missing_method = encrypted_content.clone();
encrypted_content_missing_method
.mut_metadata()
.remove(MetadataKey::Method.as_str());
assert_matches!(
backend
.decrypt_content(&encrypted_content_missing_method)
.unwrap_err(),
Error::Other(_)
);
// Must fail if method is not aes256-gcm.
let mut encrypted_content_invalid_method = encrypted_content.clone();
let mut invalid_suffix = b"_invalid".to_vec();
encrypted_content_invalid_method
.mut_metadata()
.get_mut(MetadataKey::Method.as_str())
.unwrap()
.append(&mut invalid_suffix);
assert_matches!(
backend
.decrypt_content(&encrypted_content_invalid_method)
.unwrap_err(),
Error::Other(_)
);
// Must fail if tag not found.
let mut encrypted_content_missing_tag = encrypted_content.clone();
encrypted_content_missing_tag
.mut_metadata()
.remove(MetadataKey::AesGcmTag.as_str());
assert_matches!(
backend
.decrypt_content(&encrypted_content_missing_tag)
.unwrap_err(),
Error::Other(_)
);
// Must fail with WrongMasterKey error due to mismatched tag.
let mut encrypted_content_mismatch_tag = encrypted_content;
encrypted_content_mismatch_tag
.mut_metadata()
.get_mut(MetadataKey::AesGcmTag.as_str())
.unwrap()[0] ^= 0b11111111u8;
assert_matches!(
backend
.decrypt_content(&encrypted_content_mismatch_tag)
.unwrap_err(),
Error::WrongMasterKey(_)
);
}
} | // Tag is missing. The metadata of the encrypted content is invalid or corrupted.
Error::Other(box_err!("gcm tag not found"))
})?; | random_line_split |
mem.rs | // Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use kvproto::encryptionpb::EncryptedContent;
use super::metadata::*;
use crate::crypter::*;
use crate::{AesGcmCrypter, Error, Iv, Result};
/// An in-memory backend, it saves master key in memory.
pub(crate) struct MemAesGcmBackend {
pub key: Vec<u8>,
}
impl MemAesGcmBackend {
pub fn new(key: Vec<u8>) -> Result<MemAesGcmBackend> {
if key.len() != AesGcmCrypter::KEY_LEN {
return Err(box_err!(
"encryption method and key length mismatch, expect {} get {}",
AesGcmCrypter::KEY_LEN,
key.len()
));
}
Ok(MemAesGcmBackend { key })
}
pub fn | (&self, plaintext: &[u8], iv: Iv) -> Result<EncryptedContent> {
let mut content = EncryptedContent::default();
content.mut_metadata().insert(
MetadataKey::Method.as_str().to_owned(),
MetadataMethod::Aes256Gcm.as_slice().to_vec(),
);
let iv_value = iv.as_slice().to_vec();
content
.mut_metadata()
.insert(MetadataKey::Iv.as_str().to_owned(), iv_value);
let (ciphertext, gcm_tag) = AesGcmCrypter::new(&self.key, iv).encrypt(plaintext)?;
content.set_content(ciphertext);
content.mut_metadata().insert(
MetadataKey::AesGcmTag.as_str().to_owned(),
gcm_tag.as_slice().to_owned(),
);
Ok(content)
}
// On decrypt failure, the rule is to return WrongMasterKey error in case it is possible that
// a wrong master key has been used, or other error otherwise.
pub fn decrypt_content(&self, content: &EncryptedContent) -> Result<Vec<u8>> {
let method = content
.get_metadata()
.get(MetadataKey::Method.as_str())
.ok_or_else(|| {
// Missing method in metadata. The metadata of the encrypted content is invalid or
// corrupted.
Error::Other(box_err!(
"metadata {} not found",
MetadataKey::Method.as_str()
))
})?;
if method.as_slice() != MetadataMethod::Aes256Gcm.as_slice() {
// Currently we only support aes256-gcm. A different method could mean the encrypted
// content is written by a future version of TiKV, and we don't know how to handle it.
// Fail immediately instead of fallback to previous key.
return Err(Error::Other(box_err!(
"encryption method mismatch, expected {:?} vs actual {:?}",
MetadataMethod::Aes256Gcm.as_slice(),
method
)));
}
let key = &self.key;
let iv_value = content
.get_metadata()
.get(MetadataKey::Iv.as_str())
.ok_or_else(|| {
// IV is missing. The metadata of the encrypted content is invalid or corrupted.
Error::Other(box_err!("metadata {} not found", MetadataKey::Iv.as_str()))
})?;
let iv = Iv::from_slice(iv_value.as_slice())?;
let tag = content
.get_metadata()
.get(MetadataKey::AesGcmTag.as_str())
.ok_or_else(|| {
// Tag is missing. The metadata of the encrypted content is invalid or corrupted.
Error::Other(box_err!("gcm tag not found"))
})?;
let gcm_tag = AesGcmTag::from(tag.as_slice());
let ciphertext = content.get_content();
let plaintext = AesGcmCrypter::new(key, iv)
.decrypt(ciphertext, gcm_tag)
.map_err(|e|
// Decryption error, likely caused by mismatched tag. It could be the tag is
// corrupted, or the encrypted content is fake by an attacker, but more likely
// it is caused by a wrong master key being used.
Error::WrongMasterKey(box_err!("decrypt in GCM mode failed: {}", e)))?;
Ok(plaintext)
}
}
#[cfg(test)]
mod tests {
use hex::FromHex;
use matches::assert_matches;
use super::*;
#[test]
fn test_mem_backend_ase_256_gcm() {
// See more http://csrc.nist.gov/groups/STM/cavp/documents/mac/gcmtestvectors.zip
let pt = Vec::from_hex("25431587e9ecffc7c37f8d6d52a9bc3310651d46fb0e3bad2726c8f2db653749")
.unwrap();
let ct = Vec::from_hex("84e5f23f95648fa247cb28eef53abec947dbf05ac953734618111583840bd980")
.unwrap();
let key = Vec::from_hex("c3d99825f2181f4808acd2068eac7441a65bd428f14d2aab43fefc0129091139")
.unwrap();
let iv = Vec::from_hex("cafabd9672ca6c79a2fbdc22").unwrap();
let backend = MemAesGcmBackend::new(key).unwrap();
let iv = Iv::from_slice(iv.as_slice()).unwrap();
let encrypted_content = backend.encrypt_content(&pt, iv).unwrap();
assert_eq!(encrypted_content.get_content(), ct.as_slice());
let plaintext = backend.decrypt_content(&encrypted_content).unwrap();
assert_eq!(plaintext, pt);
}
#[test]
fn test_mem_backend_authenticate() {
let pt = vec![1u8, 2, 3];
let key = Vec::from_hex("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4")
.unwrap();
let backend = MemAesGcmBackend::new(key).unwrap();
let encrypted_content = backend.encrypt_content(&pt, Iv::new_gcm()).unwrap();
let plaintext = backend.decrypt_content(&encrypted_content).unwrap();
assert_eq!(plaintext, pt);
// Must fail is method not found.
let mut encrypted_content_missing_method = encrypted_content.clone();
encrypted_content_missing_method
.mut_metadata()
.remove(MetadataKey::Method.as_str());
assert_matches!(
backend
.decrypt_content(&encrypted_content_missing_method)
.unwrap_err(),
Error::Other(_)
);
// Must fail if method is not aes256-gcm.
let mut encrypted_content_invalid_method = encrypted_content.clone();
let mut invalid_suffix = b"_invalid".to_vec();
encrypted_content_invalid_method
.mut_metadata()
.get_mut(MetadataKey::Method.as_str())
.unwrap()
.append(&mut invalid_suffix);
assert_matches!(
backend
.decrypt_content(&encrypted_content_invalid_method)
.unwrap_err(),
Error::Other(_)
);
// Must fail if tag not found.
let mut encrypted_content_missing_tag = encrypted_content.clone();
encrypted_content_missing_tag
.mut_metadata()
.remove(MetadataKey::AesGcmTag.as_str());
assert_matches!(
backend
.decrypt_content(&encrypted_content_missing_tag)
.unwrap_err(),
Error::Other(_)
);
// Must fail with WrongMasterKey error due to mismatched tag.
let mut encrypted_content_mismatch_tag = encrypted_content;
encrypted_content_mismatch_tag
.mut_metadata()
.get_mut(MetadataKey::AesGcmTag.as_str())
.unwrap()[0] ^= 0b11111111u8;
assert_matches!(
backend
.decrypt_content(&encrypted_content_mismatch_tag)
.unwrap_err(),
Error::WrongMasterKey(_)
);
}
}
| encrypt_content | identifier_name |
003.rs | #![feature(slicing_syntax)]
extern crate test;
extern crate time;
| use std::os;
fn solution() -> u64 {
let mut n = 600_851_475_143;
for factor in iter::count(3, 2) {
while n % factor == 0 {
n /= factor;
}
if factor * factor > n {
return n;
} else if n == 1 {
return factor;
}
}
unreachable!();
}
fn main() {
match os::args()[] {
[_, ref flag] if flag[] == "-a" => return println!("{}", solution()),
_ => {},
}
for line in stdio::stdin().lock().lines() {
let iters: u64 = line.unwrap()[].trim().parse().unwrap();
let start = time::precise_time_ns();
for _ in range(0, iters) {
test::black_box(solution());
}
let end = time::precise_time_ns();
println!("{}", end - start);
}
} | use std::io::stdio;
use std::iter; | random_line_split |
003.rs | #![feature(slicing_syntax)]
extern crate test;
extern crate time;
use std::io::stdio;
use std::iter;
use std::os;
fn solution() -> u64 {
let mut n = 600_851_475_143;
for factor in iter::count(3, 2) {
while n % factor == 0 {
n /= factor;
}
if factor * factor > n {
return n;
} else if n == 1 {
return factor;
}
}
unreachable!();
}
fn main() | {
match os::args()[] {
[_, ref flag] if flag[] == "-a" => return println!("{}", solution()),
_ => {},
}
for line in stdio::stdin().lock().lines() {
let iters: u64 = line.unwrap()[].trim().parse().unwrap();
let start = time::precise_time_ns();
for _ in range(0, iters) {
test::black_box(solution());
}
let end = time::precise_time_ns();
println!("{}", end - start);
}
} | identifier_body | |
003.rs | #![feature(slicing_syntax)]
extern crate test;
extern crate time;
use std::io::stdio;
use std::iter;
use std::os;
fn solution() -> u64 {
let mut n = 600_851_475_143;
for factor in iter::count(3, 2) {
while n % factor == 0 {
n /= factor;
}
if factor * factor > n {
return n;
} else if n == 1 {
return factor;
}
}
unreachable!();
}
fn main() {
match os::args()[] {
[_, ref flag] if flag[] == "-a" => return println!("{}", solution()),
_ => | ,
}
for line in stdio::stdin().lock().lines() {
let iters: u64 = line.unwrap()[].trim().parse().unwrap();
let start = time::precise_time_ns();
for _ in range(0, iters) {
test::black_box(solution());
}
let end = time::precise_time_ns();
println!("{}", end - start);
}
}
| {} | conditional_block |
003.rs | #![feature(slicing_syntax)]
extern crate test;
extern crate time;
use std::io::stdio;
use std::iter;
use std::os;
fn solution() -> u64 {
let mut n = 600_851_475_143;
for factor in iter::count(3, 2) {
while n % factor == 0 {
n /= factor;
}
if factor * factor > n {
return n;
} else if n == 1 {
return factor;
}
}
unreachable!();
}
fn | () {
match os::args()[] {
[_, ref flag] if flag[] == "-a" => return println!("{}", solution()),
_ => {},
}
for line in stdio::stdin().lock().lines() {
let iters: u64 = line.unwrap()[].trim().parse().unwrap();
let start = time::precise_time_ns();
for _ in range(0, iters) {
test::black_box(solution());
}
let end = time::precise_time_ns();
println!("{}", end - start);
}
}
| main | identifier_name |
zip.js | import gulp from 'gulp';
import path from 'path';
import runSequence from 'run-sequence';
import { spawn } from 'child_process';
const pkg = require('../package.json');
function zip(src, dest) {
const current_process = spawn('7z', ['a', '-tzip', dest, src], {cwd: './tmp'});
let is_error = false;
return new Promise((resolve, reject) => {
current_process.stdout.on('data', function(msg) {
console.log(msg.toString());
});
current_process.stderr.on('data', function(msg) {
console.error(msg.toString());
is_error = true;
});
current_process.on('close', function(code) {
if (is_error) return reject(); | return resolve();
});
});
}
function releaseFile(platform) {
return `Championify-${platform}-${pkg.version}.zip`;
}
gulp.task('zip:osx', function(cb) {
const src = `${pkg.name}.app`;
const dest = path.join('../releases', releaseFile('OSX'));
return zip(src, dest, cb);
});
gulp.task('zip:win', function(cb) {
const src = pkg.name;
const dest = path.join('../releases', releaseFile('WIN'));
return zip(src, dest, cb);
});
gulp.task('zip:all', function(cb) {
return runSequence(['zip:osx', 'zip:win'], cb);
}); | random_line_split | |
zip.js | import gulp from 'gulp';
import path from 'path';
import runSequence from 'run-sequence';
import { spawn } from 'child_process';
const pkg = require('../package.json');
function zip(src, dest) {
const current_process = spawn('7z', ['a', '-tzip', dest, src], {cwd: './tmp'});
let is_error = false;
return new Promise((resolve, reject) => {
current_process.stdout.on('data', function(msg) {
console.log(msg.toString());
});
current_process.stderr.on('data', function(msg) {
console.error(msg.toString());
is_error = true;
});
current_process.on('close', function(code) {
if (is_error) return reject();
return resolve();
});
});
}
function | (platform) {
return `Championify-${platform}-${pkg.version}.zip`;
}
gulp.task('zip:osx', function(cb) {
const src = `${pkg.name}.app`;
const dest = path.join('../releases', releaseFile('OSX'));
return zip(src, dest, cb);
});
gulp.task('zip:win', function(cb) {
const src = pkg.name;
const dest = path.join('../releases', releaseFile('WIN'));
return zip(src, dest, cb);
});
gulp.task('zip:all', function(cb) {
return runSequence(['zip:osx', 'zip:win'], cb);
});
| releaseFile | identifier_name |
zip.js | import gulp from 'gulp';
import path from 'path';
import runSequence from 'run-sequence';
import { spawn } from 'child_process';
const pkg = require('../package.json');
function zip(src, dest) {
const current_process = spawn('7z', ['a', '-tzip', dest, src], {cwd: './tmp'});
let is_error = false;
return new Promise((resolve, reject) => {
current_process.stdout.on('data', function(msg) {
console.log(msg.toString());
});
current_process.stderr.on('data', function(msg) {
console.error(msg.toString());
is_error = true;
});
current_process.on('close', function(code) {
if (is_error) return reject();
return resolve();
});
});
}
function releaseFile(platform) |
gulp.task('zip:osx', function(cb) {
const src = `${pkg.name}.app`;
const dest = path.join('../releases', releaseFile('OSX'));
return zip(src, dest, cb);
});
gulp.task('zip:win', function(cb) {
const src = pkg.name;
const dest = path.join('../releases', releaseFile('WIN'));
return zip(src, dest, cb);
});
gulp.task('zip:all', function(cb) {
return runSequence(['zip:osx', 'zip:win'], cb);
});
| {
return `Championify-${platform}-${pkg.version}.zip`;
} | identifier_body |
bpf_base.rs | use crate::abi::Endian;
use crate::spec::{LinkerFlavor, MergeFunctions, PanicStrategy, TargetOptions};
pub fn opts(endian: Endian) -> TargetOptions | {
TargetOptions {
allow_asm: true,
endian,
linker_flavor: LinkerFlavor::BpfLinker,
atomic_cas: false,
executables: true,
dynamic_linking: true,
no_builtins: true,
panic_strategy: PanicStrategy::Abort,
position_independent_executables: true,
// Disable MergeFunctions since:
// - older kernels don't support bpf-to-bpf calls
// - on newer kernels, userspace still needs to relocate before calling
// BPF_PROG_LOAD and not all BPF libraries do that yet
merge_functions: MergeFunctions::Disabled,
obj_is_bitcode: true,
requires_lto: false,
singlethread: true,
max_atomic_width: Some(64),
..Default::default()
}
} | identifier_body | |
bpf_base.rs | use crate::abi::Endian;
use crate::spec::{LinkerFlavor, MergeFunctions, PanicStrategy, TargetOptions};
pub fn opts(endian: Endian) -> TargetOptions {
TargetOptions {
allow_asm: true,
endian,
linker_flavor: LinkerFlavor::BpfLinker,
atomic_cas: false,
executables: true, | no_builtins: true,
panic_strategy: PanicStrategy::Abort,
position_independent_executables: true,
// Disable MergeFunctions since:
// - older kernels don't support bpf-to-bpf calls
// - on newer kernels, userspace still needs to relocate before calling
// BPF_PROG_LOAD and not all BPF libraries do that yet
merge_functions: MergeFunctions::Disabled,
obj_is_bitcode: true,
requires_lto: false,
singlethread: true,
max_atomic_width: Some(64),
..Default::default()
}
} | dynamic_linking: true, | random_line_split |
bpf_base.rs | use crate::abi::Endian;
use crate::spec::{LinkerFlavor, MergeFunctions, PanicStrategy, TargetOptions};
pub fn | (endian: Endian) -> TargetOptions {
TargetOptions {
allow_asm: true,
endian,
linker_flavor: LinkerFlavor::BpfLinker,
atomic_cas: false,
executables: true,
dynamic_linking: true,
no_builtins: true,
panic_strategy: PanicStrategy::Abort,
position_independent_executables: true,
// Disable MergeFunctions since:
// - older kernels don't support bpf-to-bpf calls
// - on newer kernels, userspace still needs to relocate before calling
// BPF_PROG_LOAD and not all BPF libraries do that yet
merge_functions: MergeFunctions::Disabled,
obj_is_bitcode: true,
requires_lto: false,
singlethread: true,
max_atomic_width: Some(64),
..Default::default()
}
}
| opts | identifier_name |
XMLControl.py | from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import xml.etree.ElementTree
from xml.etree.cElementTree import ElementTree, Element, SubElement
from xml.etree.cElementTree import fromstring, tostring
import fs_uae_launcher.fsui as fsui
from ..Config import Config
from ..Settings import Settings
from ..I18N import _, ngettext
class XMLControl(fsui.TextArea): | def __init__(self, parent):
fsui.TextArea.__init__(self, parent, horizontal_scroll=True)
self.path = ""
def connect_game(self, info):
tree = self.get_tree()
root = tree.getroot()
if not root.tag == "config":
return
game_node = self.find_or_create_node(root, "game")
game_node.set("uuid", info["uuid"])
game_name_node = self.find_or_create_node(game_node, "name")
game_name_node.text = info["name"]
self.set_tree(tree)
def find_or_create_node(self, element, name):
node = element.find(name)
if node is None:
node = SubElement(element, name)
return node
def set_path(self, path):
if not os.path.exists(path):
path = ""
self.path = path
if path:
self.load_xml(path)
else:
self.set_text("")
def get_tree(self):
text = self.get_text().strip()
try:
root = fromstring(text.encode("UTF-8"))
except Exception:
# FIXME: show message
import traceback
traceback.print_exc()
return
tree = ElementTree(root)
indent_tree(root)
return tree
def set_tree(self, tree):
data = tostring(tree.getroot(), encoding="UTF-8").decode("UTF-8")
std_decl = "<?xml version='1.0' encoding='UTF-8'?>"
if data.startswith(std_decl):
data = data[len(std_decl):].strip()
self.set_text(data)
def load_xml(self, path):
with open(path, "rb") as f:
data = f.read()
self.set_text(data)
def save(self):
if not self.path:
print("no path to save XML to")
return
self.save_xml(self.path)
def save_xml(self, path):
self.get_tree().write(self.path)
def indent_tree(elem, level=0):
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent_tree(elem, level+1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i | random_line_split | |
XMLControl.py | from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import xml.etree.ElementTree
from xml.etree.cElementTree import ElementTree, Element, SubElement
from xml.etree.cElementTree import fromstring, tostring
import fs_uae_launcher.fsui as fsui
from ..Config import Config
from ..Settings import Settings
from ..I18N import _, ngettext
class XMLControl(fsui.TextArea):
def __init__(self, parent):
fsui.TextArea.__init__(self, parent, horizontal_scroll=True)
self.path = ""
def connect_game(self, info):
tree = self.get_tree()
root = tree.getroot()
if not root.tag == "config":
return
game_node = self.find_or_create_node(root, "game")
game_node.set("uuid", info["uuid"])
game_name_node = self.find_or_create_node(game_node, "name")
game_name_node.text = info["name"]
self.set_tree(tree)
def find_or_create_node(self, element, name):
node = element.find(name)
if node is None:
node = SubElement(element, name)
return node
def set_path(self, path):
if not os.path.exists(path):
path = ""
self.path = path
if path:
self.load_xml(path)
else:
self.set_text("")
def get_tree(self):
text = self.get_text().strip()
try:
root = fromstring(text.encode("UTF-8"))
except Exception:
# FIXME: show message
import traceback
traceback.print_exc()
return
tree = ElementTree(root)
indent_tree(root)
return tree
def set_tree(self, tree):
|
def load_xml(self, path):
with open(path, "rb") as f:
data = f.read()
self.set_text(data)
def save(self):
if not self.path:
print("no path to save XML to")
return
self.save_xml(self.path)
def save_xml(self, path):
self.get_tree().write(self.path)
def indent_tree(elem, level=0):
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent_tree(elem, level+1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
| data = tostring(tree.getroot(), encoding="UTF-8").decode("UTF-8")
std_decl = "<?xml version='1.0' encoding='UTF-8'?>"
if data.startswith(std_decl):
data = data[len(std_decl):].strip()
self.set_text(data) | identifier_body |
XMLControl.py | from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import xml.etree.ElementTree
from xml.etree.cElementTree import ElementTree, Element, SubElement
from xml.etree.cElementTree import fromstring, tostring
import fs_uae_launcher.fsui as fsui
from ..Config import Config
from ..Settings import Settings
from ..I18N import _, ngettext
class XMLControl(fsui.TextArea):
def __init__(self, parent):
fsui.TextArea.__init__(self, parent, horizontal_scroll=True)
self.path = ""
def connect_game(self, info):
tree = self.get_tree()
root = tree.getroot()
if not root.tag == "config":
return
game_node = self.find_or_create_node(root, "game")
game_node.set("uuid", info["uuid"])
game_name_node = self.find_or_create_node(game_node, "name")
game_name_node.text = info["name"]
self.set_tree(tree)
def find_or_create_node(self, element, name):
node = element.find(name)
if node is None:
node = SubElement(element, name)
return node
def set_path(self, path):
if not os.path.exists(path):
path = ""
self.path = path
if path:
self.load_xml(path)
else:
|
def get_tree(self):
text = self.get_text().strip()
try:
root = fromstring(text.encode("UTF-8"))
except Exception:
# FIXME: show message
import traceback
traceback.print_exc()
return
tree = ElementTree(root)
indent_tree(root)
return tree
def set_tree(self, tree):
data = tostring(tree.getroot(), encoding="UTF-8").decode("UTF-8")
std_decl = "<?xml version='1.0' encoding='UTF-8'?>"
if data.startswith(std_decl):
data = data[len(std_decl):].strip()
self.set_text(data)
def load_xml(self, path):
with open(path, "rb") as f:
data = f.read()
self.set_text(data)
def save(self):
if not self.path:
print("no path to save XML to")
return
self.save_xml(self.path)
def save_xml(self, path):
self.get_tree().write(self.path)
def indent_tree(elem, level=0):
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent_tree(elem, level+1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
| self.set_text("") | conditional_block |
XMLControl.py | from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import xml.etree.ElementTree
from xml.etree.cElementTree import ElementTree, Element, SubElement
from xml.etree.cElementTree import fromstring, tostring
import fs_uae_launcher.fsui as fsui
from ..Config import Config
from ..Settings import Settings
from ..I18N import _, ngettext
class XMLControl(fsui.TextArea):
def __init__(self, parent):
fsui.TextArea.__init__(self, parent, horizontal_scroll=True)
self.path = ""
def connect_game(self, info):
tree = self.get_tree()
root = tree.getroot()
if not root.tag == "config":
return
game_node = self.find_or_create_node(root, "game")
game_node.set("uuid", info["uuid"])
game_name_node = self.find_or_create_node(game_node, "name")
game_name_node.text = info["name"]
self.set_tree(tree)
def find_or_create_node(self, element, name):
node = element.find(name)
if node is None:
node = SubElement(element, name)
return node
def set_path(self, path):
if not os.path.exists(path):
path = ""
self.path = path
if path:
self.load_xml(path)
else:
self.set_text("")
def get_tree(self):
text = self.get_text().strip()
try:
root = fromstring(text.encode("UTF-8"))
except Exception:
# FIXME: show message
import traceback
traceback.print_exc()
return
tree = ElementTree(root)
indent_tree(root)
return tree
def | (self, tree):
data = tostring(tree.getroot(), encoding="UTF-8").decode("UTF-8")
std_decl = "<?xml version='1.0' encoding='UTF-8'?>"
if data.startswith(std_decl):
data = data[len(std_decl):].strip()
self.set_text(data)
def load_xml(self, path):
with open(path, "rb") as f:
data = f.read()
self.set_text(data)
def save(self):
if not self.path:
print("no path to save XML to")
return
self.save_xml(self.path)
def save_xml(self, path):
self.get_tree().write(self.path)
def indent_tree(elem, level=0):
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent_tree(elem, level+1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
| set_tree | identifier_name |
emberTemplates.js | module.exports = {
options: {
templateCompilerPath: 'bower_components/ember/ember-template-compiler.js',
handlebarsPath: 'bower_components/handlebars/handlebars.js',
preprocess: function (source) {
return source.replace(/\s+/g, ' ');
},
templateName: function (sourceFile) {
/*
These are how templates will be named based on their folder
structure.
components/[name].hbs ==> components/[name]
partials/[name].hbs ==> _[name]
modules/application/templates/[name].hbs ==> [name]
modules/application/partials/[name].hbs ==> _[name]
modules/[moduleName]/templates/[moduleName].hbs ==> [moduleName]
modules/[moduleName]/templates/[name].hbs ==> [moduleName]/[name]
modules/[moduleName]/partials/[name].hbs ==> [moduleName]/_[name]
Additionally any template that is nested deeper will have that
structure added as well.
modules/[moduleName]/templates/[folder1]/[folder2]/[name] ==> [moduleName]/[folder1]/[folder2]/[name]
*/
var matches = sourceFile.match(new RegExp('(?:app/modules/(.*?)/|app/)(templates|partials)?/?(.*)')),
moduleName = matches[1],
isAppModule = (moduleName === 'application'),
isPartial = (matches[2] === 'partials'),
fileName = matches[3],
prefix = (isPartial ? '_' : ''),
templateName = '';
if (moduleName && !isAppModule) |
else {
templateName = prefix + fileName;
}
console.log('Compiling ' + sourceFile.blue + ' to ' + templateName.green);
return templateName;
}
},
compile: {
files: {
'tmp/compiled-templates.js': ['templates/**/*.{hbs,handlebars}', 'app/**/*.{hbs,handlebars}']
}
}
};
| {
if (fileName === moduleName) {
templateName = moduleName;
}
else {
templateName = moduleName + '/' + prefix + fileName;
}
} | conditional_block |
emberTemplates.js | module.exports = {
options: {
templateCompilerPath: 'bower_components/ember/ember-template-compiler.js',
handlebarsPath: 'bower_components/handlebars/handlebars.js',
preprocess: function (source) {
return source.replace(/\s+/g, ' ');
},
templateName: function (sourceFile) {
/*
These are how templates will be named based on their folder
structure.
components/[name].hbs ==> components/[name]
partials/[name].hbs ==> _[name]
modules/application/templates/[name].hbs ==> [name]
modules/application/partials/[name].hbs ==> _[name]
modules/[moduleName]/templates/[moduleName].hbs ==> [moduleName]
modules/[moduleName]/templates/[name].hbs ==> [moduleName]/[name]
modules/[moduleName]/partials/[name].hbs ==> [moduleName]/_[name]
Additionally any template that is nested deeper will have that
structure added as well.
modules/[moduleName]/templates/[folder1]/[folder2]/[name] ==> [moduleName]/[folder1]/[folder2]/[name]
*/
var matches = sourceFile.match(new RegExp('(?:app/modules/(.*?)/|app/)(templates|partials)?/?(.*)')),
moduleName = matches[1],
isAppModule = (moduleName === 'application'),
isPartial = (matches[2] === 'partials'),
fileName = matches[3],
prefix = (isPartial ? '_' : ''),
templateName = '';
if (moduleName && !isAppModule) {
if (fileName === moduleName) {
templateName = moduleName;
}
else {
templateName = moduleName + '/' + prefix + fileName;
}
}
else {
templateName = prefix + fileName;
} | }
},
compile: {
files: {
'tmp/compiled-templates.js': ['templates/**/*.{hbs,handlebars}', 'app/**/*.{hbs,handlebars}']
}
}
}; |
console.log('Compiling ' + sourceFile.blue + ' to ' + templateName.green);
return templateName; | random_line_split |
test_input.py | #!/usr/bin/env python
# Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class TestInput(object):
"""Groups information about a test for easy passing of data."""
def __init__(self, test_name, timeout):
"""Holds the input parameters for a test.
Args:
test: name of test (not an absolute path!)
timeout: Timeout in msecs the driver should use while running the test
"""
self.test_name = test_name
self.timeout = timeout
# TestInput objects are normally constructed by the manager and passed
# to the workers, but these two fields are set lazily in the workers
# because they require us to figure out if the test is a reftest or not
# and we want to be able to do that in parallel.
self.should_run_pixel_tests = None
self.reference_files = None
def __repr__(self):
return "TestInput('%s', %d, %s, %s)" % (self.test_name, self.timeout, self.should_run_pixel_tests, self.reference_files) | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | random_line_split |
test_input.py | #!/usr/bin/env python
# Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class TestInput(object):
"""Groups information about a test for easy passing of data."""
def | (self, test_name, timeout):
"""Holds the input parameters for a test.
Args:
test: name of test (not an absolute path!)
timeout: Timeout in msecs the driver should use while running the test
"""
self.test_name = test_name
self.timeout = timeout
# TestInput objects are normally constructed by the manager and passed
# to the workers, but these two fields are set lazily in the workers
# because they require us to figure out if the test is a reftest or not
# and we want to be able to do that in parallel.
self.should_run_pixel_tests = None
self.reference_files = None
def __repr__(self):
return "TestInput('%s', %d, %s, %s)" % (self.test_name, self.timeout, self.should_run_pixel_tests, self.reference_files)
| __init__ | identifier_name |
test_input.py | #!/usr/bin/env python
# Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class TestInput(object):
"""Groups information about a test for easy passing of data."""
def __init__(self, test_name, timeout):
"""Holds the input parameters for a test.
Args:
test: name of test (not an absolute path!)
timeout: Timeout in msecs the driver should use while running the test
"""
self.test_name = test_name
self.timeout = timeout
# TestInput objects are normally constructed by the manager and passed
# to the workers, but these two fields are set lazily in the workers
# because they require us to figure out if the test is a reftest or not
# and we want to be able to do that in parallel.
self.should_run_pixel_tests = None
self.reference_files = None
def __repr__(self):
| return "TestInput('%s', %d, %s, %s)" % (self.test_name, self.timeout, self.should_run_pixel_tests, self.reference_files) | identifier_body | |
lib.rs | //
// imag - the personal information management suite for the commandline
// Copyright (C) 2015-2020 Matthias Beyer <mail@beyermatthias.de> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
#![forbid(unsafe_code)]
#![recursion_limit="256"]
#![deny(
dead_code,
non_camel_case_types,
non_snake_case,
path_statements,
trivial_numeric_casts,
unstable_features,
unused_allocation,
unused_import_braces,
unused_imports,
unused_must_use,
unused_mut,
unused_qualifications,
while_true,
)]
extern crate itertools;
#[macro_use] extern crate log;
extern crate toml;
extern crate toml_query;
extern crate url;
extern crate sha1;
extern crate hex;
extern crate serde;
#[macro_use] extern crate serde_derive;
#[macro_use] extern crate anyhow;
#[macro_use] extern crate is_match; |
#[macro_use] extern crate libimagstore;
extern crate libimagerror;
extern crate libimagutil;
module_entry_path_mod!("links");
pub mod iter;
pub mod linkable;
pub mod link;
pub mod storecheck; |
#[cfg(test)]
extern crate env_logger; | random_line_split |
types.pre.rs | use std::{fmt, str};
#[derive(Debug)]
pub struct Machine<'a> {
pub memory: CambridgeArray<'a, u8>,
pub output: UTF8Wrapper<'a>,
#ifdef PROFILE
pub trace: ProfileShim,
#endif
}
pub struct CambridgeArray<'a, T: 'a>(pub &'a [T]); // Cambridge is Oxford's rival
pub struct UTF8Wrapper<'a>(pub &'a [u8]);
#ifdef PROFILE
pub struct | (pub fn() -> Profile);
#endif
impl<'a, T: fmt::Display> fmt::Debug for CambridgeArray<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "["));
if self.0.len() > 0 {
for e in &self.0[..] {
try!(write!(f, " {}", e));
}
}
write!(f, " ]")
}
}
impl<'a> fmt::Debug for UTF8Wrapper<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "\n{}", try!(str::from_utf8(self.0).map_err(|_| fmt::Error)))
}
}
#ifdef PROFILE
#[derive(Debug, Default)]
pub struct Profile {
pub instructions: u32,
pub increments: u32, pub decrements: u32, pub overflows: u32, pub underflows: u32,
pub lefts: u32, pub rights: u32, pub left_grows: u32, pub right_grows: u32,
pub ins: u32, pub in_revconvs: u32, pub in_unaries: u32, pub eofs: u32,
pub outs: u32, pub out_revs: u32,
pub loops: u32, pub clears: u32,
pub noops: u32,
}
impl fmt::Debug for ProfileShim {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.0(), f)
}
}
#endif
| ProfileShim | identifier_name |
types.pre.rs | use std::{fmt, str};
#[derive(Debug)]
pub struct Machine<'a> {
pub memory: CambridgeArray<'a, u8>,
pub output: UTF8Wrapper<'a>,
#ifdef PROFILE
pub trace: ProfileShim,
#endif
}
pub struct CambridgeArray<'a, T: 'a>(pub &'a [T]); // Cambridge is Oxford's rival
pub struct UTF8Wrapper<'a>(pub &'a [u8]);
#ifdef PROFILE
pub struct ProfileShim(pub fn() -> Profile);
#endif
impl<'a, T: fmt::Display> fmt::Debug for CambridgeArray<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "["));
if self.0.len() > 0 |
write!(f, " ]")
}
}
impl<'a> fmt::Debug for UTF8Wrapper<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "\n{}", try!(str::from_utf8(self.0).map_err(|_| fmt::Error)))
}
}
#ifdef PROFILE
#[derive(Debug, Default)]
pub struct Profile {
pub instructions: u32,
pub increments: u32, pub decrements: u32, pub overflows: u32, pub underflows: u32,
pub lefts: u32, pub rights: u32, pub left_grows: u32, pub right_grows: u32,
pub ins: u32, pub in_revconvs: u32, pub in_unaries: u32, pub eofs: u32,
pub outs: u32, pub out_revs: u32,
pub loops: u32, pub clears: u32,
pub noops: u32,
}
impl fmt::Debug for ProfileShim {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.0(), f)
}
}
#endif
| {
for e in &self.0[..] {
try!(write!(f, " {}", e));
}
} | conditional_block |
types.pre.rs | use std::{fmt, str};
#[derive(Debug)]
pub struct Machine<'a> {
pub memory: CambridgeArray<'a, u8>,
pub output: UTF8Wrapper<'a>,
#ifdef PROFILE
pub trace: ProfileShim,
#endif
}
pub struct CambridgeArray<'a, T: 'a>(pub &'a [T]); // Cambridge is Oxford's rival
pub struct UTF8Wrapper<'a>(pub &'a [u8]);
#ifdef PROFILE
pub struct ProfileShim(pub fn() -> Profile);
#endif
impl<'a, T: fmt::Display> fmt::Debug for CambridgeArray<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "["));
if self.0.len() > 0 {
for e in &self.0[..] {
try!(write!(f, " {}", e));
}
}
write!(f, " ]")
}
}
impl<'a> fmt::Debug for UTF8Wrapper<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result |
}
#ifdef PROFILE
#[derive(Debug, Default)]
pub struct Profile {
pub instructions: u32,
pub increments: u32, pub decrements: u32, pub overflows: u32, pub underflows: u32,
pub lefts: u32, pub rights: u32, pub left_grows: u32, pub right_grows: u32,
pub ins: u32, pub in_revconvs: u32, pub in_unaries: u32, pub eofs: u32,
pub outs: u32, pub out_revs: u32,
pub loops: u32, pub clears: u32,
pub noops: u32,
}
impl fmt::Debug for ProfileShim {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.0(), f)
}
}
#endif
| {
write!(f, "\n{}", try!(str::from_utf8(self.0).map_err(|_| fmt::Error)))
} | identifier_body |
types.pre.rs | use std::{fmt, str};
#[derive(Debug)]
pub struct Machine<'a> {
pub memory: CambridgeArray<'a, u8>,
pub output: UTF8Wrapper<'a>,
#ifdef PROFILE
pub trace: ProfileShim,
#endif
}
| #ifdef PROFILE
pub struct ProfileShim(pub fn() -> Profile);
#endif
impl<'a, T: fmt::Display> fmt::Debug for CambridgeArray<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "["));
if self.0.len() > 0 {
for e in &self.0[..] {
try!(write!(f, " {}", e));
}
}
write!(f, " ]")
}
}
impl<'a> fmt::Debug for UTF8Wrapper<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "\n{}", try!(str::from_utf8(self.0).map_err(|_| fmt::Error)))
}
}
#ifdef PROFILE
#[derive(Debug, Default)]
pub struct Profile {
pub instructions: u32,
pub increments: u32, pub decrements: u32, pub overflows: u32, pub underflows: u32,
pub lefts: u32, pub rights: u32, pub left_grows: u32, pub right_grows: u32,
pub ins: u32, pub in_revconvs: u32, pub in_unaries: u32, pub eofs: u32,
pub outs: u32, pub out_revs: u32,
pub loops: u32, pub clears: u32,
pub noops: u32,
}
impl fmt::Debug for ProfileShim {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.0(), f)
}
}
#endif | pub struct CambridgeArray<'a, T: 'a>(pub &'a [T]); // Cambridge is Oxford's rival
pub struct UTF8Wrapper<'a>(pub &'a [u8]); | random_line_split |
Input.d.ts | type InputType =
| 'text'
| 'email'
| 'select'
| 'file'
| 'radio'
| 'checkbox'
| 'textarea'
| 'button'
| 'reset'
| 'submit'
| 'date'
| 'datetime-local'
| 'hidden'
| 'image'
| 'month'
| 'number'
| 'range'
| 'search'
| 'tel'
| 'url'
| 'week'
| 'password'
| 'datetime'
| 'time'
| 'color';
// Intermediate interface to "redefine" the type of size to string
// size:number => size:any => size:string
interface Intermediate extends React.ChangeTargetHTMLProps<HTMLInputElement> {
size?: any;
}
interface InputProps extends Intermediate {
type?: InputType;
size?: string;
state?: string;
tag?: React.ReactType;
addon?: boolean;
className?: string;
// We don't have the property 'static' here because 'static' is a reserved keyword in TypeScript
// Maybe reactstrap will support an 'isStatic' alias in the future
} | declare var Input: React.StatelessComponent<InputProps>;
export default Input; | random_line_split | |
base.py | import numpy as np
from tfs.core.util import run_once_for_each_obj
from tfs.core.initializer import DefaultInit
from tfs.core.loss import DefaultLoss
from tfs.core.regularizers import DefaultRegularizer
from tfs.core.monitor import DefaultMonitor
from tfs.core.optimizer import DefaultOptimizer
from tfs.core.layer import func_table,Layer
from tfs.core.elem import Component
from tfs.core.layer import ops
import pickle
import tensorflow as tf
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.client import device_lib
from sklearn import metrics
# for supporting multi-gpu:
# https://github.com/tensorflow/tensorflow/blob/r0.7/tensorflow/models/image/cifar10/cifar10_multi_gpu_train.py#L174
#
# we use shared variables on CPU and model distributed on each GPU
from tfs.network.net_struct import NetStructure
#################### Network
# decorators
def with_graph(f):
def with_graph_run(self,*args,**kwargs):
with self.graph.as_default():
return f(self,*args,**kwargs)
# this is important to make the decorator compatiable with run_once_each_obj.
with_graph_run.__name__=f.__name__
return with_graph_run
class Network(object):
__hash__=object.__hash__
def __init__(self):
self._init_graph_sess()
self._struct = NetStructure(self)
self._true_out=None
self._in = None
self._out = None
self._loss=None
self.variables = {}
self.initializer = DefaultInit(self)
self.losser = DefaultLoss(self)
self.regularizer =DefaultRegularizer(self)
self.monitor = {}
self.monitor['default']=DefaultMonitor(self)
self._optimizer = DefaultOptimizer(self)
# this must be set when define a network
self.loss_input_layer_name = None
self._regulization=None
self.grads = None
self._train_op = None
self.num_gpu = 0
self.i_step = 0
self.n_epoch = 0
self._dtype = None
def to_pickle(self):
return [
self.in_shape,
self.loss_input_layer_name,
self.optimizer.to_pickle(),
self.losser.to_pickle(),
self.regularizer.to_pickle()
]
def restore(self,objs):
inshape = objs[0]
self.loss_input_layer_name = objs[1]
self.optimizer = Component.restore(objs[2],self)
self.losser = Component.restore(objs[3],self)
self.regularizer = Component.restore(objs[4],self)
if inshape:
self.build(inshape)
def _init_graph_sess(self):
self._graph = tf.Graph()
with self.graph.as_default():
self._sess = tf.Session()
@property
def optimizer(self):
return self._optimizer
@optimizer.setter
def optimizer(self,opt):
self.grads=None
self._optimizer=opt
def add_monitor(self,name,monitor):
self.monitor[name] = monitor
@staticmethod
def available_devices():
local_device_protos = device_lib.list_local_devices()
return [x for x in local_device_protos]
def __len__(self):
return len(self.net_def)
@property
@deprecated("2017-05-01", "Use `net_def` instead.")
def layers(self):
return self._struct
@property
def nodes(self):
return self._struct
@property
def net_def(self):
return self._struct
def node_to_index(self,l):
return self.net_def.find_index(l)
def node_by_index(self,idx):
return self.net_def[idx]
@deprecated("2017-05-01", "Use `node_by_name` instead.")
def layer_by_name(self,name):
return self.net_def.by_name(name)
def node_by_name(self,name):
return self.net_def.by_name(name)
def __del__(self):
self.sess.close()
def setup(self):
'''Construct the network. '''
raise NotImplementedError('Must be implemented by the subclass.')
def setup_with_def(self,struct_def,in_shape=None):
if isinstance(struct_def,list):
struct_def = NetStructure(self,nodes=struct_def)
self._struct = struct_def.copy_to(self)
if in_shape:
self.build(in_shape)
@property
def graph(self):
return self._graph
@property
def input(self):
return self._in
@property
def output(self):
return self._out
@property
def true_output(self):
return self._true_out
@property
def sess(self):
return self._sess
def _init_in_out_size(self):
if self.num_gpu and self._in is None and self._out is None:
self._in = [None]*self.num_gpu
self._out = [None]*self.num_gpu
self._true_out = [None]*self.num_gpu
self._loss = [None]*self.num_gpu
def tf_graph_str(self):
info=[]
for n in self.graph.as_graph_def().node:
s = '%-20s@%20s'%(n.name,n.device)
if hasattr(n,'tfs_nodename'):
s=s+' --%s'%n.tfs_nodename
info.append(s)
return '\n'.join(info)
@with_graph
@run_once_for_each_obj
def build(self,input_shape,dtype=tf.float32):
self._dtype = dtype
"""Build the computational graph
inTensor: the network input tensor.
"""
if not self.num_gpu:
self._build(input_shape,dtype)
else:
tower_grads = []
for i in range(self.num_gpu):
with tf.device('/gpu:%d' % i):
with tf.name_scope('%s_%d' % ('GPU', i)) as scope:
self._build(input_shape,dtype,i)
tf.get_variable_scope().reuse_variables()
_loss = self.loss[i]
tower_grads.append(_grad)
self.build_variables_table()
self._initialize()
self.compute_gradients()
return self.output
def compute_gradients(self):
if self.loss is None:
return
if not self.num_gpu:
self.grads = self.optimizer.compute_gradients(self.loss,self.variables)
else:
tower_grads = []
for i in range(self.num_gpu):
with tf.device('/gpu:%d' % i):
with tf.name_scope('%s_%d' % ('GPU', i)) as scope:
tf.get_variable_scope().reuse_variables()
_loss = self.loss[i]
_grad = self.optimizer.compute_gradients(_loss,self.variables.values())
tower_grads.append(_grad)
self.grads = self.average_gradients(tower_grads)
def average_gradients(self,tower_grads):
average_grads = []
for grad_and_vars in zip(*tower_grads):
# Note that each grad_and_vars looks like the following:
# ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))
grads = []
for g, _ in grad_and_vars:
expanded_g = tf.expand_dims(g, 0)
grads.append(expanded_g)
grad = tf.concat(axis=0, values=grads)
grad = tf.reduce_mean(grad, 0)
v = grad_and_vars[0][1]
grad_and_var = (grad, v)
average_grads.append(grad_and_var)
return average_grads
# this function is called only in build() under current graph.
def _build(self,input_shape,dtype,idx=None):
self._init_in_out_size()
tmp = tf.placeholder(dtype,input_shape)
if idx is None:
self._in = tmp
else:
self._in[idx] = tmp
for l in self.net_def:
tmp = l.build(tmp,idx)
if idx is None:
self._out = tmp
output_shape=self._out.get_shape().as_list()
output_dtype=self._out.dtype
self._true_out=tf.placeholder(dtype=output_dtype,shape=output_shape)
self._loss = self._compute_loss(idx)
else:
self._out[idx] = tmp
output_shape=self._out[idx].get_shape().as_list()
output_dtype=self._out[idx].dtype
self._true_out[i]=tf.placeholder(dtype=output_dtype,shape=output_shape)
self._loss[idx] = self._compute_loss(idx)
return self
def _initialize(self):
self.run_initor(self.initializer)
def _compute_loss(self,idx):
loss = self.losser.compute(idx)
if loss is None:
return loss
return loss + self.regularizer.compute()
@property
def loss(self):
return self._loss
def build_variables_table(self):
for l in self.net_def:
for k in l.variables:
v = l.variables[k]
self.variables[v.name] = v
def has_built(self):
if hasattr(self,'_has_run'):
if Network.build.__name__ in self._has_run:
return True
return False
def fit(self,dataset,batch_size,n_epoch,
shuffle_epoch=True,max_step=10000000):
if dataset.train.labels.shape[-1] != self.out_shape[-1]:
dataset = dataset.to_one_hot()
train_set = dataset.train
test_set = dataset.test
train_set.before_iter()
self.i_step = 0
self.n_epoch = 0
while True:
self.i_step += 1
self.n_epoch = train_set.epochs_completed
X,y = train_set.next_batch(batch_size,shuffle=shuffle_epoch)
self.step(X,y,self.i_step)
for v in self.monitor.values():
v.status(train_set,test_set,self.i_step,self.n_epoch)
if self.n_epoch>=n_epoch:
break
if self.i_step >= max_step:
break
return self
@property
def train_op(self):
if self._train_op is None:
self._train_op = self._get_train_op()
return self._train_op
@with_graph
def _get_train_op(self,step=None):
if self.loss is None:
return None
if self.grads is None:
self.compute_gradients()
op = self.optimizer.apply_gradients(self.grads,step)
# initialize the uninitalized variable (the optimizer would introduce
# uninitalized variable)
vars = self.optimizer.variables
self.run(tf.variables_initializer(vars.values()))
return op
def | (self,X,y,step):
self.run(self.train_op,feed_dict={self.input:X,self.true_output:y})
def predict(self,X):
if self.num_gpu==0:
_in = self.input
_out = self.output
else:
_in = self.input[0]
_out = self.output[0]
return self.run(_out,feed_dict={_in:X})
def eval_node_input(self,node,X):
_in = self.input
if isinstance(node,str):
_out = self.node_by_name(node).input
else:
_out = node.input
return self.run(_out,feed_dict={_in:X})
def eval_node(self,node,X):
_in = self.input
if isinstance(node,str):
_out = self.node_by_name(node).output
else:
_out = node.output
return self.run(_out,feed_dict={_in:X})
def function(self,input_tensors,output_tensors):
def _func(input_vals):
feed = {t:v in zip(input_vals,input_tensors)}
return self.run(output_tensors,feed_dict=feed)
return _func
def score(self,datasubset):
y_pred = self.predict(datasubset.data)
y_pred = np.argmax(y_pred,1)
y_true = datasubset.labels
y_true = np.argmax(y_true,1)
return metrics.accuracy_score(y_true,y_pred)
def measure_loss(self,X,y):
if self.num_gpu==0:
_in = self.input
_true_out = self.true_output
_loss = self.loss
else:
_in = self.input[0]
_true_out = self.true_output[0]
_loss = self.loss[0]
return self.run(_loss,feed_dict={_in:X,_true_out:y})
def run(self,eval_list,feed_dict=None):
return self.sess.run(eval_list, feed_dict=feed_dict)
def run_initor(self,initor):
op = initor.compute()
return self.sess.run(op)
def save(self,filename):
self.save_def(filename)
to_save={}
for k,v in self.variables.items():
to_save[k]=self.run(v)
f=open(filename+'.model','wb')
pickle.dump(to_save,f)
f.close()
def save_def(self,filename):
self.net_def.save(filename+'.modeldef')
def load(self,filename):
self._init_graph_sess()
self.load_def(filename)
f=open(filename+'.model','rb')
data_dict=pickle.load(f)
f.close()
if self.has_built():
with self._graph.as_default():
op = self.initializer.op_by_value_table(data_dict)
self.run(op)
def load_def(self,filename):
self.net_def.load(filename+'.modeldef')
@property
def in_shape(self):
if self._in is not None:
if self.num_gpu==0:
return self._in.get_shape().as_list()
else:
return self._in[0].get_shape().as_list()
return None
@property
def dtype(self):
return self._dtype
@property
def out_shape(self):
if self._out is not None:
if self.num_gpu==0:
return self._out.get_shape().as_list()
else:
return self._out[0].get_shape().as_list()
return None
def copy(self):
obj = Network()
obj.loss_input_layer_name = self.loss_input_layer_name
obj.setup_with_def(self.net_def,self.in_shape)
return obj
def __str__(self):
return '\n'.join([str(l) for l in self.nodes])
def print_shape(self):
for l in self.nodes:
print('%-20s %20s %s %-20s'%(
l.name,
l.input.get_shape(),
'->',
l.output.get_shape()))
def subnet(self,begin_index,end_index):
obj = Network()
obj.setup_with_def(self.layers[begin_index:end_index])
return obj
def supported_layers(self):
return func_table.keys()
def conv2d(self,
ksize,
knum,
strides,
activation=ops.relu,
padding='SAME',
group=1,
biased=True,
name=None):
self.net_def.append(
func_table['conv2d'](
self,ksize,knum,strides,activation,padding,group,biased,name
))
return self
def fc(self,
outdim,
activation = ops.relu,
name=None):
self.net_def.append(
func_table['fc'](
self,outdim,activation,name
))
return self
def dropout(self,
keep_prob,
name=None):
self.net_def.append(
func_table['dropout'](
self,keep_prob,name
))
return self
def lrn(self,
radius,
alpha,
beta,
bias=1.0,
name=None):
self.net_def.append(
func_table['lrn'](
self,radius,alpha,beta,bias,name
))
return self
def bn(self,
scale_offset=True,
activation=ops.relu,
name=None):
self.net_def.append(
func_table['bn'](
self,scale_offset,activation,name
))
return self
def softmax(self,
name=None):
self.net_def.append(
func_table['softmax'](
self,name
))
return self
def maxpool(self,
ksize,
strides,
padding='SAME',
name=None):
self.net_def.append(
func_table['maxpool'](
self,ksize,strides,padding,name
))
return self
def avgpool(self,
ksize,
strides,
padding='SAME',
name=None):
self.net_def.append(
func_table['avgpool'](
self,ksize,strides,padding,name
))
return self
class CustomNetwork(Network):
"""Automatically called setup and build when construct
"""
def __init__(self):
Network.__init__(self)
self.default_in_shape = None
self.setup()
in_shape = self.default_in_shape
if not in_shape:
raise ValueError("must sepecify the default_in_shape attributes, or pass the shape as an argument when construction")
def setup(self):
raise NotImplementedError("CustomNetwork Must Implement setup Method")
def build(self,inshape=None):
inshape = inshape or self.default_in_shape
return Network.build(self,inshape)
| step | identifier_name |
base.py | import numpy as np
from tfs.core.util import run_once_for_each_obj
from tfs.core.initializer import DefaultInit
from tfs.core.loss import DefaultLoss
from tfs.core.regularizers import DefaultRegularizer
from tfs.core.monitor import DefaultMonitor
from tfs.core.optimizer import DefaultOptimizer
from tfs.core.layer import func_table,Layer
from tfs.core.elem import Component
from tfs.core.layer import ops
import pickle
import tensorflow as tf
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.client import device_lib
from sklearn import metrics
# for supporting multi-gpu:
# https://github.com/tensorflow/tensorflow/blob/r0.7/tensorflow/models/image/cifar10/cifar10_multi_gpu_train.py#L174
#
# we use shared variables on CPU and model distributed on each GPU
from tfs.network.net_struct import NetStructure
#################### Network
# decorators
def with_graph(f):
def with_graph_run(self,*args,**kwargs):
with self.graph.as_default():
return f(self,*args,**kwargs)
# this is important to make the decorator compatiable with run_once_each_obj.
with_graph_run.__name__=f.__name__
return with_graph_run
class Network(object):
__hash__=object.__hash__
def __init__(self):
self._init_graph_sess()
self._struct = NetStructure(self)
self._true_out=None
self._in = None
self._out = None
self._loss=None
self.variables = {}
self.initializer = DefaultInit(self)
self.losser = DefaultLoss(self)
self.regularizer =DefaultRegularizer(self)
self.monitor = {}
self.monitor['default']=DefaultMonitor(self)
self._optimizer = DefaultOptimizer(self)
# this must be set when define a network
self.loss_input_layer_name = None
self._regulization=None
self.grads = None
self._train_op = None
self.num_gpu = 0
self.i_step = 0
self.n_epoch = 0
self._dtype = None
def to_pickle(self):
return [
self.in_shape,
self.loss_input_layer_name,
self.optimizer.to_pickle(),
self.losser.to_pickle(),
self.regularizer.to_pickle()
]
def restore(self,objs):
inshape = objs[0]
self.loss_input_layer_name = objs[1]
self.optimizer = Component.restore(objs[2],self)
self.losser = Component.restore(objs[3],self)
self.regularizer = Component.restore(objs[4],self)
if inshape:
self.build(inshape)
def _init_graph_sess(self):
self._graph = tf.Graph()
with self.graph.as_default():
self._sess = tf.Session()
@property
def optimizer(self):
return self._optimizer
@optimizer.setter
def optimizer(self,opt):
self.grads=None
self._optimizer=opt
def add_monitor(self,name,monitor):
self.monitor[name] = monitor
@staticmethod
def available_devices():
local_device_protos = device_lib.list_local_devices()
return [x for x in local_device_protos]
def __len__(self):
return len(self.net_def)
@property
@deprecated("2017-05-01", "Use `net_def` instead.")
def layers(self):
return self._struct
@property
def nodes(self):
return self._struct
@property
def net_def(self):
return self._struct
def node_to_index(self,l):
return self.net_def.find_index(l)
def node_by_index(self,idx):
return self.net_def[idx]
@deprecated("2017-05-01", "Use `node_by_name` instead.")
def layer_by_name(self,name):
return self.net_def.by_name(name)
def node_by_name(self,name):
return self.net_def.by_name(name)
def __del__(self):
self.sess.close()
def setup(self):
'''Construct the network. '''
raise NotImplementedError('Must be implemented by the subclass.')
def setup_with_def(self,struct_def,in_shape=None):
if isinstance(struct_def,list):
struct_def = NetStructure(self,nodes=struct_def)
self._struct = struct_def.copy_to(self)
if in_shape:
self.build(in_shape)
@property
def graph(self):
return self._graph
@property
def input(self):
return self._in
@property
def output(self):
return self._out
@property
def true_output(self):
return self._true_out
@property
def sess(self):
return self._sess
def _init_in_out_size(self):
if self.num_gpu and self._in is None and self._out is None:
self._in = [None]*self.num_gpu
self._out = [None]*self.num_gpu
self._true_out = [None]*self.num_gpu
self._loss = [None]*self.num_gpu
def tf_graph_str(self):
info=[]
for n in self.graph.as_graph_def().node:
s = '%-20s@%20s'%(n.name,n.device)
if hasattr(n,'tfs_nodename'):
s=s+' --%s'%n.tfs_nodename
info.append(s)
return '\n'.join(info)
@with_graph
@run_once_for_each_obj
def build(self,input_shape,dtype=tf.float32):
self._dtype = dtype
"""Build the computational graph
inTensor: the network input tensor.
"""
if not self.num_gpu:
self._build(input_shape,dtype)
else:
tower_grads = []
for i in range(self.num_gpu):
with tf.device('/gpu:%d' % i):
with tf.name_scope('%s_%d' % ('GPU', i)) as scope:
self._build(input_shape,dtype,i)
tf.get_variable_scope().reuse_variables()
_loss = self.loss[i]
tower_grads.append(_grad)
self.build_variables_table()
self._initialize()
self.compute_gradients()
return self.output
def compute_gradients(self):
if self.loss is None:
return
if not self.num_gpu:
self.grads = self.optimizer.compute_gradients(self.loss,self.variables)
else:
tower_grads = []
for i in range(self.num_gpu):
with tf.device('/gpu:%d' % i):
with tf.name_scope('%s_%d' % ('GPU', i)) as scope:
tf.get_variable_scope().reuse_variables()
_loss = self.loss[i]
_grad = self.optimizer.compute_gradients(_loss,self.variables.values())
tower_grads.append(_grad)
self.grads = self.average_gradients(tower_grads)
def average_gradients(self,tower_grads):
average_grads = []
for grad_and_vars in zip(*tower_grads):
# Note that each grad_and_vars looks like the following:
# ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))
grads = []
for g, _ in grad_and_vars:
expanded_g = tf.expand_dims(g, 0)
grads.append(expanded_g)
grad = tf.concat(axis=0, values=grads)
grad = tf.reduce_mean(grad, 0)
v = grad_and_vars[0][1]
grad_and_var = (grad, v)
average_grads.append(grad_and_var)
return average_grads
# this function is called only in build() under current graph.
def _build(self,input_shape,dtype,idx=None):
self._init_in_out_size()
tmp = tf.placeholder(dtype,input_shape)
if idx is None:
self._in = tmp
else:
self._in[idx] = tmp
for l in self.net_def:
tmp = l.build(tmp,idx)
if idx is None:
self._out = tmp
output_shape=self._out.get_shape().as_list()
output_dtype=self._out.dtype
self._true_out=tf.placeholder(dtype=output_dtype,shape=output_shape)
self._loss = self._compute_loss(idx)
else:
self._out[idx] = tmp
output_shape=self._out[idx].get_shape().as_list()
output_dtype=self._out[idx].dtype
self._true_out[i]=tf.placeholder(dtype=output_dtype,shape=output_shape)
self._loss[idx] = self._compute_loss(idx)
return self
def _initialize(self):
self.run_initor(self.initializer)
def _compute_loss(self,idx):
loss = self.losser.compute(idx)
if loss is None:
return loss
return loss + self.regularizer.compute()
@property
def loss(self):
return self._loss
def build_variables_table(self):
for l in self.net_def:
for k in l.variables:
v = l.variables[k]
self.variables[v.name] = v
def has_built(self):
if hasattr(self,'_has_run'):
if Network.build.__name__ in self._has_run:
return True
return False
def fit(self,dataset,batch_size,n_epoch,
shuffle_epoch=True,max_step=10000000):
if dataset.train.labels.shape[-1] != self.out_shape[-1]:
dataset = dataset.to_one_hot()
train_set = dataset.train
test_set = dataset.test
train_set.before_iter()
self.i_step = 0
self.n_epoch = 0
while True:
self.i_step += 1
self.n_epoch = train_set.epochs_completed
X,y = train_set.next_batch(batch_size,shuffle=shuffle_epoch)
self.step(X,y,self.i_step)
for v in self.monitor.values():
v.status(train_set,test_set,self.i_step,self.n_epoch)
if self.n_epoch>=n_epoch:
break
if self.i_step >= max_step:
break
return self
@property
def train_op(self):
if self._train_op is None:
self._train_op = self._get_train_op()
return self._train_op
@with_graph
def _get_train_op(self,step=None):
if self.loss is None:
return None
if self.grads is None:
self.compute_gradients()
op = self.optimizer.apply_gradients(self.grads,step)
# initialize the uninitalized variable (the optimizer would introduce
# uninitalized variable)
vars = self.optimizer.variables
self.run(tf.variables_initializer(vars.values()))
return op
def step(self,X,y,step):
self.run(self.train_op,feed_dict={self.input:X,self.true_output:y})
def predict(self,X):
if self.num_gpu==0:
_in = self.input
_out = self.output
else:
_in = self.input[0]
_out = self.output[0]
return self.run(_out,feed_dict={_in:X})
def eval_node_input(self,node,X):
_in = self.input
if isinstance(node,str):
_out = self.node_by_name(node).input
else:
_out = node.input
return self.run(_out,feed_dict={_in:X})
def eval_node(self,node,X):
_in = self.input
if isinstance(node,str):
_out = self.node_by_name(node).output
else:
_out = node.output
return self.run(_out,feed_dict={_in:X}) | def _func(input_vals):
feed = {t:v in zip(input_vals,input_tensors)}
return self.run(output_tensors,feed_dict=feed)
return _func
def score(self,datasubset):
y_pred = self.predict(datasubset.data)
y_pred = np.argmax(y_pred,1)
y_true = datasubset.labels
y_true = np.argmax(y_true,1)
return metrics.accuracy_score(y_true,y_pred)
def measure_loss(self,X,y):
if self.num_gpu==0:
_in = self.input
_true_out = self.true_output
_loss = self.loss
else:
_in = self.input[0]
_true_out = self.true_output[0]
_loss = self.loss[0]
return self.run(_loss,feed_dict={_in:X,_true_out:y})
def run(self,eval_list,feed_dict=None):
return self.sess.run(eval_list, feed_dict=feed_dict)
def run_initor(self,initor):
op = initor.compute()
return self.sess.run(op)
def save(self,filename):
self.save_def(filename)
to_save={}
for k,v in self.variables.items():
to_save[k]=self.run(v)
f=open(filename+'.model','wb')
pickle.dump(to_save,f)
f.close()
def save_def(self,filename):
self.net_def.save(filename+'.modeldef')
def load(self,filename):
self._init_graph_sess()
self.load_def(filename)
f=open(filename+'.model','rb')
data_dict=pickle.load(f)
f.close()
if self.has_built():
with self._graph.as_default():
op = self.initializer.op_by_value_table(data_dict)
self.run(op)
def load_def(self,filename):
self.net_def.load(filename+'.modeldef')
@property
def in_shape(self):
if self._in is not None:
if self.num_gpu==0:
return self._in.get_shape().as_list()
else:
return self._in[0].get_shape().as_list()
return None
@property
def dtype(self):
return self._dtype
@property
def out_shape(self):
if self._out is not None:
if self.num_gpu==0:
return self._out.get_shape().as_list()
else:
return self._out[0].get_shape().as_list()
return None
def copy(self):
obj = Network()
obj.loss_input_layer_name = self.loss_input_layer_name
obj.setup_with_def(self.net_def,self.in_shape)
return obj
def __str__(self):
return '\n'.join([str(l) for l in self.nodes])
def print_shape(self):
for l in self.nodes:
print('%-20s %20s %s %-20s'%(
l.name,
l.input.get_shape(),
'->',
l.output.get_shape()))
def subnet(self,begin_index,end_index):
obj = Network()
obj.setup_with_def(self.layers[begin_index:end_index])
return obj
def supported_layers(self):
return func_table.keys()
def conv2d(self,
ksize,
knum,
strides,
activation=ops.relu,
padding='SAME',
group=1,
biased=True,
name=None):
self.net_def.append(
func_table['conv2d'](
self,ksize,knum,strides,activation,padding,group,biased,name
))
return self
def fc(self,
outdim,
activation = ops.relu,
name=None):
self.net_def.append(
func_table['fc'](
self,outdim,activation,name
))
return self
def dropout(self,
keep_prob,
name=None):
self.net_def.append(
func_table['dropout'](
self,keep_prob,name
))
return self
def lrn(self,
radius,
alpha,
beta,
bias=1.0,
name=None):
self.net_def.append(
func_table['lrn'](
self,radius,alpha,beta,bias,name
))
return self
def bn(self,
scale_offset=True,
activation=ops.relu,
name=None):
self.net_def.append(
func_table['bn'](
self,scale_offset,activation,name
))
return self
def softmax(self,
name=None):
self.net_def.append(
func_table['softmax'](
self,name
))
return self
def maxpool(self,
ksize,
strides,
padding='SAME',
name=None):
self.net_def.append(
func_table['maxpool'](
self,ksize,strides,padding,name
))
return self
def avgpool(self,
ksize,
strides,
padding='SAME',
name=None):
self.net_def.append(
func_table['avgpool'](
self,ksize,strides,padding,name
))
return self
class CustomNetwork(Network):
"""Automatically called setup and build when construct
"""
def __init__(self):
Network.__init__(self)
self.default_in_shape = None
self.setup()
in_shape = self.default_in_shape
if not in_shape:
raise ValueError("must sepecify the default_in_shape attributes, or pass the shape as an argument when construction")
def setup(self):
raise NotImplementedError("CustomNetwork Must Implement setup Method")
def build(self,inshape=None):
inshape = inshape or self.default_in_shape
return Network.build(self,inshape) |
def function(self,input_tensors,output_tensors): | random_line_split |
base.py | import numpy as np
from tfs.core.util import run_once_for_each_obj
from tfs.core.initializer import DefaultInit
from tfs.core.loss import DefaultLoss
from tfs.core.regularizers import DefaultRegularizer
from tfs.core.monitor import DefaultMonitor
from tfs.core.optimizer import DefaultOptimizer
from tfs.core.layer import func_table,Layer
from tfs.core.elem import Component
from tfs.core.layer import ops
import pickle
import tensorflow as tf
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.client import device_lib
from sklearn import metrics
# for supporting multi-gpu:
# https://github.com/tensorflow/tensorflow/blob/r0.7/tensorflow/models/image/cifar10/cifar10_multi_gpu_train.py#L174
#
# we use shared variables on CPU and model distributed on each GPU
from tfs.network.net_struct import NetStructure
#################### Network
# decorators
def with_graph(f):
def with_graph_run(self,*args,**kwargs):
with self.graph.as_default():
return f(self,*args,**kwargs)
# this is important to make the decorator compatiable with run_once_each_obj.
with_graph_run.__name__=f.__name__
return with_graph_run
class Network(object):
__hash__=object.__hash__
def __init__(self):
self._init_graph_sess()
self._struct = NetStructure(self)
self._true_out=None
self._in = None
self._out = None
self._loss=None
self.variables = {}
self.initializer = DefaultInit(self)
self.losser = DefaultLoss(self)
self.regularizer =DefaultRegularizer(self)
self.monitor = {}
self.monitor['default']=DefaultMonitor(self)
self._optimizer = DefaultOptimizer(self)
# this must be set when define a network
self.loss_input_layer_name = None
self._regulization=None
self.grads = None
self._train_op = None
self.num_gpu = 0
self.i_step = 0
self.n_epoch = 0
self._dtype = None
def to_pickle(self):
return [
self.in_shape,
self.loss_input_layer_name,
self.optimizer.to_pickle(),
self.losser.to_pickle(),
self.regularizer.to_pickle()
]
def restore(self,objs):
inshape = objs[0]
self.loss_input_layer_name = objs[1]
self.optimizer = Component.restore(objs[2],self)
self.losser = Component.restore(objs[3],self)
self.regularizer = Component.restore(objs[4],self)
if inshape:
self.build(inshape)
def _init_graph_sess(self):
self._graph = tf.Graph()
with self.graph.as_default():
self._sess = tf.Session()
@property
def optimizer(self):
return self._optimizer
@optimizer.setter
def optimizer(self,opt):
self.grads=None
self._optimizer=opt
def add_monitor(self,name,monitor):
self.monitor[name] = monitor
@staticmethod
def available_devices():
local_device_protos = device_lib.list_local_devices()
return [x for x in local_device_protos]
def __len__(self):
return len(self.net_def)
@property
@deprecated("2017-05-01", "Use `net_def` instead.")
def layers(self):
return self._struct
@property
def nodes(self):
return self._struct
@property
def net_def(self):
return self._struct
def node_to_index(self,l):
return self.net_def.find_index(l)
def node_by_index(self,idx):
return self.net_def[idx]
@deprecated("2017-05-01", "Use `node_by_name` instead.")
def layer_by_name(self,name):
return self.net_def.by_name(name)
def node_by_name(self,name):
return self.net_def.by_name(name)
def __del__(self):
self.sess.close()
def setup(self):
'''Construct the network. '''
raise NotImplementedError('Must be implemented by the subclass.')
def setup_with_def(self,struct_def,in_shape=None):
if isinstance(struct_def,list):
struct_def = NetStructure(self,nodes=struct_def)
self._struct = struct_def.copy_to(self)
if in_shape:
self.build(in_shape)
@property
def graph(self):
return self._graph
@property
def input(self):
return self._in
@property
def output(self):
return self._out
@property
def true_output(self):
return self._true_out
@property
def sess(self):
return self._sess
def _init_in_out_size(self):
if self.num_gpu and self._in is None and self._out is None:
self._in = [None]*self.num_gpu
self._out = [None]*self.num_gpu
self._true_out = [None]*self.num_gpu
self._loss = [None]*self.num_gpu
def tf_graph_str(self):
info=[]
for n in self.graph.as_graph_def().node:
|
return '\n'.join(info)
@with_graph
@run_once_for_each_obj
def build(self,input_shape,dtype=tf.float32):
self._dtype = dtype
"""Build the computational graph
inTensor: the network input tensor.
"""
if not self.num_gpu:
self._build(input_shape,dtype)
else:
tower_grads = []
for i in range(self.num_gpu):
with tf.device('/gpu:%d' % i):
with tf.name_scope('%s_%d' % ('GPU', i)) as scope:
self._build(input_shape,dtype,i)
tf.get_variable_scope().reuse_variables()
_loss = self.loss[i]
tower_grads.append(_grad)
self.build_variables_table()
self._initialize()
self.compute_gradients()
return self.output
def compute_gradients(self):
if self.loss is None:
return
if not self.num_gpu:
self.grads = self.optimizer.compute_gradients(self.loss,self.variables)
else:
tower_grads = []
for i in range(self.num_gpu):
with tf.device('/gpu:%d' % i):
with tf.name_scope('%s_%d' % ('GPU', i)) as scope:
tf.get_variable_scope().reuse_variables()
_loss = self.loss[i]
_grad = self.optimizer.compute_gradients(_loss,self.variables.values())
tower_grads.append(_grad)
self.grads = self.average_gradients(tower_grads)
def average_gradients(self,tower_grads):
average_grads = []
for grad_and_vars in zip(*tower_grads):
# Note that each grad_and_vars looks like the following:
# ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))
grads = []
for g, _ in grad_and_vars:
expanded_g = tf.expand_dims(g, 0)
grads.append(expanded_g)
grad = tf.concat(axis=0, values=grads)
grad = tf.reduce_mean(grad, 0)
v = grad_and_vars[0][1]
grad_and_var = (grad, v)
average_grads.append(grad_and_var)
return average_grads
# this function is called only in build() under current graph.
def _build(self,input_shape,dtype,idx=None):
self._init_in_out_size()
tmp = tf.placeholder(dtype,input_shape)
if idx is None:
self._in = tmp
else:
self._in[idx] = tmp
for l in self.net_def:
tmp = l.build(tmp,idx)
if idx is None:
self._out = tmp
output_shape=self._out.get_shape().as_list()
output_dtype=self._out.dtype
self._true_out=tf.placeholder(dtype=output_dtype,shape=output_shape)
self._loss = self._compute_loss(idx)
else:
self._out[idx] = tmp
output_shape=self._out[idx].get_shape().as_list()
output_dtype=self._out[idx].dtype
self._true_out[i]=tf.placeholder(dtype=output_dtype,shape=output_shape)
self._loss[idx] = self._compute_loss(idx)
return self
def _initialize(self):
self.run_initor(self.initializer)
def _compute_loss(self,idx):
loss = self.losser.compute(idx)
if loss is None:
return loss
return loss + self.regularizer.compute()
@property
def loss(self):
return self._loss
def build_variables_table(self):
for l in self.net_def:
for k in l.variables:
v = l.variables[k]
self.variables[v.name] = v
def has_built(self):
if hasattr(self,'_has_run'):
if Network.build.__name__ in self._has_run:
return True
return False
def fit(self,dataset,batch_size,n_epoch,
shuffle_epoch=True,max_step=10000000):
if dataset.train.labels.shape[-1] != self.out_shape[-1]:
dataset = dataset.to_one_hot()
train_set = dataset.train
test_set = dataset.test
train_set.before_iter()
self.i_step = 0
self.n_epoch = 0
while True:
self.i_step += 1
self.n_epoch = train_set.epochs_completed
X,y = train_set.next_batch(batch_size,shuffle=shuffle_epoch)
self.step(X,y,self.i_step)
for v in self.monitor.values():
v.status(train_set,test_set,self.i_step,self.n_epoch)
if self.n_epoch>=n_epoch:
break
if self.i_step >= max_step:
break
return self
@property
def train_op(self):
if self._train_op is None:
self._train_op = self._get_train_op()
return self._train_op
@with_graph
def _get_train_op(self,step=None):
if self.loss is None:
return None
if self.grads is None:
self.compute_gradients()
op = self.optimizer.apply_gradients(self.grads,step)
# initialize the uninitalized variable (the optimizer would introduce
# uninitalized variable)
vars = self.optimizer.variables
self.run(tf.variables_initializer(vars.values()))
return op
def step(self,X,y,step):
self.run(self.train_op,feed_dict={self.input:X,self.true_output:y})
def predict(self,X):
if self.num_gpu==0:
_in = self.input
_out = self.output
else:
_in = self.input[0]
_out = self.output[0]
return self.run(_out,feed_dict={_in:X})
def eval_node_input(self,node,X):
_in = self.input
if isinstance(node,str):
_out = self.node_by_name(node).input
else:
_out = node.input
return self.run(_out,feed_dict={_in:X})
def eval_node(self,node,X):
_in = self.input
if isinstance(node,str):
_out = self.node_by_name(node).output
else:
_out = node.output
return self.run(_out,feed_dict={_in:X})
def function(self,input_tensors,output_tensors):
def _func(input_vals):
feed = {t:v in zip(input_vals,input_tensors)}
return self.run(output_tensors,feed_dict=feed)
return _func
def score(self,datasubset):
y_pred = self.predict(datasubset.data)
y_pred = np.argmax(y_pred,1)
y_true = datasubset.labels
y_true = np.argmax(y_true,1)
return metrics.accuracy_score(y_true,y_pred)
def measure_loss(self,X,y):
if self.num_gpu==0:
_in = self.input
_true_out = self.true_output
_loss = self.loss
else:
_in = self.input[0]
_true_out = self.true_output[0]
_loss = self.loss[0]
return self.run(_loss,feed_dict={_in:X,_true_out:y})
def run(self,eval_list,feed_dict=None):
return self.sess.run(eval_list, feed_dict=feed_dict)
def run_initor(self,initor):
op = initor.compute()
return self.sess.run(op)
def save(self,filename):
self.save_def(filename)
to_save={}
for k,v in self.variables.items():
to_save[k]=self.run(v)
f=open(filename+'.model','wb')
pickle.dump(to_save,f)
f.close()
def save_def(self,filename):
self.net_def.save(filename+'.modeldef')
def load(self,filename):
self._init_graph_sess()
self.load_def(filename)
f=open(filename+'.model','rb')
data_dict=pickle.load(f)
f.close()
if self.has_built():
with self._graph.as_default():
op = self.initializer.op_by_value_table(data_dict)
self.run(op)
def load_def(self,filename):
self.net_def.load(filename+'.modeldef')
@property
def in_shape(self):
if self._in is not None:
if self.num_gpu==0:
return self._in.get_shape().as_list()
else:
return self._in[0].get_shape().as_list()
return None
@property
def dtype(self):
return self._dtype
@property
def out_shape(self):
if self._out is not None:
if self.num_gpu==0:
return self._out.get_shape().as_list()
else:
return self._out[0].get_shape().as_list()
return None
def copy(self):
obj = Network()
obj.loss_input_layer_name = self.loss_input_layer_name
obj.setup_with_def(self.net_def,self.in_shape)
return obj
def __str__(self):
return '\n'.join([str(l) for l in self.nodes])
def print_shape(self):
for l in self.nodes:
print('%-20s %20s %s %-20s'%(
l.name,
l.input.get_shape(),
'->',
l.output.get_shape()))
def subnet(self,begin_index,end_index):
obj = Network()
obj.setup_with_def(self.layers[begin_index:end_index])
return obj
def supported_layers(self):
return func_table.keys()
def conv2d(self,
ksize,
knum,
strides,
activation=ops.relu,
padding='SAME',
group=1,
biased=True,
name=None):
self.net_def.append(
func_table['conv2d'](
self,ksize,knum,strides,activation,padding,group,biased,name
))
return self
def fc(self,
outdim,
activation = ops.relu,
name=None):
self.net_def.append(
func_table['fc'](
self,outdim,activation,name
))
return self
def dropout(self,
keep_prob,
name=None):
self.net_def.append(
func_table['dropout'](
self,keep_prob,name
))
return self
def lrn(self,
radius,
alpha,
beta,
bias=1.0,
name=None):
self.net_def.append(
func_table['lrn'](
self,radius,alpha,beta,bias,name
))
return self
def bn(self,
scale_offset=True,
activation=ops.relu,
name=None):
self.net_def.append(
func_table['bn'](
self,scale_offset,activation,name
))
return self
def softmax(self,
name=None):
self.net_def.append(
func_table['softmax'](
self,name
))
return self
def maxpool(self,
ksize,
strides,
padding='SAME',
name=None):
self.net_def.append(
func_table['maxpool'](
self,ksize,strides,padding,name
))
return self
def avgpool(self,
ksize,
strides,
padding='SAME',
name=None):
self.net_def.append(
func_table['avgpool'](
self,ksize,strides,padding,name
))
return self
class CustomNetwork(Network):
"""Automatically called setup and build when construct
"""
def __init__(self):
Network.__init__(self)
self.default_in_shape = None
self.setup()
in_shape = self.default_in_shape
if not in_shape:
raise ValueError("must sepecify the default_in_shape attributes, or pass the shape as an argument when construction")
def setup(self):
raise NotImplementedError("CustomNetwork Must Implement setup Method")
def build(self,inshape=None):
inshape = inshape or self.default_in_shape
return Network.build(self,inshape)
| s = '%-20s@%20s'%(n.name,n.device)
if hasattr(n,'tfs_nodename'):
s=s+' --%s'%n.tfs_nodename
info.append(s) | conditional_block |
base.py | import numpy as np
from tfs.core.util import run_once_for_each_obj
from tfs.core.initializer import DefaultInit
from tfs.core.loss import DefaultLoss
from tfs.core.regularizers import DefaultRegularizer
from tfs.core.monitor import DefaultMonitor
from tfs.core.optimizer import DefaultOptimizer
from tfs.core.layer import func_table,Layer
from tfs.core.elem import Component
from tfs.core.layer import ops
import pickle
import tensorflow as tf
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.client import device_lib
from sklearn import metrics
# for supporting multi-gpu:
# https://github.com/tensorflow/tensorflow/blob/r0.7/tensorflow/models/image/cifar10/cifar10_multi_gpu_train.py#L174
#
# we use shared variables on CPU and model distributed on each GPU
from tfs.network.net_struct import NetStructure
#################### Network
# decorators
def with_graph(f):
def with_graph_run(self,*args,**kwargs):
with self.graph.as_default():
return f(self,*args,**kwargs)
# this is important to make the decorator compatiable with run_once_each_obj.
with_graph_run.__name__=f.__name__
return with_graph_run
class Network(object):
__hash__=object.__hash__
def __init__(self):
self._init_graph_sess()
self._struct = NetStructure(self)
self._true_out=None
self._in = None
self._out = None
self._loss=None
self.variables = {}
self.initializer = DefaultInit(self)
self.losser = DefaultLoss(self)
self.regularizer =DefaultRegularizer(self)
self.monitor = {}
self.monitor['default']=DefaultMonitor(self)
self._optimizer = DefaultOptimizer(self)
# this must be set when define a network
self.loss_input_layer_name = None
self._regulization=None
self.grads = None
self._train_op = None
self.num_gpu = 0
self.i_step = 0
self.n_epoch = 0
self._dtype = None
def to_pickle(self):
return [
self.in_shape,
self.loss_input_layer_name,
self.optimizer.to_pickle(),
self.losser.to_pickle(),
self.regularizer.to_pickle()
]
def restore(self,objs):
inshape = objs[0]
self.loss_input_layer_name = objs[1]
self.optimizer = Component.restore(objs[2],self)
self.losser = Component.restore(objs[3],self)
self.regularizer = Component.restore(objs[4],self)
if inshape:
self.build(inshape)
def _init_graph_sess(self):
self._graph = tf.Graph()
with self.graph.as_default():
self._sess = tf.Session()
@property
def optimizer(self):
return self._optimizer
@optimizer.setter
def optimizer(self,opt):
self.grads=None
self._optimizer=opt
def add_monitor(self,name,monitor):
self.monitor[name] = monitor
@staticmethod
def available_devices():
local_device_protos = device_lib.list_local_devices()
return [x for x in local_device_protos]
def __len__(self):
return len(self.net_def)
@property
@deprecated("2017-05-01", "Use `net_def` instead.")
def layers(self):
return self._struct
@property
def nodes(self):
return self._struct
@property
def net_def(self):
return self._struct
def node_to_index(self,l):
return self.net_def.find_index(l)
def node_by_index(self,idx):
return self.net_def[idx]
@deprecated("2017-05-01", "Use `node_by_name` instead.")
def layer_by_name(self,name):
return self.net_def.by_name(name)
def node_by_name(self,name):
return self.net_def.by_name(name)
def __del__(self):
self.sess.close()
def setup(self):
'''Construct the network. '''
raise NotImplementedError('Must be implemented by the subclass.')
def setup_with_def(self,struct_def,in_shape=None):
if isinstance(struct_def,list):
struct_def = NetStructure(self,nodes=struct_def)
self._struct = struct_def.copy_to(self)
if in_shape:
self.build(in_shape)
@property
def graph(self):
return self._graph
@property
def input(self):
return self._in
@property
def output(self):
return self._out
@property
def true_output(self):
return self._true_out
@property
def sess(self):
return self._sess
def _init_in_out_size(self):
if self.num_gpu and self._in is None and self._out is None:
self._in = [None]*self.num_gpu
self._out = [None]*self.num_gpu
self._true_out = [None]*self.num_gpu
self._loss = [None]*self.num_gpu
def tf_graph_str(self):
info=[]
for n in self.graph.as_graph_def().node:
s = '%-20s@%20s'%(n.name,n.device)
if hasattr(n,'tfs_nodename'):
s=s+' --%s'%n.tfs_nodename
info.append(s)
return '\n'.join(info)
@with_graph
@run_once_for_each_obj
def build(self,input_shape,dtype=tf.float32):
self._dtype = dtype
"""Build the computational graph
inTensor: the network input tensor.
"""
if not self.num_gpu:
self._build(input_shape,dtype)
else:
tower_grads = []
for i in range(self.num_gpu):
with tf.device('/gpu:%d' % i):
with tf.name_scope('%s_%d' % ('GPU', i)) as scope:
self._build(input_shape,dtype,i)
tf.get_variable_scope().reuse_variables()
_loss = self.loss[i]
tower_grads.append(_grad)
self.build_variables_table()
self._initialize()
self.compute_gradients()
return self.output
def compute_gradients(self):
if self.loss is None:
return
if not self.num_gpu:
self.grads = self.optimizer.compute_gradients(self.loss,self.variables)
else:
tower_grads = []
for i in range(self.num_gpu):
with tf.device('/gpu:%d' % i):
with tf.name_scope('%s_%d' % ('GPU', i)) as scope:
tf.get_variable_scope().reuse_variables()
_loss = self.loss[i]
_grad = self.optimizer.compute_gradients(_loss,self.variables.values())
tower_grads.append(_grad)
self.grads = self.average_gradients(tower_grads)
def average_gradients(self,tower_grads):
average_grads = []
for grad_and_vars in zip(*tower_grads):
# Note that each grad_and_vars looks like the following:
# ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))
grads = []
for g, _ in grad_and_vars:
expanded_g = tf.expand_dims(g, 0)
grads.append(expanded_g)
grad = tf.concat(axis=0, values=grads)
grad = tf.reduce_mean(grad, 0)
v = grad_and_vars[0][1]
grad_and_var = (grad, v)
average_grads.append(grad_and_var)
return average_grads
# this function is called only in build() under current graph.
def _build(self,input_shape,dtype,idx=None):
self._init_in_out_size()
tmp = tf.placeholder(dtype,input_shape)
if idx is None:
self._in = tmp
else:
self._in[idx] = tmp
for l in self.net_def:
tmp = l.build(tmp,idx)
if idx is None:
self._out = tmp
output_shape=self._out.get_shape().as_list()
output_dtype=self._out.dtype
self._true_out=tf.placeholder(dtype=output_dtype,shape=output_shape)
self._loss = self._compute_loss(idx)
else:
self._out[idx] = tmp
output_shape=self._out[idx].get_shape().as_list()
output_dtype=self._out[idx].dtype
self._true_out[i]=tf.placeholder(dtype=output_dtype,shape=output_shape)
self._loss[idx] = self._compute_loss(idx)
return self
def _initialize(self):
|
def _compute_loss(self,idx):
loss = self.losser.compute(idx)
if loss is None:
return loss
return loss + self.regularizer.compute()
@property
def loss(self):
return self._loss
def build_variables_table(self):
for l in self.net_def:
for k in l.variables:
v = l.variables[k]
self.variables[v.name] = v
def has_built(self):
if hasattr(self,'_has_run'):
if Network.build.__name__ in self._has_run:
return True
return False
def fit(self,dataset,batch_size,n_epoch,
shuffle_epoch=True,max_step=10000000):
if dataset.train.labels.shape[-1] != self.out_shape[-1]:
dataset = dataset.to_one_hot()
train_set = dataset.train
test_set = dataset.test
train_set.before_iter()
self.i_step = 0
self.n_epoch = 0
while True:
self.i_step += 1
self.n_epoch = train_set.epochs_completed
X,y = train_set.next_batch(batch_size,shuffle=shuffle_epoch)
self.step(X,y,self.i_step)
for v in self.monitor.values():
v.status(train_set,test_set,self.i_step,self.n_epoch)
if self.n_epoch>=n_epoch:
break
if self.i_step >= max_step:
break
return self
@property
def train_op(self):
if self._train_op is None:
self._train_op = self._get_train_op()
return self._train_op
@with_graph
def _get_train_op(self,step=None):
if self.loss is None:
return None
if self.grads is None:
self.compute_gradients()
op = self.optimizer.apply_gradients(self.grads,step)
# initialize the uninitalized variable (the optimizer would introduce
# uninitalized variable)
vars = self.optimizer.variables
self.run(tf.variables_initializer(vars.values()))
return op
def step(self,X,y,step):
self.run(self.train_op,feed_dict={self.input:X,self.true_output:y})
def predict(self,X):
if self.num_gpu==0:
_in = self.input
_out = self.output
else:
_in = self.input[0]
_out = self.output[0]
return self.run(_out,feed_dict={_in:X})
def eval_node_input(self,node,X):
_in = self.input
if isinstance(node,str):
_out = self.node_by_name(node).input
else:
_out = node.input
return self.run(_out,feed_dict={_in:X})
def eval_node(self,node,X):
_in = self.input
if isinstance(node,str):
_out = self.node_by_name(node).output
else:
_out = node.output
return self.run(_out,feed_dict={_in:X})
def function(self,input_tensors,output_tensors):
def _func(input_vals):
feed = {t:v in zip(input_vals,input_tensors)}
return self.run(output_tensors,feed_dict=feed)
return _func
def score(self,datasubset):
y_pred = self.predict(datasubset.data)
y_pred = np.argmax(y_pred,1)
y_true = datasubset.labels
y_true = np.argmax(y_true,1)
return metrics.accuracy_score(y_true,y_pred)
def measure_loss(self,X,y):
if self.num_gpu==0:
_in = self.input
_true_out = self.true_output
_loss = self.loss
else:
_in = self.input[0]
_true_out = self.true_output[0]
_loss = self.loss[0]
return self.run(_loss,feed_dict={_in:X,_true_out:y})
def run(self,eval_list,feed_dict=None):
return self.sess.run(eval_list, feed_dict=feed_dict)
def run_initor(self,initor):
op = initor.compute()
return self.sess.run(op)
def save(self,filename):
self.save_def(filename)
to_save={}
for k,v in self.variables.items():
to_save[k]=self.run(v)
f=open(filename+'.model','wb')
pickle.dump(to_save,f)
f.close()
def save_def(self,filename):
self.net_def.save(filename+'.modeldef')
def load(self,filename):
self._init_graph_sess()
self.load_def(filename)
f=open(filename+'.model','rb')
data_dict=pickle.load(f)
f.close()
if self.has_built():
with self._graph.as_default():
op = self.initializer.op_by_value_table(data_dict)
self.run(op)
def load_def(self,filename):
self.net_def.load(filename+'.modeldef')
@property
def in_shape(self):
if self._in is not None:
if self.num_gpu==0:
return self._in.get_shape().as_list()
else:
return self._in[0].get_shape().as_list()
return None
@property
def dtype(self):
return self._dtype
@property
def out_shape(self):
if self._out is not None:
if self.num_gpu==0:
return self._out.get_shape().as_list()
else:
return self._out[0].get_shape().as_list()
return None
def copy(self):
obj = Network()
obj.loss_input_layer_name = self.loss_input_layer_name
obj.setup_with_def(self.net_def,self.in_shape)
return obj
def __str__(self):
return '\n'.join([str(l) for l in self.nodes])
def print_shape(self):
for l in self.nodes:
print('%-20s %20s %s %-20s'%(
l.name,
l.input.get_shape(),
'->',
l.output.get_shape()))
def subnet(self,begin_index,end_index):
obj = Network()
obj.setup_with_def(self.layers[begin_index:end_index])
return obj
def supported_layers(self):
return func_table.keys()
def conv2d(self,
ksize,
knum,
strides,
activation=ops.relu,
padding='SAME',
group=1,
biased=True,
name=None):
self.net_def.append(
func_table['conv2d'](
self,ksize,knum,strides,activation,padding,group,biased,name
))
return self
def fc(self,
outdim,
activation = ops.relu,
name=None):
self.net_def.append(
func_table['fc'](
self,outdim,activation,name
))
return self
def dropout(self,
keep_prob,
name=None):
self.net_def.append(
func_table['dropout'](
self,keep_prob,name
))
return self
def lrn(self,
radius,
alpha,
beta,
bias=1.0,
name=None):
self.net_def.append(
func_table['lrn'](
self,radius,alpha,beta,bias,name
))
return self
def bn(self,
scale_offset=True,
activation=ops.relu,
name=None):
self.net_def.append(
func_table['bn'](
self,scale_offset,activation,name
))
return self
def softmax(self,
name=None):
self.net_def.append(
func_table['softmax'](
self,name
))
return self
def maxpool(self,
ksize,
strides,
padding='SAME',
name=None):
self.net_def.append(
func_table['maxpool'](
self,ksize,strides,padding,name
))
return self
def avgpool(self,
ksize,
strides,
padding='SAME',
name=None):
self.net_def.append(
func_table['avgpool'](
self,ksize,strides,padding,name
))
return self
class CustomNetwork(Network):
"""Automatically called setup and build when construct
"""
def __init__(self):
Network.__init__(self)
self.default_in_shape = None
self.setup()
in_shape = self.default_in_shape
if not in_shape:
raise ValueError("must sepecify the default_in_shape attributes, or pass the shape as an argument when construction")
def setup(self):
raise NotImplementedError("CustomNetwork Must Implement setup Method")
def build(self,inshape=None):
inshape = inshape or self.default_in_shape
return Network.build(self,inshape)
| self.run_initor(self.initializer) | identifier_body |
models.py | #!/usr/bin/python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""App Engine data model (schema) definition for Quiz."""
# Python imports
import base64
import logging
import md5
import operator
import os
import re
import time
# AppEngine imports
from google.appengine.ext import db
from google.appengine.api import memcache
class QuizBaseModel(db.Model):
"""Base class for quiz models."""
class QuizTrunkModel(QuizBaseModel):
"""Maintains trunk for quiz model.
Attributes:
head: Maintians the head of a quiz.
"""
head = db.StringProperty()
class QuizRevisionModel(QuizBaseModel):
"""Maintains list of revisions for a quiz.
Quiz trunk associated with the revision is made parent of the model.
Attributes:
quiz_id: Id (key) for particular version of the quiz.
time_stamp: Time_stamp for a new revision.
commit_message: Commit message associated with new version.
"""
quiz_id = db.StringProperty()
time_stamp = db.DateTimeProperty(auto_now=True)
commit_message = db.StringProperty(default='Commiting a new version')
class QuizPropertyModel(QuizBaseModel):
"""Defines various properties for a quiz.
Attributes:
shuffle_questions: If set questions are presented in random order.
min_options: minimum number of options to be presented.
max_options: maximum number of options to be presented.
min_questions: minimum number of questions required to complete the quiz.
Used to track the progress.
repeat_questions: If set questions are repeated.
repeat_wrongly_answered_questions: If set wrongly answered questions are
repeated.
"""
shuffle_questions = db.BooleanProperty(default=True)
min_options = db.IntegerProperty(default=2)
max_options = db.IntegerProperty(default=10) # 0 implies all
min_questions = db.IntegerProperty(default=0) # 0 implies all
repeat_questions = db.BooleanProperty(default=False)
repeat_wrongly_answered_questions = db.BooleanProperty(default=False)
class QuizModel(QuizBaseModel):
    """Represents a quiz.

    Attributes:
        difficulty_level: Difficulty level for the quiz (RatingProperty,
            range 0-100; default 5).
        quiz_property: Reference to QuizPropertyModel associated with quiz.
        title: Title of the quiz.
        tags: List of db.Category tags associated with quiz.
        trunk: Reference to the QuizTrunkModel associated with the quiz.
        introduction: Introduction text shown on the quiz start page.
    """
    # implicit id
    difficulty_level = db.RatingProperty(default=5)
    quiz_property = db.ReferenceProperty(QuizPropertyModel)
    title = db.StringProperty()
    tags = db.ListProperty(db.Category)
    trunk = db.ReferenceProperty(QuizTrunkModel)
    introduction = db.StringProperty()
class ChoiceModel(QuizBaseModel):
    """Represents a choice/option provided to user for a question model.

    Attributes:
        body: Body of the choice.
        message: Message to be displayed when choice is selected.
            May act like a hint.
        is_correct: If the choice selected is correct.
    """
    # implicit id
    body = db.TextProperty()
    message = db.StringProperty()
    is_correct = db.BooleanProperty(default=False)

    # NOTE(review): the body of this method is missing at this point in the
    # file (it appears displaced further down in the source); left as-is.
    def dump_to_dict(self):
|
class QuestionModel(QuizBaseModel):
    """Represents a question.

    Attributes:
        body: Text associated with the question.
        choices: List of keys of the ChoiceModel entities for this question.
        shuffle_choices: If set, choices are randomly shuffled.
        hints: Ordered list of progressive hints.
    """
    # implicit id
    body = db.TextProperty()
    choices = db.ListProperty(db.Key)
    shuffle_choices = db.BooleanProperty(default=True)
    hints = db.StringListProperty()

    def dump_to_dict(self):
        """Dumps the question model to a dictionary for passing
        around as JSON object.

        Returns:
            Dict with the question's id, body, hints and the dumped
            (possibly shuffled) choice dicts.
        """
        data_dict = {
            'id': str(self.key()),
            'body': self.body,
            'hints': self.hints,
            'choices': [db.get(el).dump_to_dict() for el in self.choices],
        }
        if self.shuffle_choices and data_dict['choices']:
            # Fix: random.shuffle shuffles in place and returns None, so the
            # original assignment (choices = random.shuffle(...)) silently
            # replaced the choice list with None.
            random.shuffle(data_dict['choices'])
        return data_dict
class QuizQuestionListModel(QuizBaseModel):
    """Maintains a list of question with its quiz id.

    This is necessary because questions may be shared between different
    quizes; the association is kept in its own entity rather than on the
    question itself.

    Attributes:
        quiz: Reference to quiz object.
        question: Reference to question object associated with quiz.
        time_stamp: Time stamp (set once on creation).
    """
    quiz = db.ReferenceProperty(QuizModel)
    question = db.ReferenceProperty(QuestionModel)
    time_stamp = db.DateTimeProperty(auto_now_add=True)
class ResponseModel(QuizBaseModel):
    """Stores response data required for producing next question.

    Attributes:
        session_id: Session identifier.
        answered_correctly: Set if the response resulted in correct answer.
        question: Reference to question being answered.
        quiz: Reference to associated quiz.
        quiz_trunk: Reference to associated quiz trunk.
        time_stamp: Time stamp of the response (auto-updated on put).
        attempts: Number of attempts so far, useful for scoring.
    """
    session_id = db.StringProperty(required=True)
    # Fix: the original declared db.BooleanProperty(db.Key), passing the
    # db.Key class as the property's first positional argument
    # (verbose_name) — clearly a copy-paste slip from a ListProperty
    # declaration; a plain boolean property is intended.
    answered_correctly = db.BooleanProperty()
    question = db.ReferenceProperty(QuestionModel)
    quiz = db.ReferenceProperty(QuizModel)
    quiz_trunk = db.ReferenceProperty(QuizTrunkModel)
    time_stamp = db.DateTimeProperty(auto_now=True)
    attempts = db.IntegerProperty(default=0)
class QuizScoreModel(QuizBaseModel):
    """Stores progress status associated with a quiz and session.

    Both score and progress are out of 100.

    Attributes:
        session_id: Session identifier.
        quiz: Reference to associated quiz.
        quiz_trunk: Reference to associated quiz trunk.
        score: Current score.
        progress: Current progress status.
        questions_attempted: Number of questions attempted so far.
    """
    quiz_trunk = db.ReferenceProperty(QuizTrunkModel)
    session_id = db.StringProperty(required=True)
    quiz = db.ReferenceProperty(QuizModel)
    score = db.FloatProperty(default=0.0)
    progress = db.FloatProperty(default=0.0)
    questions_attempted = db.IntegerProperty(default=0)
| """Dumps choice to a dictionary for passing around as JSON object."""
data_dict = {'body': self.body,
'id': str(self.key())}
return data_dict | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.