text stringlengths 1 1.05M |
|---|
<filename>lib/instance.js
// Create the singleton route picker and hook it into the raw connect
// middleware stack so every incoming request goes through its dispatcher.
Picker = new PickerImp();
WebApp.rawConnectHandlers.use((req, res, next) => {
  Picker._dispatch(req, res, next);
});
|
from chatterbot import ChatBot
from chatterbot.trainers import ListTrainer

# Initialize a chatbot instance; 'CustomChatBot' is only the bot's
# display/database name (default storage adapter is used).
chatbot = ChatBot('CustomChatBot')

# Custom conversational data provided as a list of strings: consecutive
# entries form statement/response pairs for ListTrainer.
custom_conversations = [
    "Hello",
    "Hi there!",
    "How are you?",
    "I'm doing great.",
    "That's good to hear.",
    "Thank you.",
    "You're welcome."
]

# Train the chatbot using the ListTrainer
trainer = ListTrainer(chatbot)
trainer.train(custom_conversations)

# Interaction loop to allow the user to interact with the trained chatbot;
# typing 'exit' (any case) ends the session.
print("Chatbot: Hello! I am a trained chatbot. You can start a conversation with me.")
while True:
    user_input = input("You: ")
    if user_input.lower() == 'exit':
        print("Chatbot: Goodbye!")
        break
    response = chatbot.get_response(user_input)
    print("Chatbot:", response)
<gh_stars>1-10
#include <gtest/gtest.h>

#include <cmath>
#include <list>
#include <set>
#include <vector>

#include "AdjacencyListVertex.h"
#include "AdjacencyListGraph.h"
#include "GraphExceptions.h"
#include "WeightedGraphAspect.h"
#include "WeightedVertexAspect.h"
#include "WeightedClusteringCoefficient.h"
namespace weightedClusterCoefficientTest
{
using namespace graphpp;
using namespace std;
using ::testing::Test;
// Test fixture for WeightedClusteringCoefficient. SetUp/TearDown are
// intentionally empty: each test constructs its own graph from scratch.
class WeightedClusterCoefficientTest : public Test
{
protected:
    WeightedClusterCoefficientTest() { }
    virtual ~WeightedClusterCoefficientTest() { }
    virtual void SetUp()
    {
    }
    virtual void TearDown()
    {
    }
public:
    // Vertex type with weighted-edge support layered over the adjacency-list vertex.
    typedef WeightedVertexAspect<AdjacencyListVertex> Vertex;
    typedef AdjacencyListGraph<Vertex> Graph;
    // Graph type with weighted-edge support layered over the adjacency-list graph.
    typedef WeightedGraphAspect<Vertex, Graph> WeightedGraph;
};
// Weighted graph containing triangles through vertex x: checks the
// per-vertex coefficient and the per-degree aggregate coefficients.
TEST_F(WeightedClusterCoefficientTest, GeneralTest)
{
    WeightedGraph g;
    // NOTE(review): vertices are heap-allocated with no matching delete;
    // presumably the graph takes ownership on addVertex — confirm, else this leaks.
    Vertex* x = new Vertex(1);
    Vertex* v1 = new Vertex(2);
    Vertex* v2 = new Vertex(3);
    Vertex* v3 = new Vertex(4);
    Vertex* v4 = new Vertex(5);
    g.addVertex(x);
    g.addVertex(v1);
    g.addVertex(v2);
    g.addVertex(v3);
    g.addVertex(v4);
    g.addEdge(x, v1, 1);
    g.addEdge(v1, v2, 1);
    g.addEdge(v2, v3, 1);
    g.addEdge(v3, x, 1);
    g.addEdge(x, v4, 5); // heavier edge to the pendant vertex v4
    g.addEdge(v3, v1, 1);
    g.addEdge(x, v2, 1);
    WeightedClusteringCoefficient<WeightedGraph, Vertex> clustering;
    double c = clustering.vertexClusteringCoefficient(x);
    double epsilon = 0.001; // tolerance for floating-point comparison
    ASSERT_TRUE(fabs(c - 0.25) < epsilon);
    // Aggregate coefficient over all vertices of degree 4.
    double c2 = clustering.clusteringCoefficient(g, Vertex::Degree(4));
    ASSERT_TRUE(fabs(c2 - 0.25) < epsilon);
    // Aggregate coefficient over all vertices of degree 3.
    double c3 = clustering.clusteringCoefficient(g, Vertex::Degree(3));
    ASSERT_TRUE(fabs(c3 - 1) < epsilon);
}
// A star-shaped (triangle-free) weighted graph: no pair of x's
// neighbours is connected, so every clustering coefficient must be 0.
TEST_F(WeightedClusterCoefficientTest, AcyclicWeightedGraphTest)
{
    WeightedGraph ig;
    Vertex* x = new Vertex(1);
    Vertex* v1 = new Vertex(2);
    Vertex* v2 = new Vertex(3);
    Vertex* v3 = new Vertex(4);
    Vertex* v4 = new Vertex(5);
    ig.addVertex(x);
    ig.addVertex(v1);
    ig.addVertex(v2);
    ig.addVertex(v3);
    ig.addVertex(v4);
    // Every edge touches x; the neighbours of x form no triangle.
    ig.addEdge(x, v1, 1);
    ig.addEdge(x, v2, 1);
    ig.addEdge(x, v3, 1);
    ig.addEdge(v4, x, 1); // fixed: stray duplicate semicolon removed
    WeightedClusteringCoefficient<WeightedGraph, Vertex> clustering;
    double c = clustering.vertexClusteringCoefficient(x);
    double epsilon = 0.001; // tolerance for floating-point comparison
    ASSERT_TRUE(fabs(c - 0.0) < epsilon);
    double c2 = clustering.clusteringCoefficient(ig, Vertex::Degree(4));
    ASSERT_TRUE(fabs(c2 - 0.0) < epsilon);
}
// Degenerate case: an isolated vertex has no neighbours, so its
// clustering coefficient is expected to be zero.
TEST_F(WeightedClusterCoefficientTest, SingleNodeGraph)
{
    WeightedGraph ig;
    Vertex* x = new Vertex(1);
    ig.addVertex(x);
    WeightedClusteringCoefficient<WeightedGraph, Vertex> clustering;
    double c = clustering.vertexClusteringCoefficient(x);
    double epsilon = 0.001; // tolerance for floating-point comparison
    ASSERT_TRUE(fabs(c - 0.0) < epsilon);
    // No vertex of degree 4 exists; the aggregate must also be zero.
    double c2 = clustering.clusteringCoefficient(ig, Vertex::Degree(4));
    ASSERT_TRUE(fabs(c2 - 0.0) < epsilon);
}
}
|
def detect_anagrams(dictionary):
    """Group the words of ``dictionary`` by anagram class.

    Each word's key is its characters sorted and joined; the result maps
    every key to the list of words sharing it, in input order.
    """
    groups = {}
    for word in dictionary:
        signature = ''.join(sorted(word))
        groups.setdefault(signature, []).append(word)
    return groups

# example
dictionary = ["eat", "tea", "tan", "ate", "nat", "bat"]
print(detect_anagrams(dictionary))
package io.opensphere.mantle.data.geom.style.config.v1;
import java.util.Collection;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import io.opensphere.core.util.Utilities;
import io.opensphere.core.util.collections.New;
import io.opensphere.mantle.data.geom.style.VisualizationStyle;
import io.opensphere.mantle.data.geom.style.VisualizationStyleParameter;
/**
 * The Class StyleParameterSetConfig. JAXB bean that captures a
 * visualization style's class name plus its set of saved parameters.
 */
@XmlRootElement(name = "StyleParameterSet")
@XmlAccessorType(XmlAccessType.FIELD)
public class StyleParameterSetConfig
{
    /** The Parameter set. */
    @XmlElement(name = "Parameter")
    private Set<StyleParameterConfig> myParameterSet;

    /** The Selected style class name. */
    @XmlAttribute(name = "StyleClass")
    private String myStyleClassName;

    /**
     * Instantiates a new style parameter set config (also required by JAXB).
     */
    public StyleParameterSetConfig()
    {
        myParameterSet = New.set();
    }

    /**
     * Instantiates a new style parameter set config.
     *
     * @param styleClass the style class
     * @param paramCollection the param collection; only saved parameters are kept
     */
    public StyleParameterSetConfig(String styleClass, Collection<VisualizationStyleParameter> paramCollection)
    {
        this();
        myStyleClassName = styleClass;
        if (paramCollection != null && !paramCollection.isEmpty())
        {
            paramCollection.stream().filter(v -> v.isSaved()).map(StyleParameterConfig::new).forEach(myParameterSet::add);
        }
    }

    /**
     * Copy constructor. Deep-copies each parameter config.
     *
     * @param other the StyleParameterSetConfig to copy.
     */
    public StyleParameterSetConfig(StyleParameterSetConfig other)
    {
        Utilities.checkNull(other, "other");
        myStyleClassName = other.myStyleClassName;
        myParameterSet = New.set();
        if (other.myParameterSet != null)
        {
            other.myParameterSet.stream().map(StyleParameterConfig::new).forEach(myParameterSet::add);
        }
    }

    /**
     * Instantiates a new style parameter set config from a style instance.
     *
     * @param aStyle the a style
     */
    public StyleParameterSetConfig(VisualizationStyle aStyle)
    {
        this(aStyle.getClass().getName(), aStyle.getStyleParameterSet());
    }

    @Override
    public boolean equals(Object obj)
    {
        if (this == obj)
        {
            return true;
        }
        if (obj == null || getClass() != obj.getClass())
        {
            return false;
        }
        StyleParameterSetConfig other = (StyleParameterSetConfig)obj;
        return java.util.Objects.equals(myParameterSet, other.myParameterSet)
                && java.util.Objects.equals(myStyleClassName, other.myStyleClassName);
    }

    @Override
    public int hashCode()
    {
        // Objects.hash reproduces the original 31-based accumulation exactly,
        // so hash codes are unchanged across the refactor.
        return java.util.Objects.hash(myParameterSet, myStyleClassName);
    }

    /**
     * Gets the parameter set.
     *
     * @return the parameter set
     */
    public Set<StyleParameterConfig> getParameterSet()
    {
        return myParameterSet;
    }

    /**
     * Gets the style class name.
     *
     * @return the style class name
     */
    public String getStyleClassName()
    {
        return myStyleClassName;
    }

    /**
     * Sets the parameter set.
     *
     * @param parameterSet the new parameter set
     */
    public void setParameterSet(Set<StyleParameterConfig> parameterSet)
    {
        myParameterSet = parameterSet;
    }

    /**
     * Sets the style class name.
     *
     * @param styleClassName the new style class name
     */
    public void setStyleClassName(String styleClassName)
    {
        myStyleClassName = styleClassName;
    }

    @Override
    public String toString()
    {
        // Null-safe: setParameterSet(null) previously made toString throw NPE.
        Set<StyleParameterConfig> params = myParameterSet;
        StringBuilder sb = new StringBuilder(32);
        sb.append(this.getClass().getSimpleName()).append(" StyleClass[").append(myStyleClassName).append("] Parameters[")
                .append(params == null ? 0 : params.size()).append(']');
        if (params != null && !params.isEmpty())
        {
            sb.append('\n');
            for (StyleParameterConfig spc : params)
            {
                sb.append("    ");
                sb.append(spc.toString()).append('\n');
            }
        }
        return sb.toString();
    }
}
|
def reverse_print_linked_list(head):
    """Print the list's node data in reverse order, each value followed
    by a single space (no trailing newline). ``head`` may be None."""
    # Collect values front-to-back, then emit back-to-front. Produces the
    # exact output of the recursive original without recursion-depth limits.
    values = []
    node = head
    while node is not None:
        values.append(node.data)
        node = node.next
    for value in reversed(values):
        print(value, end=' ')
<filename>src/modules/fields/components/index.js<gh_stars>0
// Barrel file: re-exports the field components from a single entry point.
import Manage from './Manage';
import ManageColumns from './ManageColumns';
import GenerateField from './GenerateField';

export { Manage, ManageColumns, GenerateField };
|
package bnetp.clan;

/**
 * Byte constants describing a clan member's presence status — presumably
 * mirroring the protocol's clan-member status field (TODO confirm against
 * the protocol documentation).
 */
public class ClanMemberStatusFlags {
    /** Member is offline. */
    public static final byte CLANMEMBERSTATUS_OFFLINE = (byte)0x00;
    /** Member is online but not in a chat channel. */
    public static final byte CLANMEMBERSTATUS_NOT_IN_CHAT = (byte)0x01;
    /** Member is in a chat channel. */
    public static final byte CLANMEMBERSTATUS_IN_CHAT = (byte)0x02;
    /** Member is in a public game. */
    public static final byte CLANMEMBERSTATUS_IN_A_PUBLIC_GAME = (byte)0x03;
    /** Member is in a private game, not mutual. */
    public static final byte CLANMEMBERSTATUS_IN_A_PRIVATE_GAME_NOT_MUTUAL = (byte)0x04;
    /** Member is in a private game, mutual. */
    public static final byte CLANMEMBERSTATUS_IN_A_PRIVATE_GAME_MUTUAL = (byte)0x05;
}
|
#!/bin/sh
# Copyright (c) 2015-2021 Franco Fichtner <franco@opnsense.org>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
set -e
SCRIPTSDIR="/usr/local/opnsense/scripts/firmware"
RELEASE=$(opnsense-update -vR)
PROMPT="y/N"
ARGS=
# Run a firmware helper script through launcher.sh (which guards against
# concurrent firmware actions), then wait for a keypress before returning
# to the menu. $1: base name of the script under ${SCRIPTSDIR}.
run_action()
{
	echo
	if ! ${SCRIPTSDIR}/launcher.sh ${SCRIPTSDIR}/${1}.sh; then
		echo "A firmware action is currently in progress."
	fi
	echo
	read -p "Press any key to return to menu." WAIT
}
echo -n "Fetching change log information, please wait... "
if /usr/local/opnsense/scripts/firmware/changelog.sh fetch; then
echo "done"
fi
echo
echo "This will automatically fetch all available updates and apply them."
echo
if [ -n "${RELEASE}" ]; then
echo "A major firmware upgrade is available for this installation: ${RELEASE}"
echo
echo "Make sure you have read the release notes and migration guide before"
echo "attempting this upgrade. Around 500MB will need to be downloaded and"
echo "require 1000MB of free space. Continue with this major upgrade by"
echo "typing the major upgrade version number displayed above."
echo
echo "Minor updates may be available, answer 'y' to run them instead."
echo
PROMPT="${RELEASE}/${PROMPT}"
elif /usr/local/opnsense/scripts/firmware/reboot.sh; then
echo "This update requires a reboot."
echo
fi
read -p "Proceed with this action? [${PROMPT}]: " YN
case ${YN} in
[yY])
;;
${RELEASE:-y})
ARGS="upgrade ${RELEASE}"
;;
[sS])
run_action security
exit 0
;;
[hH])
run_action health
exit 0
;;
[cC])
run_action connection
exit 0
;;
*)
exit 0
;;
esac
echo
/usr/local/etc/rc.firmware ${ARGS}
|
#!/bin/bash
# Fan the given fab task(s) out to every host in the comma-separated
# $ONLINETOWN_SERVERS list, one background job per host, with per-host
# stdout/stderr logs under ./log/.
mkdir -p ./log
for i in $(echo "$ONLINETOWN_SERVERS" | sed "s/,/ /g")
do
    # "$@" (quoted) preserves task arguments containing spaces; the
    # original unquoted $@ re-split them.
    fab -H "$i" "$@" >> "./log/$i.log" 2>> "./log/$i.err" &
done
|
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
*/
import {VersionCheckResult} from '../chrome/UpdateIndicator';
import {getRenderHostInstance} from '../RenderHost';
const updateServer = 'https://www.facebook.com/fbflipper/public/latest.json';
/**
 * Maps the host platform identifier ('win32'/'linux'/'darwin') reported by
 * the render host's environment info to the key used in the update server's
 * "platforms" payload.
 *
 * @throws Error for any other platform value.
 */
const getPlatformSpecifier = (): string => {
  switch (getRenderHostInstance().serverConfig.environmentInfo.os.platform) {
    case 'win32':
      return 'windows';
    case 'linux':
      return 'linux';
    case 'darwin':
      return 'mac';
    default:
      throw new Error('Unsupported platform.');
  }
};
/**
 * Validates and converts the update-server payload.
 *
 * @param resp A parsed JSON object retrieved from the update server.
 *   Expected shape: { version, platforms: { [platformSpecifier]: url } }.
 * @returns An 'update-available' result carrying the download URL for this
 *   platform, or an 'error' result when the payload is incomplete or has no
 *   entry for the current platform.
 */
const parseResponse = (resp: any): VersionCheckResult => {
  const version = resp.version;
  const platforms = resp.platforms;
  if (!version || !platforms) {
    return {kind: 'error', msg: 'Incomplete response.'};
  }
  const platformSpecifier = getPlatformSpecifier();
  const platform = platforms[platformSpecifier];
  if (!platform) {
    return {kind: 'error', msg: `Unsupported platform: ${platformSpecifier}.`};
  }
  // The platform entry is the download URL for this OS.
  return {
    kind: 'update-available',
    url: platform,
    version,
  };
};
/**
 * Queries the update server for the latest available version.
 *
 * @param currentVersion The running version, forwarded as a query parameter.
 * @returns 'up-to-date' (204, or a login redirect on non-authenticated
 *   networks), 'update-available'/'error' from the parsed 200 payload, or an
 *   'error' result for other statuses. Network failures reject the promise.
 */
export async function checkForUpdate(
  currentVersion: string,
): Promise<VersionCheckResult> {
  const res = await fetch(`${updateServer}?version=${currentVersion}`);
  switch (res.status) {
    case 204:
      return {kind: 'up-to-date'};
    case 200: {
      if (res.url.startsWith('https://www.facebook.com/login/')) {
        // We're being redirected because we're not on an authenticated network.
        // Treat that as being up-to-date as there's special-casing the UI for
        // this is not worth it.
        console.log('Skipping version check on non-authenticated network.');
        return {kind: 'up-to-date'};
      }
      return parseResponse(await res.json());
    }
    default: {
      // Braces block-scope the declaration: a lexical `const` in an unbraced
      // switch clause violates no-case-declarations.
      const msg = `Server responded with ${res.statusText}.`;
      console.warn('Version check failure: ', msg);
      return {
        kind: 'error',
        msg,
      };
    }
  }
}
|
## brewfile for Homebrew brew
## To run this file use:
## brew bundle brewfile
export HOMEBREW_CASK_OPTS="--appdir=/Applications"
# Grab beta versions
# brew tap brewroom/versions
# Productivity bits
brew install flux
brew install alfred
brew install daisydisk
# Security
# brew install gpg-suite
# Backup
brew install backblaze
# Network
brew install little-snitch
# brew install encryptme
# brew install wireshark
# Editing stuff
brew install sublime-text
brew install visual-studio-code
# brew install balsamiq-mockups
# brew install intellij-idea
# brew install rubymine
# brew install omnigraffle
# brew install omnifocus
# brew install omnioutliner
# brew install mactex
# Development
brew install ngrok
brew install sourcetree
# DB
#brew install postico
brew install pgadmin4
brew install querious
# Languages
brew install java
#brew install darteditor
#brew install rust
#brew install scratch
# Geo
#brew install gdal-framework matplotlib
#brew install qgis
# Utilities
brew install 1password
# brew install qlstephen
brew install sizeup
brew install transmit
brew install iterm2
brew install licecap
# brew install skitch
# brew install sketchup
brew install slack
brew install telegram
# brew install skype
# brew install gitter
brew install vlc
brew install finicky
# Browsers
# brew install firefox
brew install google-chrome
# Virtualization
#brew install boot2docker
brew install docker
# brew install vagrant
# brew install virtualbox
# brew install minikube
#brew install vmware-fusion
#brew install chefdk
# Fun
brew install spotify
# brew install transmission
#brew install transmission-remote-gui
# brew install xld
# brew install steam
|
#!/usr/bin/env bash
#ENV VARS
OS=$(uname)
OS_VERSION=$(uname -r)
UNAME_M=$(uname -m)
ARCH=$(uname -m)
export OS
export OS_VERSION
export UNAME_M
export ARCH
report() {
echo OS:
echo "$OS" | awk '{print tolower($0)}'
echo OS_VERSION:
echo "$OS_VERSION" | awk '{print tolower($0)}'
echo UNAME_M:
echo "$UNAME_M" | awk '{print tolower($0)}'
echo ARCH:
echo "$ARCH" | awk '{print tolower($0)}'
echo OSTYPE:
echo "$OSTYPE" | awk '{print tolower($0)}'
}
# Ensure Homebrew plus the tools we need (the configured awk and git) are
# installed; bootstraps Homebrew first if missing, then re-runs itself.
checkbrew() {
    if hash brew 2>/dev/null; then
        # FIX: the original wrote "if !hash ...": without a space, "!hash"
        # is parsed as a single (nonexistent) command name, so the negation
        # never happened and these installs never ran.
        if ! hash "$AWK" 2>/dev/null; then
            brew install "$AWK"
        fi
        if ! hash git 2>/dev/null; then
            brew install git
        fi
    else
        /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
        checkbrew
    fi
}
checkraspi(){
echo 'Checking Raspi'
if [ -e /etc/rpi-issue ]; then
echo "- Original Installation"
cat /etc/rpi-issue
fi
if [ -e /usr/bin/lsb_release ]; then
echo "- Current OS"
lsb_release -irdc
fi
echo "- Kernel"
uname -r
echo "- Model"
cat /proc/device-tree/model && echo
echo "- hostname"
hostname
echo "- Firmware"
/opt/vc/bin/vcgencmd version
}
if [[ "$OSTYPE" == "linux"* ]]; then
#CHECK APT
if [[ "$OSTYPE" == "linux-gnu" ]]; then
PACKAGE_MANAGER=apt
export PACKAGE_MANAGER
INSTALL=install
export INSTALL
AWK=gawk
export AWK
if hash apt 2>/dev/null; then
$PACKAGE_MANAGER $INSTALL $AWK
report
fi
fi
if [[ "$OSTYPE" == "linux-musl" ]]; then
PACKAGE_MANAGER=apk
export PACKAGE_MANAGER
INSTALL=install
export INSTALL
AWK=gawk
export AWK
if hash apk 2>/dev/null; then
$PACKAGE_MANAGER $INSTALL $AWK
report
fi
fi
if [[ "$OSTYPE" == "linux-arm"* ]]; then
PACKAGE_MANAGER=apt
export PACKAGE_MANAGER
INSTALL=install
export INSTALL
AWK=gawk
echo $AWK
export AWK
checkraspi
if hash apt 2>/dev/null; then
$PACKAGE_MANAGER $INSTALL $AWK
report
fi
fi
elif [[ "$OSTYPE" == "darwin"* ]]; then
report
PACKAGE_MANAGER=brew
export PACKAGE_MANAGER
INSTALL=install
export INSTALL
AWK=awk
export AWK
rm -rf '/Applications/VyprVPN.app'
brew install --cask vyprvpn
checkbrew
elif [[ "$OSTYPE" == "cygwin" ]]; then
echo TODO add support for $OSTYPE
elif [[ "$OSTYPE" == "msys" ]]; then
echo TODO add support for $OSTYPE
elif [[ "$OSTYPE" == "win32" ]]; then
echo TODO add support for $OSTYPE
elif [[ "$OSTYPE" == "freebsd"* ]]; then
echo TODO add support for $OSTYPE
else
echo TODO add support for $OSTYPE
fi
|
package collects_get_all_deps
import "github.com/spiral/errors"
// Plugin2 collects two dependencies via its Collects() setters and verifies
// during Serve that both were provided.
type Plugin2 struct {
	collectsDeps []interface{}
}

// Init prepares storage for the expected two collected dependencies.
func (f *Plugin2) Init() error {
	// should be 2 deps
	f.collectsDeps = make([]interface{}, 0, 2)
	return nil
}

// Serve reports an error when the expected two deps were not collected.
// FIX: the channel is now buffered — the original sent on an unbuffered
// channel before returning it, so Serve deadlocked whenever the dep count
// was wrong (no receiver can exist until Serve returns).
func (f *Plugin2) Serve() chan error {
	errCh := make(chan error, 1)
	if len(f.collectsDeps) != 2 {
		errCh <- errors.E("not enough deps collected")
	}
	return errCh
}

// Stop is a no-op.
func (f *Plugin2) Stop() error {
	return nil
}

// Collects exposes the setters the container uses to inject dependencies.
func (f *Plugin2) Collects() []interface{} {
	return []interface{}{
		f.GetSuper,
		f.GetSuper2,
	}
}

// GetSuper records a SuperInterface dependency.
func (f *Plugin2) GetSuper(s SuperInterface) {
	f.collectsDeps = append(f.collectsDeps, s)
}

// GetSuper2 records a Super2Interface dependency.
func (f *Plugin2) GetSuper2(s Super2Interface) {
	f.collectsDeps = append(f.collectsDeps, s)
}
|
def binary_to_decimal(binary):
    """Convert an int whose decimal digits spell a binary number
    (e.g. ``100101``) into its value (``37``).

    Fixes two defects in the original: negative inputs no longer loop
    forever (Python floor division keeps ``-1 // 10 == -1``), and digits
    other than 0/1 now raise instead of producing silently wrong results.

    Raises:
        ValueError: if any digit of ``binary`` is not 0 or 1.
    """
    sign = -1 if binary < 0 else 1
    binary = abs(binary)
    decimal = 0
    index = 0
    while binary != 0:
        dec = binary % 10
        if dec > 1:
            raise ValueError("digit %d is not a binary digit" % dec)
        decimal += dec * pow(2, index)
        binary //= 10
        index += 1
    return sign * decimal

# usage
n = 100101
result = binary_to_decimal(n)
print(result)  # 37
import pandas as pd

# Read in the data; expects columns 'stock_symbol' and 'closing_price'
# (schema inferred from the groupby calls below — confirm against the CSV).
data = pd.read_csv('stock_prices.csv')

# Explore the data
print(data.info())

# Calculate the average closing prices per symbol
average_closing_prices = data.groupby('stock_symbol')['closing_price'].mean()

# Calculate the maximum and minimum closing prices per symbol
max_closing_prices = data.groupby('stock_symbol')['closing_price'].max()
min_closing_prices = data.groupby('stock_symbol')['closing_price'].min()

# "Volatility" here is the price range normalised by the mean — a crude
# proxy, not the standard-deviation-based statistical definition.
volatility = (max_closing_prices - min_closing_prices) / average_closing_prices

# Create a report; the Series align automatically on the shared
# stock_symbol index.
report = pd.DataFrame(data={'average_closing_price': average_closing_prices,
                            'max_closing_price': max_closing_prices,
                            'min_closing_price': min_closing_prices,
                            'volatility': volatility})

# Print the report
print(report)
BigDLFolder=~/Shailesh/MTP/BigDL
templateFile=~/Shailesh/MTP/BigDL/spark/dl/src/main/scala/com/intel/analytics/bigdl/models/lenet/LeNet5.template
targetFile=~/Shailesh/MTP/BigDL/spark/dl/src/main/scala/com/intel/analytics/bigdl/models/lenet/LeNet5.scala
echo ""> output.csv
while IFS=","
read config epoch batchsize filter first_conv_kernel second_conv_kernel output_height output_width dense_layer max_core driver_core executor_cores task_cpus executor_instances driver_memory executor_memory memory_fraction storage_fraction parallelism
do
echo -e "\e[1;31m RUNNING CONFIG \#$config"
echo -e "--------------------------------------------------- \e[0m"
echo step 1/10 !!!!!!!!Copying the skeleton file!!!!!!
#cp $templateFile $targetFile
echo step 2/10 !!!!!!!!Filling up the parameter in the model!!!!!!!!!!
#sed -i "s/%%filter%%/$filter/g" $targetFile
#sed -i "s/%%first_conv_kernel%%/$first_conv_kernel/g" $targetFile
#sed -i "s/%%second_conv_kernel%%/$second_conv_kernel/g" $targetFile
#sed -i "s/%%output_height%%/$output_height/g" $targetFile
#sed -i "s/%%output_width%%/$output_width/g" $targetFile
#sed -i "s/%%dense_layer%%/$dense_layer/g" $targetFile
#echo step 3/10 !!!!!!!starting compilation!!!!!!!!
cd $BigDLFolder
#bash make-dist.sh > /home/shady/Shailesh/MTP/automateBigDL/temp.log
echo step 4/10 !!!!!!copying JAR file to the server!!!!!!!!
#sshpass -e scp $BigDLFolder/dist/lib/bigdl-SPARK_2.0-0.13.0-SNAPSHOT-jar-with-dependencies.jar etcd@10.129.2.179:~/NASDrive/BigDL/dist/lib > /home/shady/Shailesh/MTP/automateBigDL/temp.log
echo step 5/10 !!!!!!!uploading jar file to hadoop server!!!!!!!!
#sshpass -e ssh -n hduser@10.129.2.179 "/home/etcd/NASDrive/hadoop/bin/hadoop dfs -copyFromLocal -f /home/etcd/NASDrive/BigDL/dist/lib/bigdl-SPARK_2.0-0.13.0-SNAPSHOT-jar-with-dependencies.jar /jars/BigDL" > /home/shady/Shailesh/MTP/automateBigDL/temp.log
echo step 6/10 !!!!!!!Modifying the run file!!!!!!!!
sshpass -e ssh -n spark@10.129.2.177 "cd shailesh && cp resnet.template resnet.sh && sed -i \"s/%%epoch%%/$epoch/g\" resnet.sh && sed -i \"s/%%batchsize%%/$batchsize/g\" resnet.sh && cat resnet.sh" > /home/shady/Shailesh/MTP/automateBigDL/temp.log
echo step 7/10 !!!!!!!!!Setting up the spark parameters!!!!!!!!!
sshpass -e ssh -n spark@10.129.2.177 "cd shailesh && cp properties.template properties.conf && sed -i \"s/%%max_core%%/$max_core/g\" properties.conf && sed -i \"s/%%driver_core%%/$driver_core/g\" properties.conf && sed -i \"s/%%executor_cores%%/$executor_cores/g\" properties.conf && sed -i \"s/%%task_cpus%%/$task_cpus/g\" properties.conf && sed -i \"s/%%executor_instances%%/$executor_instances/g\" properties.conf && sed -i \"s/%%driver_memory%%/$driver_memory/g\" properties.conf && sed -i \"s/%%executor_memory%%/$executor_memory/g\" properties.conf && sed -i \"s/%%memory_fraction%%/$memory_fraction/g\" properties.conf && sed -i \"s/%%storage_fraction%%/$storage_fraction/g\" properties.conf && sed -i \"s/%%parallelism%%/$parallelism/g\" properties.conf && cat properties.conf" > /home/shady/Shailesh/MTP/automateBigDL/temp.log
echo step 8/10 !!!!!!!Deleting the exisiting pods and running the job!!!!!!!!
timeout 1m sshpass -e ssh -n spark@10.129.2.177 "kubectl delete pods vgg" > /home/shady/Shailesh/MTP/automateBigDL/temp.log
timeout 1m sshpass -e ssh -n spark@10.129.2.177 "kubectl delete pods vgg" > /home/shady/Shailesh/MTP/automateBigDL/temp.log
echo step 8/10 !!!!!!!running the job!!!!!!!!
timeout 25m sshpass -e ssh -n spark@10.129.2.177 "cd shailesh && ./resnet.sh" > /home/shady/Shailesh/MTP/automateBigDL/temp.log
echo step 9/10 !!!!!!!Capturing the output!!!!!!!!
result=$(sshpass -e ssh -n spark@10.129.2.177 "kubectl logs vgg | grep InstrumentationResult")
echo step 10/10 !!!!!!!Storing the result!!!!!!!!
cd - && echo "$config,$epoch,$batchsize,$filter,$first_conv_kernel,$second_conv_kernel,$output_height,$output_width,$dense_layer,$max_core,$driver_core,$executor_cores,$task_cpus,$executor_instances,$driver_memory,$executor_memory,$memory_fraction,$storage_fraction,$parallelism,$result" >>output.csv
echo -------------------------------------------------
done < <(tail -n +257 /home/shady/Shailesh/MTP/automateBigDL/parameters.csv)
cat output.csv >> pers.csv
|
<reponame>abin1525/rose-edg
// Author: <NAME>
// $Id: AttributeMechanism.h,v 1.4 2006/04/24 00:21:27 dquinlan Exp $
// This class template was removed because nothing in the ROSE library, ROSE tests, or associated ROSE projects uses it. If
// you're trying to store attributes in a Sage IR node ("Sg*" class) then use the SgNode interface (SgNode::addAttribute, etc.)
// or the AstAttributeMechanism interface on which the SgNode attribute interface is built. If you're a class author trying to
// allow user-defined attributes to be stored directly (not via pointers to a base class) and without requiring the user to
// recompile ROSE, then use Sawyer::Attribute instead (the old AttributeMechanism couldn't be used for this anyway). [Robb
// Matzke 2015-11-16]
#ifndef ATTRIBUTEMECHANISM_H
#define ATTRIBUTEMECHANISM_H
// [<NAME> 2015-11-16]: deprecated. Do not document.
template<class Key, class Value>
class AttributeMechanism ROSE_DEPRECATED("Use AstAttributeMechanism instead") {};
#endif
|
import re

def transform_urls(urls):
    """Map Tagesschau listing snippets to API JSON URLs.

    For each snippet containing a '<li>HH:MM Uhr - <a href="...">'
    fragment, extract the href, swap its ``.html`` suffix for ``.json``
    and prepend the API host. Snippets without a match are skipped.
    """
    results = []
    for snippet in urls:
        found = re.search(r'(?<=<li>\d\d:\d\d Uhr - <a href=")([^"]+)', snippet)
        if not found:
            continue
        api_path = found.group(1).replace(".html", ".json")
        results.append("https://tagesschau.de/api2" + api_path)
    return results

# Example usage
input_urls = [
    '<li>08:30 Uhr - <a href="https://example.com/article1.html">',
    '<li>12:45 Uhr - <a href="https://example.com/article2.html">'
]
output_urls = transform_urls(input_urls)
for url in output_urls:
    print(url)
#!/bin/sh
# Install the bundled TeX tree: copy ./texmf into $HOME (TeX looks for a
# personal tree at ~/texmf) and report success only if the copy worked.
cp -r ./texmf $HOME/ && echo "Copied texmf to $HOME"
|
#!/bin/bash
set -e
# Overwrite HOME to WORKSPACE
export HOME="$WORKSPACE"
# Install gpuCI tools
curl -s https://raw.githubusercontent.com/rapidsai/gpuci-tools/main/install.sh | bash
source ~/.bashrc
cd ~
# Show env
gpuci_logger "Exposing current environment..."
env
# Login to docker
gpuci_logger "Logging into Docker..."
echo $DH_TOKEN | docker login --username $DH_USER --password-stdin &> /dev/null
# Select dockerfile based on matrix var
DOCKERFILE="${DOCKER_PREFIX}_${LINUX_VER}-${IMAGE_TYPE}.Dockerfile"
gpuci_logger "Using Dockerfile: generated-dockerfiles/${DOCKERFILE}"
# Debug output selected dockerfile
gpuci_logger ">>>> BEGIN Dockerfile <<<<"
cat generated-dockerfiles/${DOCKERFILE}
gpuci_logger ">>>> END Dockerfile <<<<"
# Get build info ready
gpuci_logger "Preparing build config..."
BUILD_TAG="cuda${CUDA_VER}-${IMAGE_TYPE}-${LINUX_VER}"
# Check if PR build and modify BUILD_IMAGE and BUILD_TAG
if [ ! -z "$PR_ID" ] ; then
echo "PR_ID is set to '$PR_ID', updating BUILD_IMAGE..."
BUILD_REPO=`echo $BUILD_IMAGE | tr '/' ' ' | awk '{ print $2 }'`
BUILD_IMAGE="rapidsaitesting/${BUILD_REPO}-pr${PR_ID}"
# Check if FROM_IMAGE to see if it is a root build
if [[ "$FROM_IMAGE" == "gpuci/rapidsai" ]] ; then
echo ">> No need to update FROM_IMAGE, using external image..."
else
echo ">> Need to update FROM_IMAGE to use PR's version for testing..."
FROM_REPO=`echo $FROM_IMAGE | tr '/' ' ' | awk '{ print $2 }'`
FROM_IMAGE="rapidsaitesting/${FROM_REPO}-pr${PR_ID}"
fi
fi
# Setup initial BUILD_ARGS
BUILD_ARGS="--no-cache \
--squash \
--build-arg FROM_IMAGE=${FROM_IMAGE} \
--build-arg CUDA_VER=${CUDA_VER} \
--build-arg IMAGE_TYPE=${IMAGE_TYPE} \
--build-arg LINUX_VER=${LINUX_VER} \
--build-arg UCX_PY_VER=${UCX_PY_VER}"
# Add BUILD_BRANCH arg for 'main' branch only
if [ "${BUILD_BRANCH}" = "main" ]; then
BUILD_ARGS+=" --build-arg BUILD_BRANCH=${BUILD_BRANCH}"
fi
# Check if PYTHON_VER is set
if [ -z "$PYTHON_VER" ] ; then
echo "PYTHON_VER is not set, skipping..."
else
echo "PYTHON_VER is set to '$PYTHON_VER', adding to build args/tag..."
BUILD_ARGS+=" --build-arg PYTHON_VER=${PYTHON_VER}"
BUILD_TAG="${BUILD_TAG}-py${PYTHON_VER}"
fi
# Check if RAPIDS_VER is set
if [ -z "$RAPIDS_VER" ] ; then
echo "RAPIDS_VER is not set, skipping..."
else
echo "RAPIDS_VER is set to '$RAPIDS_VER', adding to build args..."
BUILD_ARGS+=" --build-arg RAPIDS_VER=${RAPIDS_VER}"
BUILD_TAG="${RAPIDS_VER}-${BUILD_TAG}" #pre-prend version number
fi
# Ouput build config
gpuci_logger "Build config info..."
echo "Build image and tag: ${BUILD_IMAGE}:${BUILD_TAG}"
echo "Build args: ${BUILD_ARGS}"
gpuci_logger "Docker build command..."
echo "docker build --pull -t ${BUILD_IMAGE}:${BUILD_TAG} ${BUILD_ARGS} -f generated-dockerfiles/${DOCKERFILE} context/"
# Build image
gpuci_logger "Starting build..."
docker build --pull -t ${BUILD_IMAGE}:${BUILD_TAG} ${BUILD_ARGS} -f generated-dockerfiles/${DOCKERFILE} context/
# List image info
gpuci_logger "Displaying image info..."
docker images ${BUILD_IMAGE}:${BUILD_TAG}
# Upload image
gpuci_logger "Starting upload..."
GPUCI_RETRY_MAX=5
GPUCI_RETRY_SLEEP=120
gpuci_retry docker push ${BUILD_IMAGE}:${BUILD_TAG}
|
<reponame>jrfaller/maracas<filename>test-data/comp-changes/new/src/main/superclassRemoved/SuperclassRemovedAbs.java
package main.superclassRemoved;

/**
 * Test fixture: the "new" version of this class with its superclass
 * removed — presumably compared against the "old" version by the
 * compatibility-changes test data (path suggests comp-changes/new).
 */
public abstract class SuperclassRemovedAbs {
}
|
#!/bin/sh
# $1 fromNET $2 toNET $3 txHash
# Relay a cross-chain event: build the proof for tx $3 on network $1
# (via the per-network node script) and execute it on network $2.
# Only the resulting tx hash is echoed (grep 0x); progress goes to stderr.
eventRelay(){
	echo "relay $1 => $2 $3" 1>&2
	mapReqProof=`node $1.js $3`
	truffle --network=$2 exec ExecProof.js $mapReqProof | grep 0x
}
ethAccount=$1
hmyAccount=$2
ethnet="eth"
hmynet="hmy"
ERC20=`truffle --network=$ethnet exec newErc20.js | grep 0x`
echo eth: create ERC20 $ERC20
echo eth: CreateRainbow eth===hmy
mapReqTx=`truffle --network=$ethnet exec CreateRainbow.js $ERC20 | grep 0x`
mapAckTx=`eventRelay $ethnet $hmynet $mapReqTx`
eventRelay $hmynet $ethnet $mapAckTx
echo eth: rainbowTo
rainbowToTx=`truffle --network=$ethnet exec RainbowTo.js $ERC20 $ethAccount $hmyAccount 100`
eventRelay $ethnet $hmynet $rainbowToTx
echo eth: rainbowBack
rainbowBackTx=`truffle --network=$hmynet exec RainbowBack.js $ERC20 $ethAccount $hmyAccount 50`
eventRelay $hmynet $ethnet $rainbowBackTx |
<filename>src/service/Validator.ts
import { Service } from 'typedi';
import { HapifyVM } from '@hapify/vm';
import { Validator } from '../interface/Validator';
import { IModel } from '../interface/Generator';
import { InternalConfig } from '../config/Internal';
import { RichError } from '../class/RichError';
import { ValidatorResultSchema } from '../interface/schema/ValidatorResult';
@Service()
export class ValidatorService {
constructor() {}
/**
* Run validation on a single model for a single channel
*
*/
async run(content: string, model: IModel): Promise<Validator> {
let result: Validator;
// Try or die
try {
result = new HapifyVM({
timeout: InternalConfig.validatorTimeout,
allowAnyOutput: true,
}).run(content, { model });
} catch (error) {
if (error.code === 6003) {
throw new RichError(`Template processing timed out (${InternalConfig.validatorTimeout}ms)`, {
code: 4006,
type: 'CliValidatorTimeoutError',
});
}
if (error.code === 6002) {
// Clone error
const { lineNumber, columnNumber } = error;
throw new RichError(error.message, {
code: 4005,
type: 'CliValidatorEvaluationError',
details: `Error: ${error.message}. Line: ${lineNumber}, Column: ${columnNumber}`,
lineNumber,
columnNumber,
});
}
if (error.code === 6004) {
// Clone error
throw new RichError(error.message, {
code: error.code,
type: error.name,
});
}
throw error;
}
// Check result and return
const validation = ValidatorResultSchema.validate(result);
if (validation.error) {
throw new RichError(`Invalid validator output. Must return { errors: string[], warnings: string[] }`, {
code: 4007,
type: 'CliValidatorOutputError',
});
}
return result;
}
}
|
# Geometry: simple area formulas exposed as module functions.
module Geometry
  # Area of a triangle from its base and height (always a Float).
  def self.triangle_area(base, height)
    (base * height) / 2.0
  end

  # Area of a circle from its radius.
  def self.circle_area(radius)
    Math::PI * radius * radius
  end
end
package org.nem.core.utils;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
 * A two-level map of items. Items are automatically created on access. Item associations are order-dependent.
 */
public abstract class AbstractTwoLevelMap<TKey, TValue> {
	private final Map<TKey, Map<TKey, TValue>> impl = new ConcurrentHashMap<>();

	/**
	 * Gets the TValue associated with key1 and key2, creating it atomically if
	 * absent. The original get-then-put sequence raced under concurrent access
	 * (two threads could each create a value and one would be lost);
	 * computeIfAbsent closes that window.
	 *
	 * @param key1 The first key.
	 * @param key2 The second key.
	 * @return The value associated with key1 and key2.
	 */
	public TValue getItem(final TKey key1, final TKey key2) {
		return this.getItems(key1).computeIfAbsent(key2, k -> this.createValue());
	}

	/**
	 * Gets the (TKey, TValue) map associated with key, creating it atomically
	 * if absent.
	 *
	 * @param key The first key.
	 * @return The map associated with key.
	 */
	public Map<TKey, TValue> getItems(final TKey key) {
		return this.impl.computeIfAbsent(key, k -> new ConcurrentHashMap<>());
	}

	/**
	 * Removes a key from the map.
	 *
	 * @param key The key to remove.
	 */
	public void remove(final TKey key) {
		this.impl.remove(key);
	}

	/**
	 * Gets the key set of this map.
	 *
	 * @return The key set.
	 */
	public Set<TKey> keySet() {
		return this.impl.keySet();
	}

	/**
	 * Creates a new blank value.
	 *
	 * @return A new value.
	 */
	protected abstract TValue createValue();
}
|
<gh_stars>0
import { AppError } from "../../../../errors/AppError";
import { UsersRepositoryInMemory } from "../../repositories/InMemory/UsersRepositoryInMemory";
import { DeleteUserService } from "./DeleteUserService";
let usersRepositoryInMemory: UsersRepositoryInMemory;
let deleteUserService: DeleteUserService;

describe("Delete User", () => {
	beforeEach(() => {
		usersRepositoryInMemory = new UsersRepositoryInMemory();
		deleteUserService = new DeleteUserService(usersRepositoryInMemory);
	});

	it("should be able to delete a user", async () => {
		const user = await usersRepositoryInMemory.create({
			email: "<EMAIL>",
			givenName: "givenTest",
			familyName: "familyTest",
		});
		const usersLengthBeforeDelete = (await usersRepositoryInMemory.list())
			.length;

		await deleteUserService.execute(user.id);
		const users = await usersRepositoryInMemory.list();

		expect(usersLengthBeforeDelete).toBe(1);
		expect(users.length).toBe(0);
	});

	// Fixed: the `.rejects` assertion returns a promise; without awaiting it
	// the spec could complete before the assertion settled, hiding failures.
	it("should not be able to delete a user that doesn't exists", async () => {
		await expect(deleteUserService.execute("incorrectId")).rejects.toBeInstanceOf(
			AppError
		);
	});
});
|
from datetime import datetime, time
class TradingSession:
    def __init__(self, date, time, market_open_time, market_close_time):
        # NOTE(review): the `time` parameter shadows the `time` class imported
        # from datetime at module level — rename it if that import is needed here.
        self.date = date                            # calendar date of this session
        self.time = time                            # time-of-day within the session
        self.market_open_time = market_open_time    # daily market opening time
        self.market_close_time = market_close_time  # daily market closing time
        self.current_date = None  # presumably the last date processed; starts unset
def is_market_open(self, date):
# Implement logic to determine if the market is open on the given date
# Example: Check if the date is a weekday and not a holiday
# Replace the following line with the actual implementation
return date.weekday() < 5 # Assuming market is open on weekdays
    def is_active_session(self):
        # A session is active only on days the market is open.
        return self.is_market_open(self.date)
def is_in_session(self):
if not self.is_active_session():
return False
if self.time >= self.market_open_time and self.time <= self.market_close_time:
return True
return False
def is_session_start(self):
if self.current_date is None and self.time >= self.market_open_time:
return True |
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.stopSpinnerWithFailure = exports.stopSpinnerWithSuccess = exports.startSpinner = void 0;
const ora_1 = __importDefault(require("ora"));
// The spinner instance.
let spinner = null;
/**
* startSpinner()
*
* starts the spinner.
* @param message the message to display.
*/
const startSpinner = (message) => {
spinner = ora_1.default({}).start(message);
};
exports.startSpinner = startSpinner;
/**
* stopSpinnerWithSuccess()
*
* stops the spinner with a success status.
* @param message the message to display.
*/
const stopSpinnerWithSuccess = (message) => {
if ((spinner !== null)) {
spinner = spinner.succeed(message);
spinner = null;
}
};
exports.stopSpinnerWithSuccess = stopSpinnerWithSuccess;
/**
* stopSpinnerWithFailure()
*
* stops the spinner with a failure.
* @param message the message to display.
*/
const stopSpinnerWithFailure = (message) => {
if ((spinner !== null)) {
spinner = spinner.fail(message);
spinner = null;
}
};
exports.stopSpinnerWithFailure = stopSpinnerWithFailure;
|
import random
def generate_password():
    """Generate a random 16-character password.

    Uses the ``secrets`` module (a CSPRNG) instead of ``random``, whose
    Mersenne-Twister output is predictable and unsuitable for passwords.

    Returns:
        str: a 16-character string drawn from the character set below.
    """
    import secrets  # local import: cryptographically secure randomness

    chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!@£$%^&*(){}[]-_=+'
    return ''.join(secrets.choice(chars) for _ in range(16))

print(generate_password())
#!/bin/bash
# Provision infrastructure with Terraform, then deploy Rocket.Chat via Ansible.
# FIX: the original `set -eu pipefail` passed "pipefail" as a positional
# argument ($1) instead of enabling the option; `-o pipefail` is required.
set -euo pipefail

start_time=$(date +%s)
date1=$(date +"%s")

TF_IN_AUTOMATION=1 terraform init
TF_IN_AUTOMATION=1 terraform apply -auto-approve

ansible-galaxy install --force git+https://github.com/RocketChat/Rocket.Chat.Ansible.git,develop
ansible-playbook -i inventory.yml playbook.yml

end_time=$(date +%s)
date2=$(date +"%s")

echo "###############"
echo "Execution time was $((end_time - start_time)) s."
DIFF=$((date2 - date1))
echo "Duration: $(($DIFF / 3600 )) hours $((($DIFF % 3600) / 60)) minutes $(($DIFF % 60)) seconds"
echo "###############"
|
def is_triangular_number_odd(n):
    """Return True if ``n`` is a triangular number T_i with an odd index ``i``.

    Returns False when the index is even or when ``n`` is not triangular.

    ``n`` is triangular iff ``8*n + 1`` is a perfect square, in which case
    ``i = (sqrt(8*n + 1) - 1) / 2``. Exact integer arithmetic via
    ``math.isqrt`` avoids float-precision failures of the original
    ``sqrt``-based check, and ``math`` is now actually imported (the
    original referenced ``math`` without importing it -> NameError).
    """
    import math

    disc = 8 * n + 1
    root = math.isqrt(disc)
    if root * root != disc:
        # n is not a triangular number
        return False
    i = (root - 1) // 2
    return i % 2 != 0
# -*- coding: utf-8 -*-
#
# This file is part of django-content-licenses.
#
# django-content-licenses is a Django app that adds support for adding licensing information to content.
#
# Development Web Site:
# - http://www.codetrax.org/projects/django-content-licenses
# Public Source Code Repository:
# - https://source.codetrax.org/hgroot/django-content-licenses
#
# Copyright 2010 <NAME> <gnot [at] g-loaded.eu>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
In order to use these template tags you need to use the following in your templates
{% load content_licenses_tags %}
"""
from django import template
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
register = template.Library()
class SetLicenseLinkNode(template.Node):
    """Template node that renders the Creative Commons license hyperlink
    snippet and stores the resulting HTML in a context variable."""

    def __init__(self, context_var):
        # Name of the context variable that will receive the rendered link.
        self.context_var = context_var

    def render(self, context):
        # Render the hyperlink template with the current context, stash the
        # result under `context_var`; the tag itself outputs nothing.
        LICENSE_LINK_TEMPLATE = "content_licenses/creative-commons-license-hyperlink.html"
        t = template.loader.get_template(LICENSE_LINK_TEMPLATE)
        context[self.context_var] = t.render(template.Context(context))
        return ''
def do_set_license_link(parser, token):
    """
    Stores the license link to a context variable.
    Usage::
        {% license_link as [varname] %}
    """
    # Expected form: "<tag_name> as <varname>" -> exactly 3 tokens.
    bits = token.contents.split()
    if len(bits) != 3:
        raise template.TemplateSyntaxError('%s tag requires two arguments' % bits[0])
    if bits[1] != 'as':
        raise template.TemplateSyntaxError("first argument to %s tag must be 'as'" % bits[0])
    return SetLicenseLinkNode(bits[2])

register.tag('set_license_link', do_set_license_link)
|
# Elixir / Mix / Phoenix / Ecto convenience aliases.
alias i='iex'
alias ips='iex -S mix phoenix.server'
alias ism='iex -S mix'
alias m='mix'
alias mab='mix archive.build'
alias mai='mix archive.install'
alias mat='mix app.tree'
alias mc='mix compile'
alias mcv='mix compile --verbose'
alias mcr='mix credo'
alias mcrs='mix credo --strict'
alias mcx='mix compile.xref'
alias mdc='mix deps.compile'
alias mdg='mix deps.get'
alias mdgc='mix do deps.get, deps.compile'
alias mdu='mix deps.update'
alias mdt='mix deps.tree'
alias mdua='mix deps.update --all'
alias mdun='mix deps.unlock'
alias mduu='mix deps.unlock --unused'
alias meb='mix escript.build'
alias mec='mix ecto.create'
alias mecm='mix do ecto.create, ecto.migrate'
alias med='mix ecto.drop'
alias mem='mix ecto.migrate'
alias megm='mix ecto.gen.migration'
alias merb='mix ecto.rollback'
alias mers='mix ecto.reset'
alias mes='mix ecto.setup'
alias mho='mix hex.outdated'
alias mlh='mix local.hex'
alias mn='mix new'
alias mns='mix new --sup'
alias mpgc='mix phoenix.gen.channel'
alias mpgh='mix phoenix.gen.html'
alias mpgj='mix phoenix.gen.json'
alias mpgm='mix phoenix.gen.model'
alias mpgs='mix phoenix.gen.secret'
alias mpn='mix phoenix.new'
alias mpr='mix phoenix.routes'
alias mps='mix phoenix.server'
alias mr='mix run'
alias mrnh='mix run --no-halt'
alias mrl='mix release'
alias mt='mix test'
alias mts='mix test --stale'
alias mtw='mix test.watch'
alias mx='mix xref'
# Heroku
alias hrmec='heroku run mix ecto.create'
alias hrmem='heroku run mix ecto.migrate'
# Kiex - Elixir Version Manager: https://github.com/taylor/kiex
alias kd='kiex default'
alias ki='kiex install'
alias kl='kiex list'
alias klb='kiex list branches'
alias klk='kiex list known'
alias klr='kiex list releases'
alias ks='kiex shell'
alias ksu='kiex selfupdate'
alias ku='kiex use'

# Create a new mix project and enter its directory.
# FIX: only cd when `mix new` succeeded — the original unconditional `;`
# would cd into a non-existent directory on failure.
mncd() {
  arg="$*"
  mix new "$arg" && cd "$arg"
}
|
package com.arslorem.hamzah.entities;
import com.arslorem.hamzah.entities.CsProjectno;
import com.arslorem.hamzah.entities.CsTor;
import javax.annotation.Generated;
import javax.persistence.metamodel.ListAttribute;
import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.metamodel.StaticMetamodel;
// EclipseLink static JPA metamodel for the CsProject entity.
// Generated code: do not edit by hand — regenerate with the JPA metamodel
// annotation processor when the CsProject entity changes.
@Generated(value="EclipseLink-2.5.2.v20140319-rNA", date="2019-01-11T22:12:00")
@StaticMetamodel(CsProject.class)
public class CsProject_ {
    public static volatile ListAttribute<CsProject, CsProjectno> csProjectnoList;
    public static volatile SingularAttribute<CsProject, String> proCountry;
    public static volatile SingularAttribute<CsProject, String> profullName;
    public static volatile SingularAttribute<CsProject, String> proCity;
    public static volatile SingularAttribute<CsProject, Long> proID;
    public static volatile SingularAttribute<CsProject, String> proshortName;
    public static volatile ListAttribute<CsProject, CsTor> csTorList;
    public static volatile SingularAttribute<CsProject, String> proAddress1;
    public static volatile SingularAttribute<CsProject, String> proAddress2;
    public static volatile SingularAttribute<CsProject, String> proX;
}
package altinn.platform.pdf;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Ensure application starts
 */
@SpringBootTest
class AppTest {

    // Smoke test: @SpringBootTest boots the full application context, so the
    // real check is that startup does not throw — the assertion is trivial.
    @Test
    void testApp() {
        assertTrue(true);
    }
}
|
import os
from django.conf import settings
from django.contrib.staticfiles.finders import BaseFinder
from django.core.files.storage import FileSystemStorage
# Static-prefix -> configured absolute file path for each custom logo.
logo_paths = {
    'header_logo': settings.HEADER_LOGO,
    'favicon': settings.FAVICON,
}

# Lookup table used by LogoFinder.find(): static-relative path
# ("<prefix>/<basename>") -> absolute path on disk. Entries whose
# configured path is falsy are skipped.
logofinder_results = {
    os.path.join(prefix, os.path.basename(path)): path
    for prefix, path in logo_paths.items()
    if path
}
class LogoFinder(BaseFinder):
    """Static-files finder exposing the logo files configured in settings."""

    def find(self, path, all=False):
        # Resolve a static-relative path to its absolute location on disk.
        match = logofinder_results.get(path)
        if not match:
            return []
        return [match] if all else match

    def list(self, ignore_patterns):
        # Yield a (filename, storage) pair for every configured logo.
        entries = []
        for prefix, abs_path in logo_paths.items():
            if not abs_path:
                continue
            directory, filename = os.path.split(abs_path)
            storage = FileSystemStorage(location=directory)
            storage.prefix = prefix
            entries.append((filename, storage))
        return entries
|
<gh_stars>0
#include "match_manager.h"
#include "core/ttspeech.h"
#include "ksmm/ksmm_trader.h"
#include "ksmm/common_define.hpp"
IWKATS_NAMESPACE_BEGIN
// Pimpl: match storage guarded by a read-write lock.
struct MatchManager::Private {
  QReadWriteLock rwlock;
  // order id -> match info
  QHash<QString, MatchInfoPtr> all_matchs;
  // stock code -> (order id -> match info)
  QHash<QString, QHash<QString, MatchInfoPtr>> all_stocks_matchs;
};
//////////////////////////////////////////////////////////////////////////
MatchManager::MatchManager(QObject *parent)
  : QObject(parent)
  , d_(new Private) {
  // Forward trade reports from the trading SPI into this manager.
  connect(Trader::Spi(), &KSMMTradingSpi::sigMatchArrive, this, &MatchManager::slotMatchArrive);
}
// Singleton accessor (function-local static).
MatchManager* MatchManager::GetPtr() {
  static MatchManager instance;
  return &instance;
}
// Copies all matches recorded for `stock` into `all_matchs`.
// Returns false when the stock has no recorded matches.
bool MatchManager::GetAllMatchByStock(const QString& stock, QList<MatchInfoPtr>& all_matchs)
{
  RLockGuard rlock(d_->rwlock);
  if(d_->all_stocks_matchs.contains(stock)) {
    all_matchs = d_->all_stocks_matchs.value(stock).values();
    return true;
  }
  return false;
}
// Looks up the match recorded for `order_id`; returns false when absent.
bool MatchManager::GetMatchByOrderId(const QString& order_id, MatchInfoPtr& match_ptr)
{
  RLockGuard rlock(d_->rwlock);
  const auto it = d_->all_matchs.constFind(order_id);
  if (it == d_->all_matchs.constEnd()) {
    return false;
  }
  match_ptr = it.value();
  return true;
}
// Copies every recorded match into `all_matchs`.
// FIX: take the read lock like the other accessors — the unguarded read
// raced with slotMatchArrive(), which mutates the hash under the write lock.
void MatchManager::GetAllMatchs(QList<MatchInfoPtr>& all_matchs)
{
  RLockGuard rlock(d_->rwlock);
  all_matchs = d_->all_matchs.values();
}
// Trade (fill) report handler: stores the match under both its order id and
// its stock code, then re-emits it for downstream consumers.
void MatchManager::slotMatchArrive(std::shared_ptr<MATCH_INFO> pMatchInfo)
{
  WLockGuard wlock(d_->rwlock);
  MatchInfoPtr match_ptr = std::make_shared<MatchInfo>(pMatchInfo);
  d_->all_matchs[pMatchInfo->szOrderID] = match_ptr;
  d_->all_stocks_matchs[pMatchInfo->szStkCode].insert(pMatchInfo->szOrderID, match_ptr);
  emit sigMatchArrive(match_ptr);
}
IWKATS_NAMESPACE_END
|
<gh_stars>0
import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
import { FormItemRatingComponent } from './form-item-rating.component';
import { MatInputModule } from '@angular/material/input';
import { MatIconModule } from '@angular/material/icon';
import { SharedModule } from 'app/shared';
import { NoopAnimationsModule } from '@angular/platform-browser/animations';
import { FormsModule } from '@angular/forms';
import { testCreditClaimForm } from 'app/modules/qa/testing';
import * as _ from 'lodash';
// Unit tests for FormItemRatingComponent: creation, label rendering, star
// state ('star' = filled icon, 'star_border' = empty icon) and click handling.
describe('FormItemRatingComponent', () => {
  let component: FormItemRatingComponent;
  let fixture: ComponentFixture<FormItemRatingComponent>;
  // Form item under test; deep-cloned per spec so mutations don't leak between tests.
  let item;
  beforeEach(waitForAsync(() => {
    TestBed.configureTestingModule({
      imports: [
        MatInputModule,
        SharedModule,
        NoopAnimationsModule,
        FormsModule,
        MatIconModule
      ],
      declarations: [ FormItemRatingComponent ],
      providers: [
      ],
    })
    .compileComponents();
  }));
  beforeEach(() => {
    fixture = TestBed.createComponent(FormItemRatingComponent);
    component = fixture.componentInstance;
    // Second item of the first section of the shared QA fixture is a rating field.
    item=_.cloneDeep(testCreditClaimForm.sections[0].items[1]);
    component.item=item;
    fixture.detectChanges();
  });
  it('should create', () => {
    expect(component).toBeTruthy();
  });
  // Verifies the configured title is rendered as the rating label.
  it('should field label', () => {
    const compiled = fixture.debugElement.nativeElement;
    const title='Test Field Title';
    item.title=title;
    fixture.detectChanges();
    expect(compiled.querySelector('.rating-set .rating-set-title').textContent).toEqual(title);
  });
  it('should render 5 rating star buttons', () => {
    const compiled = fixture.debugElement.nativeElement;
    expect([...compiled.querySelector('mat-star-rating').children].length).toEqual(5);
  });
  // Filled stars must track item.value; the remainder stay as outlines.
  it('should render active star button in accordance field value', () => {
    const compiled = fixture.debugElement.nativeElement;
    const starButtons=[...compiled.querySelector('mat-star-rating').children];
    expect(starButtons.filter(btn=>btn.querySelector('mat-icon').textContent.trim()==='star').length).toEqual(0);
    item.value=3;
    fixture.detectChanges();
    expect(starButtons.filter(btn=>btn.querySelector('mat-icon').textContent.trim()==='star').length).toEqual(item.value);
    expect(starButtons.filter(btn=>btn.querySelector('mat-icon').textContent.trim()==='star_border').length).toEqual(5-item.value);
  });
  // Error messages appear once item.errors is populated, without disturbing the stars.
  it('should render 2 error messages', waitForAsync(() => {
    const compiled = fixture.debugElement.nativeElement;
    const starButtons=[...compiled.querySelector('mat-star-rating').children];
    const errors=[
      {message: 'First Error Message'},
      {message: 'Second Error Message'},
    ];
    expect(compiled.querySelector('mat-error')).toEqual(null);
    item.value=2;
    item.errors=errors;
    fixture.detectChanges();
    errors.forEach(err=>{
      expect(compiled.querySelector('mat-error').children[errors.indexOf(err)].textContent).toEqual(err.message);
    });
    expect(starButtons.filter(btn=>btn.querySelector('mat-icon').textContent.trim()==='star').length).toEqual(item.value);
    expect(starButtons.filter(btn=>btn.querySelector('mat-icon').textContent.trim()==='star_border').length).toEqual(5-item.value);
  }));
  // Clicking the Nth star (0-based index 3) sets the value to N+1.
  it('should change the field value by click on rating button', () => {
    const compiled = fixture.debugElement.nativeElement;
    const starButtons=[...compiled.querySelector('mat-star-rating').children];
    expect(starButtons.filter(btn=>btn.querySelector('mat-icon').textContent.trim()==='star').length).toEqual(0);
    starButtons[3].click();
    fixture.detectChanges();
    expect(item.value).toEqual(4);
  });
});
|
<filename>database/orderDB.js
const db = require('./mainDB')
// Data-access helpers for orders.
// FIX: all ids are now passed as pg-promise query parameters ($1/$2) instead
// of being interpolated into the SQL text, which was an SQL-injection vector
// for any externally supplied id. pg-promise formats values client-side, so
// the multi-statement BEGIN/COMMIT blocks keep working.
const Order = {
	new: () => db.one( `INSERT INTO order_data ( price ) VALUES ( 0 ) RETURNING id` ),
	getAll: () => db.any( `SELECT * FROM order_data` ),
	getById: id => db.one( `SELECT * FROM order_data WHERE id = $1`, [ id ] ),
	getContents: order_id => db.any( `BEGIN TRANSACTION;
		SELECT pizza_id AS customP FROM ordered_custom_pizzas WHERE order_id=$1;
		SELECT pizza_id AS specialtyP FROM ordered_specialty_pizzas WHERE order_id=$1;
		SELECT beverage_id AS beverage FROM ordered_beverages WHERE order_id=$1;
		COMMIT;`, [ order_id ] ),
	getCustomPizzas: order_id => db.any( `SELECT pizza_id AS custom FROM ordered_custom_pizzas WHERE order_id=$1`, [ order_id ] ),
	getSpecialtyPizzas: order_id => db.any( `SELECT pizza_id AS specialty FROM ordered_specialty_pizzas WHERE order_id=$1`, [ order_id ] ),
	getBeverages: order_id => db.any( `SELECT beverage_id as beverage FROM ordered_beverages WHERE order_id=$1`, [ order_id ] ),
	addCustomPizza: ( order_id, pizza_id ) => db.none( `INSERT INTO ordered_custom_pizzas ( order_id, pizza_id ) VALUES ( $1, $2 )`, [ order_id, pizza_id ] ),
	addSpecialtyPizza: ( order_id, pizza_id ) => db.none( `INSERT INTO ordered_specialty_pizzas ( order_id, pizza_id ) VALUES ( $1, $2 )`, [ order_id, pizza_id ] ),
	addBeverage: ( order_id, beverage_id ) => db.none( `INSERT INTO ordered_beverages ( order_id, beverage_id ) VALUES ( $1, $2 )`, [ order_id, beverage_id ] ),
	removeCustomPizza: ( order_id, pizza_id ) => db.none( `DELETE FROM ordered_custom_pizzas WHERE order_id=$1 AND pizza_id=$2`, [ order_id, pizza_id ] ),
	removeSpecialtyPizza: ( order_id, pizza_id ) => db.none( `DELETE FROM ordered_specialty_pizzas WHERE order_id=$1 AND pizza_id=$2`, [ order_id, pizza_id ] ),
	removeBeverage: ( order_id, beverage_id ) => db.none( `DELETE FROM ordered_beverages WHERE order_id=$1 AND beverage_id=$2`, [ order_id, beverage_id ] ),
	// Recomputes and persists the order total from its pizzas and beverages.
	calcPrice: order_id => db.one( `UPDATE order_data SET price =
		( SELECT COALESCE(SUM(price), CAST( 0 AS MONEY)) FROM ordered_custom_pizzas JOIN custom_pizza ON ordered_custom_pizzas.pizza_id = custom_pizza.id WHERE ordered_custom_pizzas.order_id = $1 )
		+
		( SELECT COALESCE(SUM(price), CAST( 0 AS MONEY)) FROM ordered_specialty_pizzas JOIN specialty_pizza ON ordered_specialty_pizzas.pizza_id = specialty_pizza.id WHERE ordered_specialty_pizzas.order_id = $1 )
		+
		( SELECT COALESCE(SUM(price), CAST( 0 AS MONEY)) FROM ordered_beverages JOIN beverage ON ordered_beverages.beverage_id = beverage.id WHERE ordered_beverages.order_id = $1 )
		WHERE id = $1 RETURNING price`, [ order_id ] ),
	// Deletes an order and all of its line items (custom pizza parts first).
	delete: order_id => db.none( `BEGIN TRANSACTION;
		DELETE FROM pizza_toppings WHERE pizza_id = ( SELECT pizza_id FROM ordered_custom_pizzas WHERE order_id = $1 );
		DELETE FROM pizza_crusts WHERE pizza_id = ( SELECT pizza_id FROM ordered_custom_pizzas WHERE order_id = $1 );
		DELETE FROM custom_pizza WHERE id = ( SELECT pizza_id FROM ordered_custom_pizzas WHERE order_id = $1 );
		DELETE FROM ordered_custom_pizzas WHERE order_id = $1;
		DELETE FROM ordered_specialty_pizzas WHERE order_id = $1;
		DELETE FROM ordered_beverages WHERE order_id = $1;
		COMMIT;
		`, [ order_id ] )
}

module.exports = { Order }
|
package com.finbourne.lusid.utilities;
import com.finbourne.lusid.ApiClient;
import com.finbourne.lusid.utilities.auth.HttpLusidTokenProvider;
import com.finbourne.lusid.utilities.auth.RefreshingTokenProvider;
import com.finbourne.lusid.utilities.auth.LusidToken;
import com.finbourne.lusid.utilities.auth.LusidTokenException;
import okhttp3.OkHttpClient;
/**
 * Utility class to build an ApiClient from a set of configuration
 */
public class ApiClientBuilder {

    /**
     * Builds an ApiClient implementation configured against a secrets file. Typically used
     * for communicating with LUSID via the APIs (e.g. {@link com.finbourne.lusid.api.TransactionPortfoliosApi}, {@link com.finbourne.lusid.api.QuotesApi}.
     *
     * ApiClient implementation enables use of REFRESH tokens (see https://support.finbourne.com/using-a-refresh-token)
     * and automatically handles token refreshing on expiry.
     *
     * @param apiConfiguration configuration to connect to LUSID API
     * @return an ApiClient that transparently refreshes its access token
     *
     * @throws LusidTokenException on failing to authenticate and retrieve an initial {@link LusidToken}
     */
    public ApiClient build(ApiConfiguration apiConfiguration) throws LusidTokenException {
        // http client to use for api and auth calls
        OkHttpClient httpClient = createHttpClient(apiConfiguration);

        // token provider to keep client authenticated with automated token refreshing
        RefreshingTokenProvider refreshingTokenProvider = new RefreshingTokenProvider(new HttpLusidTokenProvider(apiConfiguration, httpClient));
        LusidToken lusidToken = refreshingTokenProvider.get();

        // setup api client that managed submissions with latest token
        ApiClient defaultApiClient = createDefaultApiClient(apiConfiguration, httpClient, lusidToken);
        return new RefreshingTokenApiClient(defaultApiClient, refreshingTokenProvider);
    }

    // Assembles a plain ApiClient: base path, auth header and optional proxy client.
    ApiClient createDefaultApiClient(ApiConfiguration apiConfiguration, OkHttpClient httpClient, LusidToken lusidToken) throws LusidTokenException {
        ApiClient apiClient = createApiClient();

        // NOTE(review): the configured http client (which carries the proxy
        // settings) is only installed when a proxy address is set; otherwise
        // the ApiClient's default client is used — confirm this is intended.
        if (apiConfiguration.getProxyAddress() != null) {
            apiClient.setHttpClient(httpClient);
        }

        // Fail fast rather than issuing requests with a null Authorization header.
        if (lusidToken.getAccessToken() == null) {
            throw new LusidTokenException("Cannot construct an API client with a null authorisation header. Ensure " +
                    "lusid token generated is valid");
        } else {
            apiClient.addDefaultHeader("Authorization", "Bearer " + lusidToken.getAccessToken());
        }

        if (apiConfiguration.getApplicationName() != null) {
            apiClient.addDefaultHeader("X-LUSID-Application", apiConfiguration.getApplicationName());
        }

        apiClient.setBasePath(apiConfiguration.getApiUrl());
        return apiClient;
    }

    private OkHttpClient createHttpClient(ApiConfiguration apiConfiguration){
        return new HttpClientFactory().build(apiConfiguration);
    }

    // allows us to mock out api client for testing purposes
    ApiClient createApiClient(){
        return new ApiClient();
    }
}
|
from dagster import graph, op, schedule
@op(config_schema={"date": str})
def do_something(_):
    # Placeholder op; receives a `date` string via run config.
    ...


@graph
def do_it_all():
    do_something()


@schedule(cron_schedule="0 0 * * *", job=do_it_all.to_job(), execution_timezone="US/Central")
def do_it_all_schedule(context):
    # Runs daily at midnight US/Central and injects the scheduled execution
    # date into do_something's run config.
    date = context.scheduled_execution_time.strftime("%Y-%m-%d")
    return {"solids": {"do_something": {"config": {"date": date}}}}
|
"""
Functions to fix fonts so they conform to the Google Fonts
specification:
https://github.com/googlefonts/gf-docs/tree/main/Spec
"""
from fontTools.misc.fixedTools import otRound
from fontTools.ttLib import TTFont, newTable, getTableModule
from fontTools.ttLib.tables import ttProgram
from fontTools.ttLib.tables._c_m_a_p import CmapSubtable
from fontTools.ttLib.tables._f_v_a_r import NamedInstance
from gftools.util.google_fonts import _KNOWN_WEIGHTS
from gftools.utils import (
download_family_from_Google_Fonts,
Google_Fonts_has_family,
font_stylename,
font_familyname,
family_bounding_box,
get_unencoded_glyphs,
normalize_unicode_marks,
partition_cmap,
typo_metrics_enabled,
validate_family,
unique_name,
)
from gftools.util.styles import (get_stylename, is_regular, is_bold, is_italic)
from os.path import basename
from copy import deepcopy
import logging
log = logging.getLogger(__name__)
__all__ = [
"remove_tables",
"add_dummy_dsig",
"fix_unhinted_font",
"fix_hinted_font",
"fix_fs_type",
"fix_weight_class",
"fix_fs_selection",
"fix_mac_style",
"fix_fvar_instances",
"update_nametable",
"fix_nametable",
"inherit_vertical_metrics",
"fix_vertical_metrics",
"fix_ascii_fontmetadata",
"drop_nonpid0_cmap",
"drop_mac_cmap",
"fix_pua",
"fix_isFixedPitch",
"drop_mac_names",
"drop_superfluous_mac_names",
"fix_font",
"fix_family",
]
# The _KNOWN_WEIGHT_VALUES constant is used internally by the GF Engineering
# team so we cannot update ourselves. TODO (<NAME>) unify this one day
WEIGHT_NAMES = _KNOWN_WEIGHTS
# NOTE(review): WEIGHT_NAMES aliases (does not copy) the imported dict, so
# the del/additions below mutate _KNOWN_WEIGHTS for every other consumer.
del WEIGHT_NAMES[""]
WEIGHT_NAMES["Hairline"] = 1
WEIGHT_NAMES["ExtraBlack"] = 1000
# Reverse lookup: usWeightClass value -> weight name.
WEIGHT_VALUES = {v: k for k, v in WEIGHT_NAMES.items()}
# Tool-specific or superfluous tables that remove_tables() strips so they
# do not ship in production fonts.
UNWANTED_TABLES = frozenset(
    [
        "FFTM",
        "TTFA",
        "TSI0",
        "TSI1",
        "TSI2",
        "TSI3",
        "TSI5",
        "prop",
        "MVAR",
    ]
)
def remove_tables(ttFont, tables=None):
    """Remove unwanted tables from a font. Only tables belonging to the
    UNWANTED_TABLES set are ever deleted.

    Args:
        ttFont: a TTFont instance
        tables: an optional iterable of table tags to remove
    """
    requested = UNWANTED_TABLES if not tables else frozenset(tables)
    present = frozenset(ttFont.keys())

    missing = requested - present
    if missing:
        log.warning(
            f"Cannot remove tables '{list(missing)}' since they are "
            f"not in the font."
        )

    protected = requested - UNWANTED_TABLES
    if protected:
        log.warning(
            f"Cannot remove tables '{list(protected)}' since they are required"
        )

    removable = UNWANTED_TABLES & present & requested
    if not removable:
        return
    log.info(f"Removing tables '{list(removable)}' from font")
    for tag in removable:
        del ttFont[tag]
def add_dummy_dsig(ttFont):
    """Add a dummy dsig table to a font. Older versions of MS Word
    require this table.

    Args:
        ttFont: a TTFont instance
    """
    # An empty, flag-less DSIG with zero signature records is sufficient.
    dsig = newTable("DSIG")
    dsig.ulVersion = 1
    dsig.usFlag = 0
    dsig.usNumSigs = 0
    dsig.signatureRecords = []
    ttFont.tables["DSIG"] = dsig
def fix_unhinted_font(ttFont):
    """Improve the appearance of an unhinted font on Win platforms by:
        - Add a new GASP table with a newtable that has a single
          range which is set to smooth.
        - Add a new prep table which enables dropout control.

    Args:
        ttFont: a TTFont instance
    """
    gasp = newTable("gasp")
    # Set GASP so all sizes are smooth
    gasp.gaspRange = {65535: 0x000A}

    # FIX: the original assigned the undefined name `prep` (NameError at
    # runtime). Build the standard unhinted prep program: push 511 and
    # enable SCANCTRL, then set SCANTYPE 4 (smart dropout control).
    program = ttProgram.Program()
    assembly = ["PUSHW[]", "511", "SCANCTRL[]", "PUSHB[]", "4", "SCANTYPE[]"]
    program.fromAssembly(assembly)

    prep = newTable("prep")
    prep.program = program

    ttFont["gasp"] = gasp
    ttFont["prep"] = prep
def fix_hinted_font(ttFont):
    """Improve the appearance of a hinted font on Win platforms by enabling
    the head table's flag 3.

    Args:
        ttFont: a TTFont instance

    Returns:
        True/False whether the head flags changed; for fonts without an
        fpgm table, ``(False, [reason])`` — unchanged historical contract.
    """
    if "fpgm" not in ttFont:
        return False, ["Skipping. Font is not hinted."]
    previous_flags = ttFont["head"].flags
    ttFont["head"].flags = previous_flags | (1 << 3)
    return ttFont["head"].flags != previous_flags
def fix_fs_type(ttFont):
    """Set the OS/2 table's fsType flag to 0 (Installable embedding).

    Args:
        ttFont: a TTFont instance

    Returns:
        True when the flag was changed, False when it was already 0.
    """
    previous = ttFont["OS/2"].fsType
    ttFont["OS/2"].fsType = 0
    return previous != 0
def fix_weight_class(ttFont):
    """Set the OS/2 table's usWeightClass so it conforms to GF's supported
    styles table:
    https://github.com/googlefonts/gf-docs/tree/main/Spec#supported-styles

    Args:
        ttFont: a TTFont instance

    Returns:
        True when usWeightClass was changed, False otherwise.

    Raises:
        ValueError: when the style name contains no known weight token
            and is not a bare Italic.
    """
    old_weight_class = ttFont["OS/2"].usWeightClass

    stylename = font_stylename(ttFont)
    tokens = stylename.split()
    # Order WEIGHT_NAMES so longest names are first
    for style in sorted(WEIGHT_NAMES, key=lambda k: len(k), reverse=True):
        if style in tokens:
            ttFont["OS/2"].usWeightClass = WEIGHT_NAMES[style]
            return ttFont["OS/2"].usWeightClass != old_weight_class

    # A bare "Italic" style carries no weight token: treat it as Regular (400).
    if "Italic" in tokens:
        ttFont["OS/2"].usWeightClass = 400
        return ttFont["OS/2"].usWeightClass != old_weight_class
    raise ValueError(
        f"Cannot determine usWeightClass because font style, '{stylename}' "
        f"doesn't have a weight token which is in our known "
        f"weights, '{WEIGHT_NAMES.keys()}'"
    )
def fix_fs_selection(ttFont):
    """Fix the OS/2 table's fsSelection so it conforms to GF's supported
    styles table:
    https://github.com/googlefonts/gf-docs/tree/main/Spec#supported-styles

    Args:
        ttFont: a TTFont instance

    Returns:
        True when fsSelection was modified.
    """
    tokens = set(font_stylename(ttFont).split())
    old_selection = ttFont["OS/2"].fsSelection

    # Preserve only bit 7 (USE_TYPO_METRICS); recompute all style bits.
    new_selection = old_selection & (1 << 7)
    if "Italic" in tokens:
        new_selection |= 1 << 0  # ITALIC
    if "Bold" in tokens:
        new_selection |= 1 << 5  # BOLD
    if not tokens & {"Bold", "Italic"}:
        new_selection |= 1 << 6  # REGULAR
    ttFont["OS/2"].fsSelection = new_selection
    return old_selection != new_selection
def fix_mac_style(ttFont):
    """Fix the head table's macStyle so it conforms to GF's supported
    styles table:
    https://github.com/googlefonts/gf-docs/tree/main/Spec#supported-styles

    Args:
        ttFont: a TTFont instance
    """
    tokens = set(font_stylename(ttFont).split())
    mac_style = 0
    if "Bold" in tokens:
        mac_style |= 1 << 0  # BOLD
    if "Italic" in tokens:
        mac_style |= 1 << 1  # ITALIC
    ttFont["head"].macStyle = mac_style
def fix_fvar_instances(ttFont):
    """Replace a variable font's fvar instances with a set of new instances
    that conform to the Google Fonts instance spec:
    https://github.com/googlefonts/gf-docs/tree/main/Spec#fvar-instances

    Args:
        ttFont: a TTFont instance

    Raises:
        ValueError: if the font has no fvar table.
    """
    if "fvar" not in ttFont:
        raise ValueError("ttFont is not a variable font")

    fvar = ttFont["fvar"]
    default_axis_vals = {a.axisTag: a.defaultValue for a in fvar.axes}

    stylename = font_stylename(ttFont)
    is_italic = "Italic" in stylename
    is_roman_and_italic = any(a for a in ("slnt", "ital") if a in default_axis_vals)

    # NOTE(review): assumes a wght axis exists; wght_axis is None otherwise
    # and the min/max reads below would raise AttributeError — confirm.
    wght_axis = next((a for a in fvar.axes if a.axisTag == "wght"), None)
    wght_min = int(wght_axis.minValue)
    wght_max = int(wght_axis.maxValue)

    nametable = ttFont["name"]

    def gen_instances(is_italic):
        # One named instance per 100 units of weight across the axis range.
        results = []
        for wght_val in range(wght_min, wght_max + 100, 100):
            name = (
                WEIGHT_VALUES[wght_val]
                if not is_italic
                else f"{WEIGHT_VALUES[wght_val]} Italic".strip()
            )
            name = name.replace("Regular Italic", "Italic")

            coordinates = deepcopy(default_axis_vals)
            coordinates["wght"] = wght_val

            inst = NamedInstance()
            inst.subfamilyNameID = nametable.addName(name)
            inst.coordinates = coordinates
            results.append(inst)
        return results

    instances = []
    if is_roman_and_italic:
        # Fonts carrying both roman and italic designs get both instance sets.
        for bool_ in (False, True):
            instances += gen_instances(is_italic=bool_)
    elif is_italic:
        instances += gen_instances(is_italic=True)
    else:
        instances += gen_instances(is_italic=False)
    fvar.instances = instances
def update_nametable(ttFont, family_name=None, style_name=None):
    """Update a static font's name table. The updated name table will conform
    to the Google Fonts support styles table:
    https://github.com/googlefonts/gf-docs/tree/main/Spec#supported-styles

    If a style_name includes tokens other than wght and ital, these tokens
    will be appended to the family name e.g

    Input:
    family_name="MyFont"
    style_name="SemiCondensed SemiBold"

    Output:
    familyName (nameID 1) = "MyFont SemiCondensed SemiBold
    subFamilyName (nameID 2) = "Regular"
    typo familyName (nameID 16) = "MyFont SemiCondensed"
    typo subFamilyName (nameID 17) = "SemiBold"

    Google Fonts has used this model for several years e.g
    https://fonts.google.com/?query=cabin

    Args:
        ttFont:
        family_name: New family name
        style_name: New style name
    """
    if "fvar" in ttFont:
        raise ValueError("Cannot update the nametable for a variable font")

    nametable = ttFont["name"]

    # Remove nametable records which are not Win US English
    # TODO this is too greedy. We should preserve multilingual
    # names in the future. Please note, this has always been an issue.
    platforms = set()
    for rec in nametable.names:
        platforms.add((rec.platformID, rec.platEncID, rec.langID))
    platforms_to_remove = platforms ^ set([(3, 1, 0x409)])
    if platforms_to_remove:
        log.warning(
            f"Removing records which are not Win US English, {list(platforms_to_remove)}"
        )
        for platformID, platEncID, langID in platforms_to_remove:
            nametable.removeNames(
                platformID=platformID, platEncID=platEncID, langID=langID
            )

    # Remove any name records which contain linebreaks
    contains_linebreaks = []
    for r in nametable.names:
        for char in ("\n", "\r"):
            if char in r.toUnicode():
                contains_linebreaks.append(r.nameID)
    for nameID in contains_linebreaks:
        nametable.removeNames(nameID)

    if not family_name:
        family_name = font_familyname(ttFont)
    if not style_name:
        style_name = font_stylename(ttFont)

    # RIBBI styles map directly onto nameIDs 1/2; for any other style the
    # extra tokens move into the family name and the typographic IDs 16/17
    # carry the full weight/italic style.
    is_ribbi = style_name in ("Regular", "Bold", "Italic", "Bold Italic")
    nameids = {}

    if is_ribbi:
        nameids[1] = family_name
        nameids[2] = style_name
    else:
        tokens = style_name.split()
        family_name_suffix = " ".join([t for t in tokens if t not in ["Italic"]])
        nameids[1] = f"{family_name} {family_name_suffix}".strip()
        nameids[2] = "Regular" if "Italic" not in tokens else "Italic"

        typo_family_suffix = " ".join(
            t for t in tokens if t not in list(WEIGHT_NAMES) + ["Italic"]
        )
        nameids[16] = f"{family_name} {typo_family_suffix}".strip()
        typo_style = " ".join(t for t in tokens if t in list(WEIGHT_NAMES) + ["Italic"])
        nameids[17] = typo_style

    family_name = nameids.get(16) or nameids.get(1)
    style_name = nameids.get(17) or nameids.get(2)

    # create NameIDs 3, 4, 6
    nameids[4] = f"{family_name} {style_name}"
    nameids[6] = f"{family_name.replace(' ', '')}-{style_name.replace(' ', '')}"
    nameids[3] = unique_name(ttFont, nameids)

    # Pass through all records and replace occurences of the old family name
    # with the new family name
    current_family_name = font_familyname(ttFont)
    for record in nametable.names:
        string = record.toUnicode()
        if current_family_name in string:
            nametable.setName(
                string.replace(current_family_name, family_name),
                record.nameID,
                record.platformID,
                record.platEncID,
                record.langID,
            )

    # Remove previous typographic names
    for nameID in (16, 17):
        nametable.removeNames(nameID=nameID)

    # Update nametable with new names
    for nameID, string in nameids.items():
        nametable.setName(string, nameID, 3, 1, 0x409)
def fix_nametable(ttFont):
    """Fix a static font's name table so it conforms to the Google Fonts
    supported styles table:
    https://github.com/googlefonts/gf-docs/tree/main/Spec#supported-styles

    Variable fonts are handled by fontTools' instancer name helper using the
    fvar default axis positions; static fonts are rebuilt from the detected
    family and style names.

    Args:
        ttFont: a TTFont instance
    """
    if "fvar" in ttFont:
        from fontTools.varLib.instancer.names import updateNameTable

        axis_defaults = {axis.axisTag: axis.defaultValue for axis in ttFont["fvar"].axes}
        updateNameTable(ttFont, axis_defaults)
        return
    update_nametable(ttFont, font_familyname(ttFont), font_stylename(ttFont))
def inherit_vertical_metrics(ttFonts, family_name=None):
    """Inherit the vertical metrics from the same family which is
    hosted on Google Fonts.

    Args:
        ttFonts: a list of TTFont instances which belong to a family
        family_name: Optional string which allows users to specify a
            different family to inherit from e.g "Maven Pro".
    """
    if not family_name:
        family_name = font_familyname(ttFonts[0])

    remote_fonts = [TTFont(path) for path in download_family_from_Google_Fonts(family_name)]
    remote_by_style = {font_stylename(f): f for f in remote_fonts}
    # TODO: use the Regular font as the fallback instead. If the remote family
    # is a VF, use the font which has a Regular instance.
    remote_fallback = next(iter(remote_by_style.values()))

    local_by_style = {font_stylename(f): f for f in ttFonts}
    for style, font in local_by_style.items():
        src_font = remote_by_style.get(style, remote_fallback)
        copy_vertical_metrics(src_font, font)

        # Mirror the source font's USE_TYPO_METRICS flag (OS/2 fsSelection bit 7)
        if typo_metrics_enabled(src_font):
            font["OS/2"].fsSelection |= 1 << 7
def fix_vertical_metrics(ttFonts):
    """Fix a family's vertical metrics based on:
    https://github.com/googlefonts/gf-docs/tree/main/VerticalMetrics

    Args:
        ttFonts: a list of TTFont instances which belong to a family
    """
    # Prefer the Regular style as the metrics reference; otherwise fall back
    # to the first font of the family.
    reference = ttFonts[0]
    for candidate in ttFonts:
        if font_stylename(candidate) == "Regular":
            reference = candidate
            break

    # TODO: CJK Fonts?

    os2 = reference["OS/2"]
    hhea = reference["hhea"]
    # If OS/2 fsSelection bit 7 (USE_TYPO_METRICS) isn't enabled, enable it
    # and seed the typo metrics from the previous win metrics.
    if not typo_metrics_enabled(reference):
        os2.fsSelection |= 1 << 7

        os2.sTypoAscender = os2.usWinAscent
        os2.sTypoDescender = -os2.usWinDescent
        os2.sTypoLineGap = 0

        # Keep hhea in lockstep with the typo metrics.
        hhea.ascent = os2.sTypoAscender
        hhea.descent = os2.sTypoDescender
        hhea.lineGap = os2.sTypoLineGap

    # The win metrics must cover the whole family's bounding box.
    lowest, highest = family_bounding_box(ttFonts)
    os2.usWinAscent = highest
    os2.usWinDescent = abs(lowest)

    # Propagate the reference metrics to every font in the family.
    for font in ttFonts:
        font["OS/2"].fsSelection |= 1 << 7
        copy_vertical_metrics(reference, font)
def copy_vertical_metrics(src_font, dst_font):
    """Copy all vertical-metric fields from src_font onto dst_font in place.

    Covers the OS/2 win/typo metrics and the hhea ascent/descent/lineGap.
    """
    attribs_by_table = {
        "OS/2": ("usWinAscent", "usWinDescent", "sTypoAscender",
                 "sTypoDescender", "sTypoLineGap"),
        "hhea": ("ascent", "descent", "lineGap"),
    }
    for table, attribs in attribs_by_table.items():
        for attrib in attribs:
            setattr(dst_font[table], attrib, getattr(src_font[table], attrib))
def fix_italic_angle(ttFont):
    """Zero out post.italicAngle for non-Italic styles (in place)."""
    is_italic = "Italic" in font_stylename(ttFont)
    if not is_italic and ttFont["post"].italicAngle != 0:
        ttFont["post"].italicAngle = 0
    # TODO: implement the equivalent correction for italic fonts
def fix_ascii_fontmetadata(font):
    """Fixes TTF 'name' table strings to be ascii only (in place)."""
    for record in font['name'].names:
        encoding = record.getEncoding()
        decoded = record.string.decode(encoding)
        record.string = normalize_unicode_marks(decoded).encode(encoding)
def convert_cmap_subtables_to_v4(font):
    """Converts all cmap subtables to format 4.

    Returns a list of tuples (format, platformID, platEncID) of the tables
    which needed conversion."""
    cmap = font['cmap']
    outtables = []
    # Records (format, platformID, platEncID) for every subtable that was
    # not already format 4, so callers can report what changed.
    converted = []
    for table in cmap.tables:
        if table.format != 4:
            converted.append((table.format, table.platformID, table.platEncID))
        # Rebuild each subtable as format 4, preserving its platform,
        # encoding, language and character map.
        newtable = CmapSubtable.newSubtable(4)
        newtable.platformID = table.platformID
        newtable.platEncID = table.platEncID
        newtable.language = table.language
        newtable.cmap = table.cmap
        outtables.append(newtable)
    # Replace the font's subtable list wholesale with the rebuilt tables.
    font['cmap'].tables = outtables
    return converted
def drop_nonpid0_cmap(font, report=True):
    """Remove all cmap subtables except platformID 0 (Unicode) ones.

    Returns the list of dropped subtables."""
    def is_unicode_table(table):
        return table.platformID == 0

    _, dropped = partition_cmap(font, is_unicode_table, report)
    return dropped
def drop_mac_cmap(font, report=True):
    """Remove Macintosh (platformID 1, platEncID 0) cmap subtables.

    Returns the list of dropped subtables."""
    def is_not_mac_roman(table):
        return table.platformID != 1 or table.platEncID != 0

    _, dropped = partition_cmap(font, is_not_mac_roman, report)
    return dropped
def fix_pua(font):
    """Make every unencoded glyph reachable via a cmap entry.

    Unencoded glyphs are mapped into Supplementary Private Use Area-A of a
    (platformID 3, platEncID 10) format-12 subtable at 0xF0000 + glyphID.

    Returns True when a remapping was performed; returns None (falsy) when
    the font has no unencoded glyphs.
    """
    unencoded_glyphs = get_unencoded_glyphs(font)
    if not unencoded_glyphs:
        return
    ucs2cmap = None
    cmap = font["cmap"]
    # Check if an UCS-2 cmap exists (first match wins, in preference order)
    for ucs2cmapid in ((3, 1), (0, 3), (3, 0)):
        ucs2cmap = cmap.getcmap(ucs2cmapid[0], ucs2cmapid[1])
        if ucs2cmap:
            break
    # Create UCS-4 cmap and copy the contents of UCS-2 cmap
    # unless UCS 4 cmap already exists
    ucs4cmap = cmap.getcmap(3, 10)
    if not ucs4cmap:
        cmapModule = getTableModule('cmap')
        ucs4cmap = cmapModule.cmap_format_12(12)
        ucs4cmap.platformID = 3
        ucs4cmap.platEncID = 10
        ucs4cmap.language = 0
        if ucs2cmap:
            # deep copy so later PUA additions don't mutate the UCS-2 table
            ucs4cmap.cmap = deepcopy(ucs2cmap.cmap)
        cmap.tables.append(ucs4cmap)
    # Map all glyphs to UCS-4 cmap Supplementary PUA-A codepoints
    # by 0xF0000 + glyphID
    ucs4cmap = cmap.getcmap(3, 10)  # re-fetch: may be the table just appended
    for glyphID, glyph in enumerate(font.getGlyphOrder()):
        if glyph in unencoded_glyphs:
            ucs4cmap.cmap[0xF0000 + glyphID] = glyph
    font['cmap'] = cmap
    return True
def fix_isFixedPitch(ttfont):
    """Make post.isFixedPitch and related fields consistent with the font.

    The font is treated as monospaced when all uppercase basic-Latin glyphs
    (A-Z) share a single advance width. For monospaced fonts this sets
    post.isFixedPitch, the OS/2 panose proportion, hhea.advanceWidthMax and
    OS/2.xAvgCharWidth; for proportional fonts it clears isFixedPitch and
    panose.bProportion.

    Args:
        ttfont: a TTFont instance (modified in place)

    Returns:
        (changed, messages): whether anything was modified, and a list of
        human-readable strings describing each decision.
    """
    same_width = set()
    glyph_metrics = ttfont['hmtx'].metrics
    messages = []
    changed = False
    # Sample A-Z advance widths.
    # NOTE(review): assumes these glyphs are keyed by their character in the
    # hmtx metrics dict — confirm for fonts with non-standard glyph names.
    for character in [chr(c) for c in range(65, 91)]:
        same_width.add(glyph_metrics[character][0])

    if len(same_width) == 1:
        # Monospaced font: isFixedPitch must be 1.
        if ttfont['post'].isFixedPitch == 1:
            messages.append("Skipping isFixedPitch is set correctly")
        else:
            # BUGFIX: the message previously said "to 0" while setting 1.
            messages.append("Font is monospace. Updating isFixedPitch to 1")
            ttfont['post'].isFixedPitch = 1
            changed = True

        # The expected panose proportion value depends on the family type.
        familyType = ttfont['OS/2'].panose.bFamilyType
        if familyType == 2:
            expected = 9
        elif familyType == 3 or familyType == 5:
            expected = 3
        elif familyType == 0:
            messages.append("Font is monospace but panose fields seems to be not set."
                            " Setting values to defaults (FamilyType = 2, Proportion = 9).")
            ttfont['OS/2'].panose.bFamilyType = 2
            ttfont['OS/2'].panose.bProportion = 9
            changed = True
            expected = None
        else:
            expected = None

        if expected:
            if ttfont['OS/2'].panose.bProportion == expected:
                messages.append("Skipping OS/2.panose.bProportion is set correctly")
            else:
                messages.append(("Font is monospace."
                                 " Since OS/2.panose.bFamilyType is {}"
                                 " we're updating OS/2.panose.bProportion"
                                 " to {}").format(familyType, expected))
                ttfont['OS/2'].panose.bProportion = expected
                changed = True

        # hhea.advanceWidthMax must equal the largest positive advance width.
        widths = [m[0] for m in ttfont['hmtx'].metrics.values() if m[0] > 0]
        width_max = max(widths)
        if ttfont['hhea'].advanceWidthMax == width_max:
            messages.append("Skipping hhea.advanceWidthMax is set correctly")
        else:
            # BUGFIX: was appended to a misspelled name ("messsages"), which
            # raised NameError whenever this branch was reached.
            messages.append("Font is monospace. Updating hhea.advanceWidthMax to %i" %
                            width_max)
            ttfont['hhea'].advanceWidthMax = width_max
            changed = True

        avg_width = otRound(sum(widths) / len(widths))
        if avg_width == ttfont['OS/2'].xAvgCharWidth:
            messages.append("Skipping OS/2.xAvgCharWidth is set correctly")
        else:
            messages.append("Font is monospace. Updating OS/2.xAvgCharWidth to %i" %
                            avg_width)
            ttfont['OS/2'].xAvgCharWidth = avg_width
            changed = True
    else:
        # Proportional font: both flags must be zero.
        if ttfont['post'].isFixedPitch != 0 or ttfont['OS/2'].panose.bProportion != 0:
            changed = True
        ttfont['post'].isFixedPitch = 0
        ttfont['OS/2'].panose.bProportion = 0
    return changed, messages
def drop_superfluous_mac_names(ttfont):
    """Drop superfluous Mac nameIDs.

    The following nameIDS are kept:
    1: Font Family name,
    2: Font Family Subfamily name,
    3: Unique font identifier,
    4: Full font name,
    5: Version string,
    6: Postscript name,
    16: Typographic family name,
    17: Typographic Subfamily name
    18: Compatible full (Macintosh only),
    20: PostScript CID,
    21: WWS Family Name,
    22: WWS Subfamily Name,
    25: Variations PostScript Name Prefix.

    We keep these IDs in order for certain application to still function
    such as Word 2011. IDs 1-6 are very common, > 16 are edge cases.
    https://www.microsoft.com/typography/otspec/name.htm

    Returns True when at least one record was removed."""
    keep_ids = frozenset((1, 2, 3, 4, 5, 6, 16, 17, 18, 20, 21, 22, 25))
    changed = False
    name_table = ttfont['name']
    for name_id in range(255):
        if name_id in keep_ids:
            continue
        # Only Mac Roman / language 0 records are considered.
        record = name_table.getName(name_id, 1, 0, 0)
        if record:
            name_table.names.remove(record)
            changed = True
    return changed
def drop_mac_names(ttfont):
    """Drop all mac names.

    Removes every Mac Roman / language 0 record for nameIDs 0-254 and
    returns True when anything was removed."""
    changed = False
    name_table = ttfont['name']
    for name_id in range(255):
        record = name_table.getName(name_id, 1, 0, 0)
        if record:
            name_table.names.remove(record)
            changed = True
    return changed
def fix_font(font, include_source_fixes=False):
    """Apply the standard Google Fonts fixes to a single font, in place.

    Args:
        font: a TTFont instance
        include_source_fixes: also apply fixes which should ideally be
            made in the font's source files.
    """
    font["OS/2"].version = 4
    if "DSIG" not in font:
        add_dummy_dsig(font)

    # Hinted and unhinted fonts need different gasp/smoothing treatment.
    if "fpgm" in font:
        fix_hinted_font(font)
    else:
        fix_unhinted_font(font)

    if "fvar" in font:
        remove_tables(font, ["MVAR"])

    if not include_source_fixes:
        return

    log.warning(
        "include-source-fixes is enabled. Please consider fixing the "
        "source files instead."
    )
    remove_tables(font)
    fix_nametable(font)
    fix_fs_type(font)
    fix_fs_selection(font)
    fix_mac_style(font)
    fix_weight_class(font)
    fix_italic_angle(font)

    if "fvar" in font:
        fix_fvar_instances(font)
    # TODO: add gen-stat once merged
    # https://github.com/googlefonts/gftools/pull/263
def fix_family(fonts, include_source_fixes=False):
    """Fix all fonts in a family"""
    validate_family(fonts)

    family_name = font_familyname(fonts[0])
    for font in fonts:
        fix_font(font, include_source_fixes=include_source_fixes)

    if not include_source_fixes:
        return
    try:
        if Google_Fonts_has_family(family_name):
            inherit_vertical_metrics(fonts)
        else:
            log.warning(
                f"{family_name} is not on Google Fonts. Skipping "
                "regression fixes"
            )
    except FileNotFoundError:
        # No API key available; regression fixes need the hosted family.
        log.warning(
            f"Google Fonts api key not found so we can't regression "
            "fix fonts. See Repo readme to add keys."
        )
    fix_vertical_metrics(fonts)
class FontFixer():
    """Loads a font, runs a configurable list of fix functions over it and,
    on destruction, reports messages and saves the result to "<path>.fix"
    when any fix changed the font.

    Each fix callable receives the TTFont and returns either a bool
    ("changed") or a (changed, messages) tuple.
    """

    def __init__(self, path, report=True, verbose=False, **kwargs):
        self.font = TTFont(path)
        self.path = path
        self.font_filename = basename(path)
        self.saveit = False
        self.report = report
        self.verbose = verbose
        self.messages = []
        self.args = kwargs
        self.fixes = kwargs.get("fixes", [])

    def __del__(self):
        # Reporting/saving on garbage collection preserves the original
        # behaviour of this tool.
        if self.report:
            print("\n".join(self.messages))
        if self.saveit:
            if self.verbose:
                print('Saving %s to %s.fix' % (self.font_filename, self.path))
            self.font.save(self.path + ".fix")
        elif self.verbose:
            print('There were no changes needed on %s!' % self.font_filename)

    def show(self):
        pass

    def fix(self):
        for fixer in self.fixes:
            result = fixer(self.font)
            if isinstance(result, tuple) and len(result) == 2:
                changed, notes = result
                self.messages.extend(notes)
            else:
                changed = result
            if changed:
                self.saveit = True
class GaspFixer(FontFixer):
    """Sets the default gasp range (0xFFFF) to a given behaviour value,
    creating the gasp table when the font lacks one."""

    def fix(self, value=15):
        """Set gasp.gaspRange[65535] to `value`, creating the table if needed.

        Marks the font as needing a save in either case.
        """
        try:
            table = self.font.get('gasp')
            table.gaspRange[65535] = value
            self.saveit = True
        # BUGFIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit). `get` returns None when the table
        # is missing, producing AttributeError/TypeError here.
        except (AttributeError, TypeError):
            print(('ER: {}: no table gasp... '
                   'Creating new table. ').format(self.path))
            table = ttLib.newTable('gasp')
            table.gaspRange = {65535: value}
            self.font['gasp'] = table
            self.saveit = True

    def show(self):
        """Print the current gasp value for range 65535, or an error."""
        # BUGFIX: TTFont.get returns None instead of raising, so the old
        # try/except never printed "no table gasp" and then crashed with an
        # uncaught AttributeError on None.gaspRange.
        table = self.font.get('gasp')
        if table is None:
            print('ER: {}: no table gasp'.format(self.path))
            return
        try:
            print(table.gaspRange[65535])
        # BUGFIX: dict lookup raises KeyError, not the previously caught
        # IndexError, when the 65535 range is absent.
        except KeyError:
            print('ER: {}: no index 65535'.format(self.path))
|
def most_common(nums):
    """Return the most frequently occurring elements of `nums`.

    Args:
        nums: an iterable of hashable values.

    Returns:
        A list of all values tied for the maximum count, in first-seen
        order. An empty input returns an empty list (the previous
        implementation raised ValueError on empty input).
    """
    from collections import Counter

    counts = Counter(nums)
    if not counts:
        return []
    max_count = max(counts.values())
    # Counter preserves first-insertion order, matching the original output.
    return [value for value, count in counts.items() if count == max_count]
#!/bin/bash
# Run a single dieharder randomness test with a fixed seed so the run is
# reproducible:
#   -d 204  selects a specific test (see `dieharder -l` for the list)
#   -g 12   selects a specific generator (see `dieharder -g -1` for the list)
#   -S ...  fixed RNG seed
dieharder -d 204 -g 12 -S 1525533324
|
#!/usr/bin/env bash
# Install system requirements and Docker using the helper scripts that live
# next to this entry point.

# Fail fast: abort on command failure, unset variables and pipeline errors,
# so a failed system-requirements install doesn't silently continue into the
# Docker install.
set -euo pipefail

base="$(dirname "$0")"
bash "$base/scripts/install-system-requirements.sh"
bash "$base/scripts/install-docker.sh"
|
// End-to-end API tests for the /cbl/student-competencies endpoints.
// NOTE(review): the literal totals/IDs below are tied to the sample database
// fixture loaded by cy.resetDatabase() — update them together with the fixture.
describe('/cbl/student-competencies API', () => {
    // load sample database before tests
    before(() => {
        cy.resetDatabase();
    });

    // authenticate as 'teacher' user
    beforeEach(() => {
        cy.loginAs('teacher');
    });

    it('Expected student competencies exist', () => {
        // list endpoint: limit=0 returns every record
        cy.request('/cbl/student-competencies?format=json&limit=0').then(response => {
            expect(response).property('status').to.eq(200);
            expect(response).property('body').to.be.an('object');
            expect(response.body).property('success').to.be.true;
            expect(response.body).property('total').to.eq(635);
            expect(response.body).property('limit').to.eq(0);
            expect(response.body).property('data').to.be.an('array');
            expect(response.body.data).to.have.length(635);
            expect(response.body.data[0]).to.include({
                ID: 635,
                Class: 'Slate\\CBL\\StudentCompetency',
                Created: 1628468422,
                CreatorID: 2,
                StudentID: 6,
                CompetencyID: 40,
                Level: 9,
                EnteredVia: 'enrollment',
                BaselineRating: null
            });
        });

        // detail endpoint: completion + effectiveDemonstrationsData includes
        cy.request('/cbl/student-competencies/1?format=json&include=completion,effectiveDemonstrationsData').then(response => {
            expect(response).property('status').to.eq(200);
            expect(response).property('body').to.be.an('object');
            expect(response.body).property('data').to.be.an('object');
            expect(response.body.data).to.include({
                ID: 1,
                Class: 'Slate\\CBL\\StudentCompetency',
                Created: 1546401845,
                CreatorID: 2,
                StudentID: 4,
                CompetencyID: 1,
                Level: 9,
                EnteredVia: 'enrollment',
                BaselineRating: 9
            });
            expect(response.body.data).property('completion').to.be.an('object');
            expect(response.body.data.completion).to.include({
                StudentID: 4,
                CompetencyID: 1,
                currentLevel: 9,
                baselineRating: 9,
                demonstrationsLogged: 12,
                demonstrationsMissed: 0,
                demonstrationsComplete: 12,
                demonstrationsAverage: 10,
                demonstrationsRequired: 12
            });
            // effectiveDemonstrationsData is keyed by skill ID
            expect(response.body.data).property('effectiveDemonstrationsData').to.be.an('object');
            expect(response.body.data.effectiveDemonstrationsData).to.have.all.keys('1', '2', '3', '4');
            expect(response.body.data.effectiveDemonstrationsData['1']).to.be.an('array');
            expect(response.body.data.effectiveDemonstrationsData['1']).to.have.length(3);
            expect(response.body.data.effectiveDemonstrationsData['1'][0]).to.include({
                ID: 1,
                Class: 'Slate\\CBL\\Demonstrations\\DemonstrationSkill',
                Created: 1546401845,
                CreatorID: 3,
                Modified: null,
                ModifierID: null,
                DemonstrationID: 1,
                SkillID: 1,
                TargetLevel: 9,
                DemonstratedLevel: 9,
                Override: false,
                DemonstrationDate: 1546304460
            });
        });
    });

    it('Can change DemonstratedLevel on existing student task rating', () => {
        cy.request('POST', '/cbl/student-tasks/save?format=json&include=Demonstration.DemonstrationSkills', {
            data: [{
                ID: 1,
                DemonstrationSkills: [{
                    ID: 1,
                    SkillID: 1,
                    DemonstratedLevel: 10
                }]
            }]
        }).then(response => {
            expect(response).property('status').to.eq(200);
            expect(response).property('body').to.be.an('object');
            expect(response.body).property('data').to.be.an('array').that.has.length(1);
            expect(response.body.data[0]).property('Demonstration').to.be.an('object');
            expect(response.body.data[0].Demonstration).property('DemonstrationSkills').to.be.an('array').that.has.length(1);
            expect(response.body.data[0].Demonstration.DemonstrationSkills[0]).to.be.an('object');
            // NOTE(review): Override is 0 here but boolean false on reads —
            // presumably the save response returns the raw column value; confirm.
            expect(response.body.data[0].Demonstration.DemonstrationSkills[0]).to.include({
                ID: 1,
                SkillID: 1,
                DemonstratedLevel: 10,
                TargetLevel: 9,
                Override: 0
            });
        });
    });

    it('Validation prevents TargetLevel changes to existing StudentTask rating', () => {
        cy.request('POST', '/cbl/student-tasks/save?format=json&include=Demonstration.DemonstrationSkills', {
            data: [{
                ID: 1,
                DemonstrationSkills: [{
                    ID: 1,
                    SkillID: 1,
                    TargetLevel: 10,
                    DemonstratedLevel: 11
                }]
            }]
        }).then(response => {
            // validation failures still respond 200 with success=false
            expect(response).property('status').to.eq(200);
            expect(response).property('body').to.be.an('object');
            expect(response.body).property('data').to.be.an('array');
            expect(response.body).property('message', 'TargetLevel cannot be changed on existing records');
            expect(response.body).property('success', false);
            expect(response.body).property('failed').to.be.an('array').that.has.length(1);
            expect(response.body.failed[0]).to.be.an('object').that.has.all.keys('record', 'validationErrors');
            expect(response.body.failed[0].record).to.be.an('object');
            expect(response.body.failed[0].validationErrors).to.be.an('object').that.has.all.keys('Demonstration');
            expect(response.body.failed[0].validationErrors.Demonstration).to.be.an('object').that.has.all.keys('DemonstrationSkills');
            expect(response.body.failed[0].validationErrors.Demonstration.DemonstrationSkills).to.be.an('array').that.has.length(1);
            expect(response.body.failed[0].validationErrors.Demonstration.DemonstrationSkills[0]).to.be.an('object').that.has.property('TargetLevel', 'TargetLevel cannot be changed on existing records')
        });
    });

    // Depends on the earlier "Can change DemonstratedLevel" test having set
    // DemonstratedLevel to 10 — the invalid edit above must not have reverted it.
    it('Rating remains unchanged after invalid edit', () => {
        cy.request('/cbl/student-tasks/1?format=json&include=Demonstration.DemonstrationSkills').then(response => {
            expect(response).property('status').to.eq(200);
            expect(response).property('body').to.be.an('object');
            expect(response.body).property('data').to.be.an('object');
            expect(response.body.data).property('Demonstration').to.be.an('object');
            expect(response.body.data.Demonstration).property('DemonstrationSkills').to.be.an('array').that.has.length(1);
            expect(response.body.data.Demonstration.DemonstrationSkills[0]).to.be.an('object');
            expect(response.body.data.Demonstration.DemonstrationSkills[0]).to.include({
                ID: 1,
                SkillID: 1,
                DemonstratedLevel: 10,
                TargetLevel: 9,
                Override: false
            });
        });
    });
});
<reponame>darwinbeing/deepdriving-tensorflow
import deep_learning as dl
import numpy as np
import os
import re
import tensorflow as tf
from .. import db
class CReader(dl.data.CReader):
    """Queue-based TFRecord input pipeline for the DeepDriving model (TF1 style).

    Builds file-reader graphs for training and/or validation data on the CPU
    and exposes the batched image/label tensors plus the ``IsTraining`` and
    ``Lambda`` (weight-decay factor) placeholders consumed by the rest of the
    graph.
    """

    def __init__(self, Settings, IsTraining, UsePreprocessing, ForceDataAugmentation):
        # Number of batches buffered in the input queue.
        self._BatchesInQueue = 30
        # Target input shape: height x width x 3 channels.
        self._ImageShape = [Settings['Data']['ImageHeight'], Settings['Data']['ImageWidth'], 3]
        self._Outputs = {
            # "Features": tf.placeholder(dtype=tf.float32, shape=[None, ] + self._ImageShape, name="Image"),
            # "Label": tf.placeholder(dtype=tf.int32, shape=[None, ], name="Label"),
            "Image": None,   # set by _build to the batched image tensor
            "Labels": None,  # set by _build to the batched label tensors
            "IsTraining": tf.placeholder(dtype=tf.bool, name="IsTraining"),
            "Lambda": tf.placeholder(dtype=tf.float32, name="Lambda")
        }
        # The parent constructor triggers _build.
        super().__init__(Settings, IsTraining, UsePreprocessing, ForceDataAugmentation)

    def _getOutputs(self, Inputs):
        # Expose placeholders and batched tensors to the rest of the network.
        return self._Outputs

    def _build(self, Settings):
        """Build the file-reader graph and fill in the Image/Labels outputs."""
        print("Build File-Reader Graph:")
        print("* Training is enabled: {}".format(self._IsTraining))

        # perform preprocessing of CPU to not switch between GPU/CPU all the time
        # see: https://www.tensorflow.org/performance/performance_guide
        with tf.device('/cpu:0'):
            if self._IsTraining:
                # Training pipeline: shuffled file queue with data augmentation.
                with tf.name_scope("TrainingReader"):
                    TrainingFilenames = db.getDBFilenames(Settings['Data']['TrainingPath'])
                    TrainingFileQueue = self._createFileQueue(TrainingFilenames, self._IsTraining)
                    TrainingInputs = self._buildRawReader(Settings, TrainingFileQueue)
                    TrainingPreprocInputs = self._buildPreprocessing(Settings, TrainingInputs, self._IsTraining)
                    TrainingBatchedInputs = self._createBatch(TrainingPreprocInputs, self.getBatchSize(), self._IsTraining)

            # Validation pipeline is always built (augmentation disabled).
            with tf.name_scope("ValidationReader"):
                TestingFilenames = db.getDBFilenames(Settings['Data']['ValidatingPath'])
                TestingFileQueue = self._createFileQueue(TestingFilenames, self._IsTraining)
                TestingInputs = self._buildRawReader(Settings, TestingFileQueue)
                TestingPreprocInputs = self._buildPreprocessing(Settings, TestingInputs, False)
                TestingBatchedInputs = self._createBatch(TestingPreprocInputs, self.getBatchSize(), self._IsTraining)

            if self._IsTraining:
                # Choose the pipeline at run time via the IsTraining placeholder.
                BatchedInput = tf.cond(self._Outputs['IsTraining'], lambda: TrainingBatchedInputs, lambda: TestingBatchedInputs)
            else:
                BatchedInput = TestingBatchedInputs

            # Element 0 is the image; elements 1..14 are the label tensors.
            self._Outputs["Image"] = BatchedInput[0]
            self._Outputs["Labels"] = BatchedInput[1:15]

        print("* Input-Image has shape {}".format(self._Outputs["Image"].shape))
        for i, Output in enumerate(self._Outputs['Labels']):
            print("* Input-Label {} has shape {}".format(i, Output.shape))
        return BatchedInput

    def _readBatch(self, Session, Inputs):
        # Batches are delivered by queue runners; only the placeholders need
        # to be fed per step.
        # BatchList = list(Session.run(Inputs))
        #print("Training: {}".format(self._IsTraining))
        return {
            # self._Outputs['Features']: BatchList[0],
            # self._Outputs['Label']: BatchList[1],
            self._Outputs['IsTraining']: self._IsTraining,
            self._Outputs['Lambda']: self._getWeightDecayFactor()
        }

    def _getBatchSize(self, Settings):
        return Settings['Data']['BatchSize']

    def _addSummaries(self, Inputs):
        # Log input images and the training switch to TensorBoard.
        tf.summary.image('Images', Inputs[0])
        tf.summary.scalar('IsTraining', tf.cast(self._Outputs['IsTraining'], tf.uint8))

    ## Custom Methods

    def _buildRawReader(self, Settings, FileQueue):
        """Read serialized examples from the queue and resize the images."""
        with tf.name_scope("FileReader"):
            Reader = tf.TFRecordReader()
            _, SerializedExample = Reader.read(FileQueue)
            Inputs = db.buildFeatureParser(SerializedExample)
            Inputs[0] = tf.image.resize_images(Inputs[0], size=(self._ImageShape[0], self._ImageShape[1]))
            return Inputs

    def _buildPreprocessing(self, Settings, Inputs, UseDataAugmentation):
        """Optionally augment and mean-normalize the image (element 0)."""
        Image = Inputs[0]
        if self._ForceDataAugmentation or UseDataAugmentation:
            # Augmentation is currently disabled (kept for reference).
            with tf.name_scope("DataAugmentation"):
                #print("* Perform data-augmentation")
                #Image = tf.image.random_brightness(Image, max_delta=0.10)
                #Image = tf.image.random_contrast(Image, lower=0.90, upper=1.10)
                #Image = tf.image.random_saturation(Image, lower=0.90, upper=1.10)
                #Image = tf.image.random_hue(Image, max_delta=0.05)
                pass

        if self._UsePreprocessing:
            with tf.name_scope("Preprocessing"):
                print("* Perform per-pixel normalization")
                # Subtract the dataset mean image (resized to the input size).
                MeanReader = dl.data.CMeanReader()
                MeanReader.read(Settings['PreProcessing']['MeanFile'])
                MeanImage = tf.image.resize_images(MeanReader.MeanImage, size=(int(Image.shape[0]), int(Image.shape[1])))
                Image = tf.subtract(Image, MeanImage)

        Inputs[0] = Image
        return Inputs

    def _getWeightDecayFactor(self):
        # Returns 0 when no weight decay is configured.
        if "Optimizer" in self._Settings:
            if "WeightDecay" in self._Settings["Optimizer"]:
                return self._Settings["Optimizer"]["WeightDecay"]
        return 0
|
/**
 * @fileoverview
 * Bootstrap for application
 */
goog.require("app.Core");
goog.require("app.Interface");
goog.require("app.Module");

// Plugins
goog.require("app.plugins.State");
goog.require("app.plugins.Fx");

// Wire the core to the shared interface, install plugins, then register and
// start the root module under the "app" name.
var core = new app.Core(app.Interface);

core.use([
    app.plugins.State,
    app.plugins.Fx
]);

core.register(app.Module, "app");
core.start("app");
|
def initialize_info_coupler(module_name: str):
    """Instantiate the info-coupler class registered for `module_name`.

    Args:
        module_name: key identifying the info-coupler module, e.g.
            'refcocog_infocpler'.

    Returns:
        A new instance of the mapped info-coupler class.

    Raises:
        ValueError: if `module_name` is not a supported key.
        ModuleNotFoundError: if the mapped module cannot be imported.
    """
    import importlib

    module_map = {
        'refcocog_infocpler': 'RefCOCOgInfoCpler',
        'refclef_infocpler': 'RefClefInfoCpler',
        'hatefulememes_infocpler': 'HatefulMemesInfoCpler',
        'visualentailment_infocpler': 'VisualEntailmentInfoCpler',
        'lxmertpretrain_infocpler': 'LXMERTPreTrainInfoCpler',
        'm4c': 'M4C'
    }
    # Guard clause: validate the key before attempting any import.
    if module_name not in module_map:
        raise ValueError(f"Module '{module_name}' not found or not supported")
    # importlib.import_module is the documented replacement for __import__
    # with a fromlist; the module names here are top-level, so the returned
    # module is the one holding the class.
    module = importlib.import_module(module_name)
    info_coupler_class = getattr(module, module_map[module_name])
    return info_coupler_class()
#!/usr/bin/env bash
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Fail fast: abort on command failure, unset variables, and pipeline errors.
set -o errexit
set -o nounset
set -o pipefail

# Unset CDPATH so that path interpolation can work correctly
# https://github.com/kubernetes/kubernetes/issues/52255
unset CDPATH

# Until all GOPATH references are removed from all build scripts as well,
# explicitly disable module mode to avoid picking up user-set GO111MODULE preferences.
# As individual scripts (like hack/update-vendor.sh) make use of go modules,
# they can explicitly set GO111MODULE=on
export GO111MODULE=off

# The root of the build/dist directory
KUBE_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd -P)"

# Local build output locations (overridable via KUBE_OUTPUT_SUBPATH).
KUBE_OUTPUT_SUBPATH="${KUBE_OUTPUT_SUBPATH:-_output/local}"
KUBE_OUTPUT="${KUBE_ROOT}/${KUBE_OUTPUT_SUBPATH}"
KUBE_OUTPUT_BINPATH="${KUBE_OUTPUT}/bin"

# This controls rsync compression. Set to a value > 0 to enable rsync
# compression for build container
KUBE_RSYNC_COMPRESS="${KUBE_RSYNC_COMPRESS:-0}"

# Set no_proxy for localhost if behind a proxy, otherwise,
# the connections to localhost in scripts will time out
export no_proxy=127.0.0.1,localhost

# This is a symlink to binaries for "this platform", e.g. build tools.
export THIS_PLATFORM_BIN="${KUBE_ROOT}/_output/bin"

# Shared helper libraries; logging installs the errexit trap handler.
source "${KUBE_ROOT}/hack/lib/util.sh"
source "${KUBE_ROOT}/hack/lib/logging.sh"

kube::log::install_errexit

source "${KUBE_ROOT}/hack/lib/version.sh"
source "${KUBE_ROOT}/hack/lib/golang.sh"
source "${KUBE_ROOT}/hack/lib/etcd.sh"

KUBE_OUTPUT_HOSTBIN="${KUBE_OUTPUT_BINPATH}/$(kube::util::host_platform)"
export KUBE_OUTPUT_HOSTBIN

# list of all available group versions. This should be used when generated code
# or when starting an API server that you want to have everything.
# most preferred version for a group should appear first
KUBE_AVAILABLE_GROUP_VERSIONS="${KUBE_AVAILABLE_GROUP_VERSIONS:-\
v1 \
admissionregistration.k8s.io/v1 \
admissionregistration.k8s.io/v1beta1 \
admission.k8s.io/v1 \
admission.k8s.io/v1beta1 \
apps/v1 \
apps/v1beta1 \
apps/v1beta2 \
authentication.k8s.io/v1 \
authentication.k8s.io/v1beta1 \
authorization.k8s.io/v1 \
authorization.k8s.io/v1beta1 \
autoscaling/v1 \
autoscaling/v2beta1 \
autoscaling/v2beta2 \
batch/v1 \
batch/v1beta1 \
batch/v2alpha1 \
certificates.k8s.io/v1 \
certificates.k8s.io/v1beta1 \
coordination.k8s.io/v1beta1 \
coordination.k8s.io/v1 \
discovery.k8s.io/v1alpha1 \
discovery.k8s.io/v1beta1 \
extensions/v1beta1 \
events.k8s.io/v1 \
events.k8s.io/v1beta1 \
imagepolicy.k8s.io/v1alpha1 \
networking.k8s.io/v1 \
networking.k8s.io/v1beta1 \
node.k8s.io/v1 \
node.k8s.io/v1alpha1 \
node.k8s.io/v1beta1 \
policy/v1beta1 \
rbac.authorization.k8s.io/v1 \
rbac.authorization.k8s.io/v1beta1 \
rbac.authorization.k8s.io/v1alpha1 \
scheduling.k8s.io/v1alpha1 \
scheduling.k8s.io/v1beta1 \
scheduling.k8s.io/v1 \
storage.k8s.io/v1beta1 \
storage.k8s.io/v1 \
storage.k8s.io/v1alpha1 \
flowcontrol.apiserver.k8s.io/v1alpha1 \
internal.apiserver.k8s.io/v1alpha1 \
}"

# not all group versions are exposed by the server. This list contains those
# which are not available so we don't generate clients or swagger for them
KUBE_NONSERVER_GROUP_VERSIONS="
abac.authorization.kubernetes.io/v0 \
abac.authorization.kubernetes.io/v1beta1 \
componentconfig/v1alpha1 \
imagepolicy.k8s.io/v1alpha1\
admission.k8s.io/v1\
admission.k8s.io/v1beta1\
"
export KUBE_NONSERVER_GROUP_VERSIONS
# This emulates "readlink -f" which is not available on MacOS X.
# Test:
# T=/tmp/$$.$RANDOM
# mkdir $T
# touch $T/file
# mkdir $T/dir
# ln -s $T/file $T/linkfile
# ln -s $T/dir $T/linkdir
# function testone() {
# X=$(readlink -f $1 2>&1)
# Y=$(kube::readlinkdashf $1 2>&1)
# if [ "$X" != "$Y" ]; then
# echo readlinkdashf $1: expected "$X", got "$Y"
# fi
# }
# testone /
# testone /tmp
# testone $T
# testone $T/file
# testone $T/dir
# testone $T/linkfile
# testone $T/linkdir
# testone $T/nonexistent
# testone $T/linkdir/file
# testone $T/linkdir/dir
# testone $T/linkdir/linkfile
# testone $T/linkdir/linkdir
# Print the canonicalized absolute path of $1. Directories (and symlinks to
# directories) are fully resolved via `pwd -P`; for files, a trailing symlink
# is resolved with `readlink` while the parent path is resolved physically.
function kube::readlinkdashf {
  # run in a subshell for simpler 'cd'
  (
    if [[ -d "${1}" ]]; then # This also catch symlinks to dirs.
      cd "${1}"
      pwd -P
    else
      cd "$(dirname "${1}")"
      local f
      f=$(basename "${1}")
      if [[ -L "${f}" ]]; then
        readlink "${f}"
      else
        echo "$(pwd -P)/${f}"
      fi
    fi
  )
}
# This emulates "realpath" which is not available on MacOS X
# Test:
# T=/tmp/$$.$RANDOM
# mkdir $T
# touch $T/file
# mkdir $T/dir
# ln -s $T/file $T/linkfile
# ln -s $T/dir $T/linkdir
# function testone() {
# X=$(realpath $1 2>&1)
# Y=$(kube::realpath $1 2>&1)
# if [ "$X" != "$Y" ]; then
# echo realpath $1: expected "$X", got "$Y"
# fi
# }
# testone /
# testone /tmp
# testone $T
# testone $T/file
# testone $T/dir
# testone $T/linkfile
# testone $T/linkdir
# testone $T/nonexistent
# testone $T/linkdir/file
# testone $T/linkdir/dir
# testone $T/linkdir/linkfile
# testone $T/linkdir/linkdir
# Print the resolved absolute path of $1; fail (status 1, message on stderr)
# when the path does not exist.
kube::realpath() {
  if [[ ! -e "${1}" ]]; then
    echo "${1}: No such file or directory" >&2
    return 1
  fi
  kube::readlinkdashf "${1}"
}
|
<filename>src/twg2/io/files/Bytes.java
package twg2.io.files;
/** A utility class for converting primitive types to/from bytes in big-endian order.
* For example converting an integer to 4 bytes and storing those 4 bytes
* at a specific location in a byte array.
* @author TeamworkGuy2
* @since 2014-4-27
*/
public final class Bytes {
	/** Static utility class: never instantiable. */
	private Bytes() { throw new AssertionError("cannot instantiate Bytes"); }
/** Write a double to the specified byte array as 8 bytes (big-endian order)
* @param value the double to write
* @param b the byte array to write the 'value' to
* @param offset the offset into the array at which to write the 8 bytes
*/
public static void writeDouble(double value, byte[] b, int offset) {
writeLong(Double.doubleToRawLongBits(value), b, offset);
}
/** Write a float to the specified byte array as 4 bytes (big-endian order)
* @param value the float to write
* @param b the byte array to write the 'value' to
* @param offset the offset into the array at which to write the 4 bytes
*/
public static void writeFloat(float value, byte[] b, int offset) {
writeInt(Float.floatToRawIntBits(value), b, offset);
}
/** Write a long to the specified byte array as 8 bytes (big-endian order)
* @param value the long to write
* @param b the byte array to write the 'value' to
* @param offset the offset into the array at which to write the 8 bytes
*/
public static void writeLong(long value, byte[] b, int offset) {
b[offset] = (byte)(value >>> 56);
b[offset+1] = (byte)(value >>> 48);
b[offset+2] = (byte)(value >>> 40);
b[offset+3] = (byte)(value >>> 32);
b[offset+4] = (byte)(value >>> 24);
b[offset+5] = (byte)(value >>> 16);
b[offset+6] = (byte)(value >>> 8);
b[offset+7] = (byte)(value );
}
/** Write an int to the specified byte array as 4 bytes (big-endian order)
* @param value the integer to write
* @param b the byte array to write the 'value' to
* @param offset the offset into the array at which to write the 4 bytes
*/
public static void writeInt(int value, byte[] b, int offset) {
b[offset] = (byte)(value >>> 24);
b[offset+1] = (byte)(value >>> 16);
b[offset+2] = (byte)(value >>> 8);
b[offset+3] = (byte)(value );
}
/** Write a short to the specified byte array as 2 bytes (big-endian order)
* @param value the short to write
* @param b the byte array to write the 'value' to
* @param offset the offset into the array at which to write the 2 bytes
*/
public static void writeShort(short value, byte[] b, int offset) {
b[offset] = (byte)(value >>> 8);
b[offset+1] = (byte)(value );
}
/** Write a boolean to the specified byte array as 1 byte (1=true, 0=false) (big-endian order)
* @param value the boolean to write
* @param b the byte array to write the 'value' to
* @param offset the offset into the array at which to write the byte
*/
public static void writeBoolean(boolean value, byte[] b, int offset) {
b[offset] = (byte)(value ? 1 : 0);
}
/** Read a double value from the specified location in the specified array (assumes big-endian order)
* @param b the array to read the double from
* @param offset the offset into the array at which to read the 8 bytes
* @return eight bytes read from the indices {@code [offset, offset+3]} and
* converted to a double
*/
public static double readDouble(byte[] b, int offset) {
return Double.longBitsToDouble(readLong(b, offset));
}
/** Read a float value from the specified location in the specified array (assumes big-endian order)
 * @param b the array to read the float from
 * @param offset the offset into the array at which to read the 4 bytes
 * @return four bytes read from the indices {@code [offset, offset+3]} and
 * converted to a float
 */
public static final float readFloat(byte[] b, int offset) {
    // Delegates to readInt, then reinterprets the 32 raw bits as a float.
    return Float.intBitsToFloat(readInt(b, offset));
}
/** Read a long value from the specified location in the specified array (assumes big-endian order)
 * @param b the array to read the long from
 * @param offset the offset into the array at which to read the 8 bytes
 * @return eight bytes read from the indices {@code [offset, offset+7]} and converted to
 * a long by {@code ((long)b[offset] << 56) | ((long)(b[offset+1] & 0xFF) << 48) |
 * ((long)(b[offset+2] & 0xFF) << 40) | ((long)(b[offset+3] & 0xFF) << 32) |
 * ((long)(b[offset+4] & 0xFF) << 24) | ((b[offset+5] & 0xFF) << 16) |
 * ((b[offset+6] & 0xFF) << 8) | (b[offset+7] & 0xFF);}
 */
public static final long readLong(byte[] b, int offset) {
    // Each lower byte is masked with 0xFF so Java's signed-byte widening
    // cannot sign-extend into the higher bits of the result.
    return ((long)b[offset] << 56) |
    ((long)(b[offset+1] & 0xFF) << 48) |
    ((long)(b[offset+2] & 0xFF) << 40) |
    ((long)(b[offset+3] & 0xFF) << 32) |
    ((long)(b[offset+4] & 0xFF) << 24) |
    ((b[offset+5] & 0xFF) << 16) |
    ((b[offset+6] & 0xFF) << 8) |
    (b[offset+7] & 0xFF);
}
/** Read an integer value from the specified location in the specified array (assumes big-endian order)
 * @param b the array to read the integer from
 * @param offset the offset into the array at which to read the 4 bytes
 * @return four bytes read from the indices {@code [offset, offset+3]} and converted to
 * an integer by {@code ((b[offset] & 0xFF) << 24) | ((b[offset+1] & 0xFF) << 16) |
 * ((b[offset+2] & 0xFF) << 8) | (b[offset+3] & 0xFF)}
 */
public static final int readInt(byte[] b, int offset) {
    // Mask each byte with 0xFF before shifting: bytes are signed in Java and
    // a negative byte would otherwise sign-extend and corrupt the higher bits
    // of the result (same masking discipline as readLong above).
    return ((b[offset]   & 0xFF) << 24) |
           ((b[offset+1] & 0xFF) << 16) |
           ((b[offset+2] & 0xFF) << 8)  |
            (b[offset+3] & 0xFF);
}
/** Read a short value from the specified location in the specified array (assumes big-endian order)
 * @param b the array to read the short from
 * @param offset the offset into the array at which to read the 2 bytes
 * @return two bytes read from indices {@code offset} and {@code offset+1} and converted to
 * a short by {@code (short)(((b[offset] & 0xFF) << 8) | (b[offset+1] & 0xFF))}
 */
public static final short readShort(byte[] b, int offset) {
    // Mask the low byte: without & 0xFF a negative b[offset+1] sign-extends
    // to 0xFFFFFFxx and wipes out the high byte contributed by b[offset].
    return (short)(((b[offset] & 0xFF) << 8) | (b[offset+1] & 0xFF));
}
/** Read a boolean value from the specified location in the specified byte array (1=true, 0=false)
 * @param b the array to read the boolean from
 * @param offset the offset into the array at which to read the byte
 * @return {@code true} if the byte at {@code offset} is non-zero,
 * {@code false} otherwise
 */
public static final boolean readBoolean(byte[] b, int offset) {
    // Any non-zero byte is treated as true, matching writeBoolean's 1/0 encoding.
    return b[offset] != 0;
}
}
|
Selection sort is a sorting algorithm that works by repeatedly selecting the minimum element from the unsorted part of the list, swapping it with the first element of the unsorted part, and moving the sublist boundary one element to the right.
Selection sort has a time complexity of O(n²) and is an in-place sorting algorithm, meaning it does not require any additional space for sorting. It is often considered less efficient than other sorting algorithms such as quick sort or merge sort.
require './config/application'
app = BestQuotes::Application.new
# Normalise the Content-Type header on every response.
use Rack::ContentType
# Route table: specific routes first, generic ":controller" fallbacks last.
app.route do
  match "", "quotes#index"
  # A bare Rack endpoint mounted as a sub-application.
  match "sub-app",
    proc { [200, {}, ["Hello, sub-app!"]] }
  # default routes
  match ":controller/:id/:action"
  match ":controller/:id",
    :default => { "action" => "show" }
  match ":controller",
    :default => { "action" => "index" }
end
run app
|
import random
import string
def random_password(char_length=8):
    """Generate a random password of ``char_length`` characters.

    Characters are drawn from letters, digits and punctuation; generation is
    retried until the password contains at least three of the four character
    classes (uppercase, lowercase, digit, punctuation).

    Args:
        char_length: Desired password length (default 8, preserving the
            original hard-coded behaviour).

    Returns:
        str: A password of length ``char_length``.
    """
    characters = string.ascii_letters + string.digits + string.punctuation
    # Loop instead of recursing: same retry semantics without any risk of
    # hitting the recursion limit on an unlucky streak of draws.
    while True:
        password = ''.join(random.choice(characters) for _ in range(char_length))
        has_uppercase = any(c.isupper() for c in password)
        has_lowercase = any(c.islower() for c in password)
        has_numbers = any(c.isnumeric() for c in password)
        has_specials = any(c in string.punctuation for c in password)
        # Require at least 3 of the 4 character classes.
        if has_uppercase + has_lowercase + has_numbers + has_specials >= 3:
            return password
# Call the generator — the original was missing the parentheses, so it
# assigned the function object itself rather than a password string.
pwd = random_password()
import { Directive, Field, ObjectType } from '@nestjs/graphql';
import { Feature, Node, Price } from '@ultimatebackend/contracts';
/**
 * GraphQL object type for a subscription plan, exposed as an Apollo
 * Federation entity keyed by the "id" field.
 */
@Directive(`@key(fields: "id")`)
@ObjectType()
export class Plan extends Node {
  /** Display name of the plan. */
  @Field()
  name: string;
  /** Normalized variant of the name (marked definitely assigned). */
  @Field()
  normalizedName!: string;
  /** Price points attached to this plan, if any. */
  @Field(() => [Price], { nullable: true })
  prices: Price[];
  /** Features included in this plan, if any. */
  @Field(() => [Feature], { nullable: true })
  features: Feature[];
  /** Whether the plan is free of charge (optional in the schema). */
  @Field({ nullable: true })
  free: boolean;
  /** Whether the plan is currently active. */
  @Field()
  active: boolean;
}
|
package com.zhy.flexboxlayout.tag;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
// Demo activity: renders a set of sample labels as selectable tags in a
// TagFlowLayout, toggling each tag's background on (un)select.
public class MainActivity extends AppCompatActivity
{
    TagFlowLayout tagFlowLayout;
    LayoutInflater mInflater;
    // Sample tag labels rendered into the flow layout.
    private String[] mVals = new String[]
            {"Hello", "Android", "Weclome Hi ", "Button", "TextView", "Hello",
                    "Android", "Weclome", "Button ImageView", "TextView", "Helloworld",
                    "Android", "Weclome Hello", "Button Text", "TextView"};
    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mInflater = LayoutInflater.from(this);
        tagFlowLayout = (TagFlowLayout) findViewById(R.id.id_tagFlowLayout);
        // Adapter inflates one TextView (layout R.layout.tv) per label and
        // swaps its background drawable when the tag is selected/unselected.
        tagFlowLayout.setAdapter(new TagAdapter<String>(mVals)
        {
            @Override
            protected View getView(ViewGroup parent, int position, String s)
            {
                TextView tv = (TextView) mInflater.inflate(R.layout.tv,
                        parent, false);
                tv.setText(s);
                return tv;
            }
            @Override
            protected void onSelect(ViewGroup parent, View view, int position)
            {
                view.setBackgroundResource(R.drawable.checked_bg);
            }
            @Override
            protected void onUnSelect(ViewGroup parent, View view, int position)
            {
                view.setBackgroundResource(R.drawable.normal_bg);
            }
//            @Override
//            protected boolean select(int position)
//            {
////                if (position == 0) return true;
//
//                return super.select(position);
//            }
        });
//        tagFlowLayout.setJustifyContent(FlexboxLayout.JUSTIFY_CONTENT_CENTER);
    }
}
|
<gh_stars>0
package cyclops.pure.typeclasses.taglessfinal;
import cyclops.function.higherkinded.DataWitness.io;
import cyclops.container.control.Option;
import cyclops.container.immutable.tuple.Tuple2;
import cyclops.pure.instances.reactive.IOInstances;
import cyclops.reactive.IO;
import org.junit.Before;
import org.junit.Test;
import static cyclops.pure.typeclasses.taglessfinal.Cases.Account;
// Tagless-final example test: runs a transfer program interpreted in the
// IO monad against an in-memory account store.
public class TaglessFinal4Test {
    // Accounts used by the transfer under test.
    private Account acc1;
    private Account acc2;
    // Program under test, parameterised over the IO higher-kinded witness.
    ProgramStore<io> prog;
    @Before
    public void setup(){
        // acc1 starts with 10000, acc2 starts empty.
        acc1 = new Account(10000d,10);
        acc2 = new Account(0d,11);
        StoreIO<Long,Account> store = new StoreIO<>();
        store.put(acc1.getId(),acc1);
        store.put(acc2.getId(),acc2);
        prog = new ProgramStore<>(IOInstances.monad(),new AccountIO2(store),acc2,acc1);
    }
    @Test
    public void programB(){
        // Run transfer(100) and print the resulting pair of optional accounts.
        IO<Tuple2<Option<Account>, Option<Account>>> res = prog.transfer(100, IO::narrowK);
        res.run().peek(System.out::println);
    }
}
|
"use strict";
var axios = require("axios");
var secretary = require("./secretary.js");
var listManager = require("./listManager.js");
var emojis = require("../assets/emojis.json");
// Greeting prefixes; one is picked at random for each question.
var greetings = ["Hello", "Bonjour", "Salut"];
// Messenger Send API endpoint, authenticated with the page access token.
var pageUrl =
  "https://graph.facebook.com/v2.6/me/messages?access_token=" +
  process.env.FB_PAGE_ACCESS_TOKEN;
// POST a message payload to the Messenger Send API for the given recipient.
// Returns the axios Promise.
var sendMessage = function sendMessage(senderId, messageObject) {
  var payload = {
    recipient: { id: senderId },
    message: messageObject
  };
  return axios.post(pageUrl, payload);
};
// Show the "typing…" indicator to the user while a reply is being prepared.
var notifyProcessing = function notifyProcessing(senderId) {
  var payload = {
    recipient: { id: senderId },
    sender_action: "typing_on"
  };
  return axios.post(pageUrl, payload);
};
// Send a quiz question: first the person's photo, then a greeting with
// answer/hint buttons. When `cheer` is set, prefixes a congratulation for
// the previous correct answer. Returns a Promise for the second send.
var sendQuestion = function sendQuestion(senderId, randomPerson, cheer) {
  var randomEmoji = listManager.getRandomItem(emojis.happy);
  var randGreeting = listManager.getRandomItem(greetings);
  return sendMessage(senderId, secretary.getImageMsg(randomPerson.img))
    .then(function() {
      return sendMessage(
        senderId,
        secretary.getMsgWithButtons(
          (cheer ? "Bravo " + String.fromCodePoint(randomEmoji) + " ! " : "") +
            (randGreeting + "...?"),
          [
            secretary.getButton("Voir la réponse (r)", "ANSWER"),
            secretary.getButton("Indice (i)", "HINT")
          ]
        )
      );
    })
    .catch(function(err) {
      // The original `return err` resolved the promise WITH the error,
      // silently masking send failures from every caller. Log and rethrow
      // so failures propagate as rejections.
      console.error("sendQuestion failed", err);
      throw err;
    });
};
// Tell the user their answer was wrong and offer a hint or the answer.
var sendResponseToBadAnswer = function sendResponseToBadAnswer(senderId) {
  // Soften the "try again" message with a cheerful emoji.
  var emoji = listManager.getRandomItem(emojis.happy);
  return sendMessage(
    senderId,
    secretary.getMsgWithButtons("Essaye encore " + String.fromCodePoint(emoji), [
      secretary.getButton("Un indice ! (i)", "HINT"),
      secretary.getButton("La réponse (r)", "ANSWER")
    ])
  );
};
// Reveal the person's first name, with replay/score buttons.
var sendAnswer = function sendAnswer(senderId, firstname) {
  var emoji = listManager.getRandomItem(emojis.happy);
  var text = "C'est " + firstname + " " + String.fromCodePoint(emoji);
  var buttons = [
    secretary.getButton("Rejouer (p)", "INIT_PLAY"),
    secretary.getButton("Score (s)", "SCORE")
  ];
  return sendMessage(senderId, secretary.getMsgWithButtons(text, buttons));
};
// Send a hint: the first letter of the person's key, capitalised.
var sendHint = function sendHint(senderId, personKey) {
  var hint = personKey.substring(0, 1).toUpperCase() + "...";
  var buttons = [
    secretary.getButton("Voir la réponse (r)", "ANSWER"),
    secretary.getButton("Score (s)", "SCORE")
  ];
  return sendMessage(senderId, secretary.getMsgWithButtons(hint, buttons));
};
// Fallback reply when the user's input was not understood.
var sendPuzzledApology = function sendPuzzledApology(senderId) {
  var helpers = [secretary.getQuickReply("À l'aide ! (h)", "INIT_HELP")];
  var apology = secretary.getMsgWithHelpers(
    "Hum, j'ai peur de ne pas comprendre",
    helpers
  );
  return sendMessage(senderId, apology);
};
// Send the game rules and the available one-letter shortcuts.
var sendHelpMessage = function sendHelpMessage(senderId) {
  var helpText =
    'Dire bonjour à ses collègues avec leur prénom ça fait toujours marquer des points !\nLa liste se trouve ici : https://www.theodo.fr/equipe \n"Play" (p) pour commencer une série\n"Indice" (i) pour un petit coup de pouce\n"Stats" (s) pour le score\n"Réponse" (r) pour donner sa langue au chat';
  var helpers = [secretary.getQuickReply("C'est parti !", "INIT_PLAY")];
  return sendMessage(senderId, secretary.getMsgWithHelpers(helpText, helpers));
};
// Initial prompt: offer the play/help quick replies.
var sendInitMessage = function sendInitMessage(senderId) {
  var choices = [
    secretary.getQuickReply("Jouer (p)", "INIT_PLAY"),
    secretary.getQuickReply("Aide (h)", "INIT_HELP")
  ];
  return sendMessage(
    senderId,
    secretary.getMsgWithHelpers("Que veux-tu faire ?", choices)
  );
};
// Send the user's score and coverage percentages, each with its emoji.
var sendScore = function sendScore(senderId, scoreValues) {
  var scoreLine =
    "Score : " +
    scoreValues.score.value +
    "% " +
    String.fromCodePoint(scoreValues.score.emoji);
  var coverageLine =
    "Coverage: " +
    scoreValues.coverage.value +
    "% " +
    String.fromCodePoint(scoreValues.coverage.emoji);
  return sendMessage(senderId, { text: scoreLine + "\nCoverage: ".replace("Coverage: ", "") + coverageLine });
};
// Public API — every sender returns the axios Promise for the underlying
// Messenger Send API call.
module.exports = {
  notifyProcessing: notifyProcessing,
  sendAnswer: sendAnswer,
  sendHelpMessage: sendHelpMessage,
  sendHint: sendHint,
  sendInitMessage: sendInitMessage,
  sendPuzzledApology: sendPuzzledApology,
  sendQuestion: sendQuestion,
  sendResponseToBadAnswer: sendResponseToBadAnswer,
  sendScore: sendScore
};
|
<reponame>jkhoel/storekeeper
import chai from 'chai';
import chaiHttp from 'chai-http';
import app from '../src/app';
// Configure Chai
chai.use(chaiHttp);
chai.should();
// Smoke tests for the API root endpoint.
describe('/api/v1/:', function () {
  it('GET /api/v1/ - Should respond to a basic request to root endpoint', function (done) {
    chai
      .request(app)
      .get('/api/v1/')
      .end(function (err, res) {
        res.should.have.status(200);
        res.body.should.be.a('object');
        done();
      });
  });
  it('POST /api/v1/ - Should return the header and body of the requests sent', function (done) {
    chai
      .request(app)
      .post('/api/v1/')
      .type('application/json')
      .send({ foo: 'bar' })
      .end(function (err, res) {
        res.should.have.status(200);
        res.body.should.be.a('object');
        // The endpoint is expected to echo both parts of the request.
        res.body.should.include.keys('header', 'body');
        done();
      });
  });
});
|
<filename>app/framework/scroll-helpers.js
import settings from 'kursausschreibung/framework/settings';
/**
 * Scroll to the element with the given id after a 500ms delay.
 * @param {string} elementId id of html element
 */
export function scrollToTimeout(elementId) {
  const delayMs = 500;
  setTimeout(() => scrollToTargetAdjusted(elementId), delayMs);
}
/**
 * Configure the uk-sticky attribute on an element using the header offset
 * from settings.
 * @param {string} elementId id of html element
 */
export function setOffsetStickyHeader(elementId) {
  const stickyValue = 'offset: ' + settings.headerOffset + '; bottom: #top';
  const element = document.getElementById(elementId);
  element.setAttribute('uk-sticky', stickyValue);
}
/**
 * Smooth-scroll so the element sits settings.headerOffset pixels below the
 * top of the viewport (compensates for the sticky header).
 * @param {string} elementId id of html element
 */
function scrollToTargetAdjusted(elementId) {
  const target = document.getElementById(elementId);
  const absoluteTop = window.scrollY + target.getBoundingClientRect().top;
  window.scrollTo({
    top: absoluteTop - settings.headerOffset,
    behavior: "smooth"
  });
}
<gh_stars>0
// Re-export the addon's x-sync utility as this module's default.
export { default } from 'ember-data-on-redux/utils/x-sync';
|
#!/bin/bash
# Run the vis4lang ablation for task 18 with a pretrained ctrl_uniter
# checkpoint (output path indicates Flickr30k Entities). The object-masking
# overlap threshold is taken from the first CLI argument.
TASK=18
MODEL=ctrl_uniter
MODEL_CONFIG=ctrl_uniter_base
TASKS_CONFIG=xm-influence_test_tasks
PRETRAINED=/science/image/nlp-datasets/emanuele/checkpoints/xm-influence/ctrl_uniter_thr04gt/conceptual_captions/ctrl_uniter_base/pytorch_model_9.bin
OUTPUT_DIR=/science/image/nlp-datasets/emanuele/results/xm-influence/flickr30kentities_vis4lang/${MODEL}_thr04gt
# Overlap threshold for object masking, passed as the first CLI argument.
THR=$1
source activate /science/image/nlp-datasets/emanuele/envs/xm-influence
cd ../../../../volta
python ablate_vis4lang.py \
  --bert_model bert-base-uncased --config_file config/${MODEL_CONFIG}.json --from_pretrained ${PRETRAINED} \
  --tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK --split val \
  --output_dir ${OUTPUT_DIR} --dump_results --masking object --overlap_threshold $THR
conda deactivate
|
'use strict';
angular.module('ffffng')
    .factory('ConfirmDeletionDialog', function ($uibModal, config) {
        // Modal controller: exposes the node (and app config) to the template.
        // "proceed" closes the dialog (confirm); "cancel" dismisses it.
        var ctrl = function ($scope, $uibModalInstance, node) {
            $scope.node = node;
            $scope.config = config;
            $scope.proceed = function () {
                $uibModalInstance.close();
            };
            $scope.cancel = function () {
                $uibModalInstance.dismiss('cancel');
            };
        };
        return {
            // Open the confirmation dialog for the given node; returns the
            // $uibModal instance so callers can chain on its .result promise.
            open: function (node) {
                return $uibModal.open({
                    controller: ctrl,
                    templateUrl: 'views/dialogs/confirmDeletionDialog.html',
                    resolve: {
                        node: function () { return node; }
                    }
                });
            }
        };
    });
|
<reponame>MerlinRed/crud_nodejs
const mongoose = require("mongoose");
const { UsersSchema } = require("./schemas")
// Create the Users model (backed by the "main_users" collection).
module.exports.UsersModel = mongoose.model("main_users", UsersSchema)
|
#!/bin/sh
# Since CMake cannot build the static and dynamic libraries in the same
# directory, this script helps build both static and dynamic versions of it and
# have the common flags in one place instead of split between two places.
set -e
if [ "$#" -eq "0" ]; then
  echo "Usage: $0 <--dynamic|--static>">&2
  exit 1
fi
ROOT="$(cd "$(dirname "$0")/.." && echo "${PWD}")"
VENDORED_PATH="${ROOT}/vendor/libgit2"
# Pick a separate build tree per flavour so the two builds never collide.
case "$1" in
  --static)
    BUILD_PATH="${ROOT}/static-build"
    BUILD_SHARED_LIBS=OFF
    ;;
  --dynamic)
    BUILD_PATH="${ROOT}/dynamic-build"
    BUILD_SHARED_LIBS=ON
    ;;
  *)
    echo "Usage: $0 <--dynamic|--static>">&2
    exit 1
    ;;
esac
mkdir -p "${BUILD_PATH}/build" "${BUILD_PATH}/install/lib"
# -fPIC keeps the static archive usable when linked into shared objects.
cd "${BUILD_PATH}/build" &&
  cmake -DTHREADSAFE=ON \
    -DBUILD_CLAR=OFF \
    -DBUILD_SHARED_LIBS"=${BUILD_SHARED_LIBS}" \
    -DREGEX_BACKEND=builtin \
    -DCMAKE_C_FLAGS=-fPIC \
    -DCMAKE_BUILD_TYPE="RelWithDebInfo" \
    -DCMAKE_INSTALL_PREFIX="${BUILD_PATH}/install" \
    "${VENDORED_PATH}" &&
  exec cmake --build . --target install
|
import {objKeyToLowerCase} from '../bookModuleByName.js';
describe('reducer - bookModuleByName', () => {
  describe('objKeyToLowerCase', () => {
    it('should transform keys of object to lowercase - 1', () => {
      const obj = {
        Title: 'react',
        Descrition: 'react is awesome'
      };
      const result = objKeyToLowerCase(obj);
      expect(result).toEqual({title: 'react', descrition: 'react is awesome'});
      // Identity check: the helper mutates and returns the SAME object.
      expect(result).toBe(obj);
    });
    it('should transform keys of object to lowercase - 2', () => {
      // Edge case: an empty object passes through unchanged.
      const obj = {};
      const result = objKeyToLowerCase(obj);
      expect(result).toEqual({});
    });
  });
});
|
#!/usr/bin/env node
const { JsonRpc } = require('eosjs')
const fetch = require('node-fetch')
const massive = require('massive')
const dbConfig = require('../config/dbConfig')
// EOS RPC endpoint; override via env var, defaults to the Jungle testnet.
const EOS_API_ENDPOINT =
  process.env.EOS_API_ENDPOINT || 'https://jungle.eosio.cr'
// gets data from blockchain
const getUserRatings = async () => {
  const rpc = new JsonRpc(EOS_API_ENDPOINT, { fetch })
  // Read the full ratings table from the rateproducer contract.
  const ratings = await rpc.get_table_rows({
    json: true,
    code: 'rateproducer',
    scope: 'rateproducer',
    table: 'ratings',
    limit: 1000,
    reverse: false,
    show_payer: false
  })
  return ratings
}
// updates the postgresdb
/**
 * Sync a single user's rating of a block producer from chain into Postgres.
 * @param {string} userAccount rater's account name
 * @param {string} bpAccount block producer's account name
 * @returns {Promise<Object|Error>} the persisted rating row, or the error
 *   object. NOTE(review): errors are RETURNED rather than thrown (existing
 *   behaviour preserved) — callers must check the result type.
 */
const updateUserRatings = async (userAccount, bpAccount) => {
  console.log('\x1b[33m%s\x1b[0m', '==== updating user ratings ====')
  try {
    const db = await massive(dbConfig)
    const userRatings = await getUserRatings()
    if (!userAccount || !bpAccount)
      throw new Error('User Account and Block Producer owner are required!')
    // Strict equality on both fields (the original compared `user` loosely
    // with ==, inconsistent with the === used for `bp`).
    const [blockProducer] = userRatings.rows.filter(
      ({ user, bp }) => user === userAccount && bp === bpAccount
    )
    if (!blockProducer) throw new Error('Block Producer rate not found')
    // Default every missing metric to 0 so the DB row is always complete.
    const ratings = {
      transparency: blockProducer.transparency || 0,
      infrastructure: blockProducer.infrastructure || 0,
      trustiness: blockProducer.trustiness || 0,
      development: blockProducer.development || 0,
      community: blockProducer.community || 0
    }
    const row = {
      uniq_rating: blockProducer.uniq_rating,
      user: blockProducer.user,
      bp: blockProducer.bp,
      ratings
    }
    // Try an update first; fall back to insert when no row matched.
    const result = await db.user_ratings.save(row)
    if (!result) {
      const insertResult = await db.user_ratings.insert(row)
      if (!insertResult)
        throw new Error(`could not save or insert ${blockProducer.uniq_rating}`)
    }
    return {
      uniq_rating: blockProducer.uniq_rating,
      user: blockProducer.user,
      bp: blockProducer.bp,
      ratings
    }
  } catch (error) {
    console.error('updateUserRatings', error)
    return error
  }
}
module.exports = updateUserRatings
|
/**
* Autogenerated code by SdkModelGenerator.
* Do not edit. Any modification on this file will be removed automatically after project build
*
*/
package test.backend.www.model.hotelbeds.basic.model;
import javax.validation.constraints.NotNull;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
@JsonInclude(Include.NON_NULL)
@ToString
@NoArgsConstructor
@Data
public class Keyword {
    // Keyword identifier code (required — validated by @NotNull).
    @NotNull
    private Integer code;
    // Optional rating associated with the keyword.
    private Integer rating;
}
#!/bin/bash
# store current kernel log level
read -r printk < /proc/sys/kernel/printk
printk=${printk:0:1}
# Set it to 0
echo 0 > /proc/sys/kernel/printk
# Pull in helper functions (find_online_pools, import_pool, draw_be,
# key_wrapper, kexec_kernel, ...) from whichever location the library is at.
test -f /lib/zfsbootmenu-lib.sh && source /lib/zfsbootmenu-lib.sh
test -f zfsbootmenu-lib.sh && source zfsbootmenu-lib.sh
echo "Loading boot menu ..."
TERM=linux
tput reset
OLDIFS="$IFS"
export FZF_DEFAULT_OPTS="--layout=reverse-list --cycle \
--inline-info --tac"
BASE="$( mktemp -d /tmp/zfs.XXXX )"
# I should probably just modprobe zfs right off the bat
modprobe zfs 2>/dev/null
udevadm settle
# Find all pools by name that are listed as ONLINE, then import them
response="$( find_online_pools )"
ret=$?
if [ $ret -gt 0 ]; then
  import_success=0
  IFS=',' read -a zpools <<<"${response}"
  for pool in "${zpools[@]}"; do
    import_pool ${pool}
    ret=$?
    if [ $ret -eq 0 ]; then
      import_success=1
    fi
  done
  # At least one pool must import; otherwise drop to an emergency shell.
  if [ $import_success -ne 1 ]; then
    emergency_shell "unable to successfully import a pool"
  fi
else
  if [ $die_on_import_failure -eq 1 ]; then
    emergency_shell "no pools available to import"
    exit;
  fi
fi
# Prefer a specific pool when checking for a bootfs value
if [ "${root}" = "zfsbootmenu" ]; then
  pool=
else
  pool="${root}"
fi
# Attempt to find the bootfs property
datasets="$( zpool list -H -o bootfs ${pool} )"
# "-" means no bootfs is set on that pool; take the first real value found.
while read -r line; do
  if [ "${line}" = "-" ]; then
    BOOTFS=
  else
    BOOTFS="${line}"
    break
  fi
done <<<"${datasets}"
# If BOOTFS is not empty display the fast boot menu
fast_boot=0
if [[ ! -z "${BOOTFS}" ]]; then
  # Draw a countdown menu
  if [[ ${menu_timeout} -gt 0 ]]; then
    # Clear the screen
    tput civis
    HEIGHT=$(tput lines)
    WIDTH=$(tput cols)
    tput clear
    # Draw the line centered on the screen
    mes="[ENTER] to boot"
    x=$(( ($HEIGHT - 0) / 2 ))
    y=$(( ($WIDTH - ${#mes}) / 2 ))
    tput cup $x $y
    echo -n ${mes}
    # Draw the line centered on the screen
    mes="[ESC] boot menu"
    x=$(( $x + 1 ))
    y=$(( ($WIDTH - ${#mes}) / 2 ))
    tput cup $x $y
    echo -n ${mes}
    x=$(( $x + 1 ))
    tput cup $x $y
    IFS=''
    for (( i=${menu_timeout}; i>0; i--)); do
      mes="$( printf 'Booting %s in %0.2d seconds' ${BOOTFS} ${i} )"
      y=$(( ($WIDTH - ${#mes}) / 2 ))
      tput cup $x $y
      echo -ne "${mes}"
      # Wait 1 second for input
      read -s -N 1 -t 1 key
      # Escape key
      if [ "$key" = $'\e' ]; then
        break
      # Enter key
      elif [ "$key" = $'\x0a' ]; then
        fast_boot=1
        break
      fi
    done
    IFS="${OLDIFS}"
  elif [[ ${menu_timeout} -eq 0 ]]; then
    # Bypass the menu, immediately boot $BOOTFS
    fast_boot=1
  else
    # Make sure we bypass the other fastboot check
    i=1
  fi
  # Boot up if we timed out, or if the enter key was pressed
  if [[ ${fast_boot} -eq 1 || $i -eq 0 ]]; then
    if ! key_wrapper "${BOOTFS}" ; then
      emergency_shell "unable to load required key for ${BOOTFS}"
    fi
    # Generate a list of valid kernels for our bootfs
    if output=$( find_be_kernels "${BOOTFS}" ); then
      # Automatically select a kernel and boot it
      kexec_kernel "$( select_kernel "${BOOTFS}" )"
    fi
  fi
fi
##
# No automatic boot has taken place
# Find all ZFS filesystems on any pool that mount to /
# Load any keys as we come across them
# If any kernels were found in /boot for a BE, add that BE to our menu
##
# Find any filesystems that mount to /, see if there are any kernels present
for FS in $( zfs list -H -o name,mountpoint | grep -E "/$" | cut -f1 ); do
  if ! key_wrapper "${FS}" ; then
    continue
  fi
  # Check for kernels under the mountpoint, add to our BE list
  if output="$( find_be_kernels "${FS}" )" ; then
    echo ${FS} >> ${BASE}/env
  fi
done
if [ ! -f ${BASE}/env ]; then
  emergency_shell "no boot environments with kernels found"
fi
# This is the actual menuing system
BE_SELECTED=0
tput civis
while true; do
  if [ ${BE_SELECTED} -eq 0 ]; then
    bootenv="$( draw_be "${BASE}/env" )"
    ret=$?
    # key press
    # bootenv
    IFS=, read key selected_be <<<"${bootenv}"
    if [ $ret -eq 0 ]; then
      BE_SELECTED=1
    fi
  fi
  if [ ${BE_SELECTED} -eq 1 ]; then
    # Keybindings: enter=boot, alt-k=pick kernel, alt-d=set default BE,
    # alt-s=snapshots of this BE, alt-a=all snapshots, alt-r=recovery shell.
    # A 130 return code (presumably a cancelled fzf selection — confirm in
    # zfsbootmenu-lib.sh) returns to the BE list.
    case "${key}" in
      "enter")
        kexec_kernel "$( select_kernel "${selected_be}" )"
        exit
        ;;
      "alt-k")
        selected_kernel="$( draw_kernel "${selected_be}" )"
        ret=$?
        if [ $ret -eq 130 ]; then
          BE_SELECTED=0
        elif [ $ret -eq 0 ] ; then
          kexec_kernel "${selected_kernel}"
          exit
        fi
        ;;
      "alt-d")
        set_default_env "${selected_be}"
        BE_SELECTED=0
        ;;
      "alt-s")
        selected_snap="$( draw_snapshots ${selected_be} )"
        ret=$?
        if [ $ret -eq 130 ]; then
          BE_SELECTED=0
        elif [ $ret -eq 0 ] ; then
          clone_snapshot "${selected_snap}"
          BE_SELECTED=0
        fi
        ;;
      "alt-a")
        selected_snap="$( draw_snapshots )"
        ret=$?
        if [ $ret -eq 130 ]; then
          BE_SELECTED=0
        elif [ $ret -eq 0 ] ; then
          clone_snapshot "${selected_snap}"
          BE_SELECTED=0
        fi
        ;;
      "alt-r")
        emergency_shell "alt-r invoked"
    esac
  fi
done
|
//
// MBFakerCompany.h
// Faker
//
// Created by <NAME> on 11/6/12.
// Copyright (c) 2012 <NAME>. All rights reserved.
//
#import <Foundation/Foundation.h>
// Generator of fake company-related strings.
@interface MBFakerCompany : NSObject
// Returns a fake company name.
+ (NSString*)name;
// Returns a fake company suffix (exact values defined in the implementation).
+ (NSString*)suffix;
@end
|
#!/usr/bin/env bash
# Prepare the Kaldi speech-recognition setup: download models and fix paths.
# Abort immediately on any failure — in particular, the original script kept
# going (and ran ./prepare.sh in the wrong directory) when the cd failed.
set -e
dir=./speech-recognition/kaldi/
cd "$dir/scripts"
# Download models and change paths
./prepare.sh
echo "Kaldi preparation done."
|
#!/bin/sh
# End-to-end adversarial deletion/addition pipeline for DistMult on WN18RR:
# train the original model, select target triples, generate neighbourhood
# perturbations with each strategy, then retrain on every perturbed dataset.
cd ConvE
# train the original model
echo 'Training original model'
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data WN18RR --save-influence-map --reproduce-results
echo 'Selecting target triples'
mkdir data/target_distmult_WN18RR_0
CUDA_VISIBLE_DEVICES=0 python -u select_targets.py --model distmult --data WN18RR --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u select_rand_targets.py --model distmult --data WN18RR --reproduce-results
echo 'Generating random deletions for the neighbourhood'
CUDA_VISIBLE_DEVICES=0 python -u rand_del_n.py --model distmult --data WN18RR
CUDA_VISIBLE_DEVICES=0 python -u rand_del_g.py --model distmult --data WN18RR
python -u wrangle_KG.py rand_del_n_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data rand_del_n_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py rand_del_g_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data rand_del_g_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
echo 'Generating deletions for the neighbourhood using baselines'
CUDA_VISIBLE_DEVICES=0 python -u gr_del.py --model distmult --data WN18RR --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u ijcai_del.py --model distmult --data WN18RR --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u criage_del.py --model distmult --data WN18RR --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u criage_del_2.py --model distmult --data WN18RR --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u score_del.py --model distmult --data WN18RR --reproduce-results
python -u wrangle_KG.py gr_del_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data gr_del_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py ijcai_del_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data ijcai_del_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py criage_del_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data criage_del_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py criage_del_2_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data criage_del_2_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py score_del_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data score_del_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
echo 'Generating deletions for the neighbourhood using similarity metrics'
CUDA_VISIBLE_DEVICES=0 python -u cos_del.py --model distmult --data WN18RR --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u dot_del.py --model distmult --data WN18RR --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u l2_del.py --model distmult --data WN18RR --reproduce-results
python -u wrangle_KG.py cos_del_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data cos_del_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py dot_del_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data dot_del_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py l2_del_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data l2_del_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
echo 'Generating deletions for the neighbourhood using gradient based metrics'
CUDA_VISIBLE_DEVICES=0 python -u cos_grad_del.py --model distmult --data WN18RR --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u dot_grad_del.py --model distmult --data WN18RR --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u l2_grad_del.py --model distmult --data WN18RR --reproduce-results
python -u wrangle_KG.py cos_grad_del_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data cos_grad_del_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py dot_grad_del_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data dot_grad_del_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py l2_grad_del_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data l2_grad_del_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
echo 'Generating deletions for the neighbourhood using influence functions'
CUDA_VISIBLE_DEVICES=0 python -u if_del.py --model distmult --data WN18RR --reproduce-results
python -u wrangle_KG.py if_del_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data if_del_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
echo 'Generating random additions for the neighbourhood'
# NOTE(review): the two generation steps below are commented out, but their
# output datasets are still consumed by the wrangle/retrain commands that
# follow — confirm the rand_add_* datasets already exist on disk.
# CUDA_VISIBLE_DEVICES=0 python -u rand_add_n.py --model distmult --data WN18RR --reproduce-results
# CUDA_VISIBLE_DEVICES=0 python -u rand_add_g.py --model distmult --data WN18RR --reproduce-results
python -u wrangle_KG.py rand_add_n_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data rand_add_n_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py rand_add_g_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data rand_add_g_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
echo 'Generating additions for the neighbourhood using baselines'
CUDA_VISIBLE_DEVICES=0 python -u ijcai_add.py --model distmult --data WN18RR --reproduce-results
python -u wrangle_KG.py ijcai_add_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data ijcai_add_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
CUDA_VISIBLE_DEVICES=0 python -u ijcai_add.py --model distmult --data WN18RR --reproduce-results --corruption-factor 20 --budget 2
python -u wrangle_KG.py ijcai_add_distmult_WN18RR_0_100_1_2_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data ijcai_add_distmult_WN18RR_0_100_1_2_1 --reproduce-results --original-data WN18RR
CUDA_VISIBLE_DEVICES=0 python -u criage_inverter.py --model distmult --data WN18RR --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u criage_add.py --model distmult --data WN18RR --reproduce-results
python -u wrangle_KG.py criage_add_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data criage_add_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
echo 'Generating additions for the neighbourhood using similarity metrics'
CUDA_VISIBLE_DEVICES=0 python -u if_add_5.py --model distmult --data WN18RR --sim-metric 'cos' --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u if_add_5.py --model distmult --data WN18RR --sim-metric 'dot' --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u if_add_5.py --model distmult --data WN18RR --sim-metric 'l2' --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u if_add_5.py --model distmult --data WN18RR --sim-metric 'score' --reproduce-results
python -u wrangle_KG.py cos_add_5_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data cos_add_5_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py dot_add_5_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data dot_add_5_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py l2_add_5_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data l2_add_5_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py score_add_5_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data score_add_5_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
echo 'Generating additions for the neighbourhood using similarity of gradients'
CUDA_VISIBLE_DEVICES=0 python -u if_add_5.py --model distmult --data WN18RR --sim-metric 'cos_grad' --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u if_add_5.py --model distmult --data WN18RR --sim-metric 'dot_grad' --reproduce-results
CUDA_VISIBLE_DEVICES=0 python -u if_add_5.py --model distmult --data WN18RR --sim-metric 'l2_grad' --reproduce-results
python -u wrangle_KG.py cos_grad_add_5_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data cos_grad_add_5_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py dot_grad_add_5_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data dot_grad_add_5_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
python -u wrangle_KG.py l2_grad_add_5_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data l2_grad_add_5_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
echo 'Generating additions for the neighbourhood using influence function'
CUDA_VISIBLE_DEVICES=0 python -u if_add_5.py --model distmult --data WN18RR --sim-metric 'if' --reproduce-results
python -u wrangle_KG.py if_add_5_distmult_WN18RR_0_100_1_1_1
CUDA_VISIBLE_DEVICES=0 python -u main.py --model distmult --data if_add_5_distmult_WN18RR_0_100_1_1_1 --reproduce-results --original-data WN18RR
|
const express = require('express');
const nodemailer = require('nodemailer');

const app = express();

// BUG FIX: without a body-parsing middleware `req.body` is undefined and
// the destructuring in the handler below throws on every request.
app.use(express.json());

/**
 * POST /send-email
 * Expects a JSON body: { sender, receiver, subject, message }.
 * Relays the message through the configured SMTP host and responds with
 * 200 on success or 500 on failure.
 */
app.post('/send-email', (req, res) => {
  const { sender, receiver, subject, message } = req.body;

  // NOTE(review): placeholder host; port 587 with `secure: false` implies
  // STARTTLS, and no auth is configured — confirm the SMTP server allows
  // unauthenticated relay.
  const transporter = nodemailer.createTransport({
    host: 'smtp.example.com',
    port: 587,
    secure: false
  });

  const mailOptions = {
    from: sender,
    to: receiver,
    subject,
    text: message
  };

  transporter.sendMail(mailOptions, (err, info) => {
    if (err) {
      res.status(500).json({
        message: 'Error sending email',
      });
    } else {
      res.status(200).json({
        message: 'Email sent successfully',
      });
    }
  });
});

app.listen(3000);
def check_integer(num):
    """Print the sign of ``num``: "positive", "negative" or "zero".

    The previous version used ``num >= 0`` and so reported 0 as
    "positive"; zero is neither positive nor negative, so it now gets
    its own branch.
    """
    if num > 0:
        print("positive")
    elif num < 0:
        print("negative")
    else:
        print("zero")


num = 1
check_integer(num)
module SQL
  # Builds a CREATE TABLE statement from a small block-based DSL:
  #
  #   SQL::TableCreator.new(adapter, :people) do
  #     column :name, String
  #   end.to_sql
  #
  class TableCreator
    attr_accessor :table_name, :opts

    # @param adapter    adapter used for quoting and type lookup
    # @param table_name name of the table to create
    # @param opts       extra options hash
    # @param block      DSL block evaluated in the context of this instance
    def initialize(adapter, table_name, opts = {}, &block)
      @adapter    = adapter
      @table_name = table_name.to_s
      @opts       = opts
      @columns    = []
      # BUG FIX: guard against a missing block — calling instance_eval
      # with no block raises an ArgumentError.
      instance_eval(&block) if block
    end

    # Table name quoted according to the adapter's SQL dialect.
    def quoted_table_name
      @adapter.send(:quote_table_name, table_name)
    end

    # DSL entry point: declare a column with the given name and type.
    def column(name, type, opts = {})
      @columns << Column.new(@adapter, name, type, opts)
    end

    # Render the full CREATE TABLE statement.
    def to_sql
      "CREATE TABLE #{quoted_table_name} (#{@columns.map { |c| c.to_sql }.join(', ')})"
    end

    # A single column definition; resolves the adapter-specific SQL type.
    class Column
      attr_accessor :name, :type

      def initialize(adapter, name, type, opts = {})
        @adapter = adapter
        @name    = name.to_s
        @opts    = opts
        @type    = build_type(type)
      end

      # Look up the adapter's type map for +type_class+ and render the
      # complete column schema statement (name, type, modifiers).
      def build_type(type_class)
        schema = { :name => @name, :quote_column_name => quoted_name }.merge(@opts)
        schema = @adapter.class.type_map[type_class].merge(schema)
        @adapter.property_schema_statement(schema)
      end

      def to_sql
        type
      end

      # Column name quoted according to the adapter's SQL dialect.
      def quoted_name
        @adapter.send(:quote_column_name, name)
      end
    end
  end
end
|
import services from '@example/services';
import { IUser } from '@example/services/user';
import createSliceState from './common/createSliceState';

/**
 * User slice: holds the currently signed-in user (or null).
 *
 * - `set` replaces the stored user with the dispatched payload.
 * - `fetch` loads the user info from the services layer; the returned
 *   value becomes the new state.
 */
export default createSliceState({
  name: 'user',
  initialState: null as IUser | null,
  reducers: {
    // BUG FIX: this reducer previously returned `state` unchanged, so
    // dispatching `set` was a no-op. Return the payload instead.
    set: (state, action) => action.payload,
  },
  effects: {
    fetch: async (state) => {
      const data = await services.user.getInfo();
      return data;
    },
  },
});
|
"use strict";

// NOTE: Babel-compiled CommonJS output (see the source-map comment at the
// bottom) — prefer editing the original ES-module source over this file.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = void 0;
var _AnchorRenderer = _interopRequireDefault(require("../renderers/AnchorRenderer"));
var _ImageRenderer = _interopRequireDefault(require("../renderers/ImageRenderer"));
var _OrderedListRenderer = _interopRequireDefault(require("../renderers/OrderedListRenderer"));
var _UnorderedListRenderer = _interopRequireDefault(require("../renderers/UnorderedListRenderer"));
// Wrap a CommonJS export so it can be consumed like an ES default export.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Map of HTML tag name -> default renderer component.
const defaultRenderers = {
  img: _ImageRenderer.default,
  ul: _UnorderedListRenderer.default,
  ol: _OrderedListRenderer.default,
  a: _AnchorRenderer.default
};
var _default = defaultRenderers;
exports.default = _default;
//# sourceMappingURL=defaultRenderers.js.map
def extract_app_config_class(default_app_config: str) -> str:
    """Return the AppConfig class name from a dotted ``default_app_config``
    path, e.g. ``"myapp.apps.MyAppConfig"`` -> ``"MyAppConfig"``.

    A string without dots is returned unchanged.
    """
    return default_app_config.rsplit('.', 1)[-1]
<gh_stars>10-100
// Test-plan stub for multisample-mask interactions; the cases described
// in `description` are not yet implemented (empty test group below).
export const description = `
TODO:
- for sampleCount = { 1, 4 } and various combinations of:
    - rasterization mask = { 0, 1, 2, 3, 15 }
    - sample mask = { 0, 1, 2, 3, 15, 30 }
    - fragment shader output mask (SV_Coverage) = { 0, 1, 2, 3, 15, 30 }
  - test that final sample mask is the logical AND of all the
    relevant masks -- meaning that the samples not included in the final mask are discarded
    for all the { color outputs, depth tests, stencil operations } on any attachments.
- [choosing 30 = 2 + 4 + 8 + 16 because the 5th bit should be ignored]
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { GPUTest } from '../../../gpu_test.js';

// Register the (currently empty) test group against the generic GPU fixture.
export const g = makeTestGroup(GPUTest);
|
The most frequent integer is 2 and it appears 3 times. |
<gh_stars>1-10
package medicalsystem;
import org.semanticweb.owlapi.manchestersyntax.renderer.ManchesterOWLSyntaxOWLObjectRendererImpl;
import org.semanticweb.owlapi.model.OWLClassExpression;
/**
 * Wraps an {@link OWLClassExpression} and renders it in Manchester syntax,
 * with a special-cased, human-friendly phrasing when the rendering
 * mentions "Pizza".
 */
public class InferenceWrapper {

    // Shared Manchester-syntax renderer for all wrapper instances.
    private static ManchesterOWLSyntaxOWLObjectRendererImpl man = new ManchesterOWLSyntaxOWLObjectRendererImpl();

    private OWLClassExpression cls;

    public InferenceWrapper(OWLClassExpression c) {
        cls = c;
    }

    /** @return the wrapped class expression */
    public OWLClassExpression getClassExpression() {
        return cls;
    }

    /**
     * Renders the wrapped expression. If the rendering contains "Pizza",
     * the text before that token is presented as "This pizza is ...";
     * otherwise the raw rendering is returned.
     */
    @Override // was missing; toString overrides Object#toString
    public String toString() {
        String result = man.render(cls);
        int index = result.indexOf("Pizza");
        if (index != -1) {
            return "This pizza is " + result.substring(0, index);
        }
        return result;
    }
}
|
/*******************************************************************************
* Copyright 2015 InfinitiesSoft Solutions Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.infinities.skyport;
import java.io.Closeable;
import com.infinities.skyport.model.SystemInfo;
import com.infinities.skyport.service.ConfigurationHome;
import com.infinities.skyport.service.DriverHome;
import com.infinities.skyport.service.ModuleHome;
import com.infinities.skyport.service.WebsockifyService;
/**
 * Top-level service facade for the Skyport application. Implementations
 * own the lifecycle (initialize/close) and expose the registries and
 * services used by the rest of the system.
 */
public interface Skyport extends Closeable {

	/** Bootstraps the service; may propagate anything raised during startup. */
	void initialize() throws Throwable;

	/** Shuts the service down; narrows {@link Closeable#close()} to not throw. */
	@Override
	void close();

	/** @return home for configurations */
	ConfigurationHome getConfigurationHome();

	/** @return home for drivers */
	DriverHome getDriverHome();

	/** @return home for modules */
	ModuleHome getModuleHome();

	/** @return system information; the lookup itself may fail */
	SystemInfo getSystemInfo() throws Exception;

	/** @return the websockify proxy service */
	WebsockifyService getWebsockifyService();
}
|
#!/usr/bin/env bash
# For every *.properties file under the current directory, emit
#   <job-folder-name>:<job.name value>:---:stopped
# and write the combined list to the file given as $1.

# BUG FIX: fail clearly when the output file argument is missing
# (previously the redirect target expanded to nothing).
if [ -z "$1" ]; then
    echo "usage: $0 <output-file>" >&2
    exit 1
fi

# BUG FIX: `read -r` plus quoted expansions so paths containing spaces or
# backslashes survive; $(...) replaces legacy backticks.
find . -name "*.properties" | while read -r fname; do
    # Extract the value of the job.name property (split on '=').
    job_name=$(sed -n "/^job\.name.*$/ p" "$fname" | sed -n "s/=/\\t/g p" | cut -f 2)
    # The job's folder is two directory levels above the properties file.
    folder_path=$(dirname "$(dirname "$fname")")
    folder_name=$(basename "$folder_path")
    echo "$folder_name:$job_name:---:stopped"
done > "$1"
|
<gh_stars>1-10
package com.ethan.smarthome;
import androidx.appcompat.app.AppCompatActivity;
import android.app.Activity;
import android.app.Dialog;
import android.os.Bundle;
import android.view.Gravity;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.NumberPicker;
import android.widget.TextView;
import com.wx.wheelview.adapter.ArrayWheelAdapter;
import com.wx.wheelview.widget.WheelView;
import java.util.ArrayList;
import java.util.List;
/**
 * Settings screen: a single button that opens a {@link MyDialogFragment}
 * and shows the value picked in the dialog as the button's label.
 */
public class Settings extends AppCompatActivity implements View.OnClickListener, MyDialogFragment.NumberPick {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_settings);
        Button btn = findViewById(R.id.btn);
        // This activity already implements View.OnClickListener, so the
        // previous explicit cast `(View.OnClickListener) this` was redundant.
        btn.setOnClickListener(this);
    }

    /** Opens the number-picker dialog when the button is tapped. */
    @Override
    public void onClick(View v) {
        MyDialogFragment myDialogFragment = new MyDialogFragment();
        myDialogFragment.show(getSupportFragmentManager(), "MyFragment");
    }

    /** Callback from the dialog with the user's picked value. */
    @Override
    public void onFinishNumberPick(String inputText) {
        Button btn = findViewById(R.id.btn);
        btn.setText(inputText);
    }
}
|
package org.rs2server.rs2.model.combat.npcs;
import org.rs2server.rs2.model.Animation;
import org.rs2server.rs2.model.Graphic;
import org.rs2server.rs2.model.Hit;
import org.rs2server.rs2.model.Mob;
import org.rs2server.rs2.model.Prayers;
import org.rs2server.rs2.model.Skills;
import org.rs2server.rs2.model.World;
import org.rs2server.rs2.model.combat.CombatAction;
import org.rs2server.rs2.model.combat.impl.AbstractCombatAction;
import org.rs2server.rs2.model.combat.impl.MagicCombatAction.Spell;
import org.rs2server.rs2.model.npc.NPC;
import org.rs2server.rs2.tickable.Tickable;
import org.rs2server.rs2.util.Misc;
public class Kolodion extends AbstractCombatAction{

	/**
	 * The singleton instance.
	 */
	private static final Kolodion INSTANCE = new Kolodion();

	/**
	 * Gets the singleton instance.
	 * @return The singleton instance.
	 */
	public static CombatAction getAction() {
		return INSTANCE;
	}

	/**
	 * Performs Kolodion's magic attack: casts one of the three god spells
	 * at random, scales client speed / graphic delay with the distance to
	 * the victim, and applies any damage on a delayed tick so it lands
	 * with the graphic.
	 */
	@Override
	public void hit(final Mob attacker, final Mob victim) {
		super.hit(attacker, victim);

		if(!attacker.isNPC()) {
			return; //this should be an NPC!
		}

		int damage;
		int maxHit = 20;
		NPC npc = (NPC) attacker;
		attacker.playAnimation(getAttackAnim(npc));
		// One of the three god spells, chosen uniformly at random.
		Spell[] spell = {Spell.CLAWS_OF_GUTHIX, Spell.FLAMES_OF_ZAMORAK, Spell.SARADOMIN_STRIKE};
		int random = Misc.random(spell.length - 1);
		int clientSpeed;
		int gfxDelay;
		// Projectile speed and graphic delay grow with distance to the victim.
		if(attacker.getLocation().isWithinDistance(attacker, victim, 1)) {
			clientSpeed = 70;
			gfxDelay = 80;
		} else if(attacker.getLocation().isWithinDistance(attacker, victim, 5)) {
			clientSpeed = 90;
			gfxDelay = 100;
		} else if(attacker.getLocation().isWithinDistance(attacker, victim, 8)) {
			clientSpeed = 110;
			gfxDelay = 120;
		} else {
			clientSpeed = 130;
			gfxDelay = 140;
		}
		// Per-spell hit graphics; index matches the spell chosen above.
		Graphic[] gfx = {Graphic.create(77, gfxDelay, 100), Graphic.create(78, gfxDelay, 0), Graphic.create(76, gfxDelay, 100)};
		npc.setAutocastSpell(spell[random]);
		damage = Misc.random(damage(maxHit, attacker, victim, attacker.getCombatState().getAttackType(), Skills.MAGIC , Prayers.PROTECT_FROM_MAGIC, false, false));
		// On a non-hit (damage <= 0) play graphic 85 instead of the spell graphic.
		victim.playGraphics(damage <= 0 ? Graphic.create(85, gfxDelay, 100) : gfx[random]);
		int delay = (gfxDelay / 20) - 1;
		if (damage > 0) {
			// Apply the damage once the graphic delay has elapsed.
			World.getWorld().submit(new Tickable(delay) {
				@Override
				public void execute() {
					this.stop();
					victim.inflictDamage(new Hit(damage), attacker);
					smite(attacker, victim, damage);
					recoil(attacker, victim, damage);
				}
			});
			vengeance(attacker, victim, damage, 1);
		}
		victim.getActiveCombatAction().defend(attacker, victim, false);
		attacker.getCombatState().setSpellDelay(5);
		attacker.getCombatState().setAttackDelay(4);
	}

	/**
	 * Attack animation keyed by NPC id, with a default fallback (811).
	 */
	private Animation getAttackAnim(NPC n) {
		switch (n.getId()) {
		case 1606:
			return Animation.create(132);
		case 1607:
			return Animation.create(5319);
		case 1608:
			return Animation.create(729);
		case 1609:
			return Animation.create(69);
		}
		return Animation.create(811);
	}

	/**
	 * Maximum attack range, in tiles.
	 */
	@Override
	public int distance(Mob attacker) {
		return 5;
	}
}
|
# Open bash in a container. Pass container name
# Usage: dbash <container-name-or-id>
# Attaches an interactive bash shell to the running container via `docker exec`.
dbash() {
    docker exec -it "$1" /bin/bash
}
|
<reponame>arnavsharma93/eden
# -*- coding: utf-8 -*-
""" GIS Module
@requires: U{B{I{gluon}} <http://web2py.com>}
@requires: U{B{I{shapely}} <http://trac.gispython.org/lab/wiki/Shapely>}
@copyright: (c) 2010-2013 Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the
    Software is furnished to do so, subject to the following
    conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# Public API of this module.
__all__ = ["GIS",
           "S3Map",
           "S3ExportPOI",
           "S3ImportPOI",
           ]
import os
import re
import sys
#import logging
import urllib # Needed for urlencoding
import urllib2 # Needed for quoting & error handling on fetch
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
from datetime import timedelta # Needed for Feed Refresh checks
try:
from lxml import etree # Needed to follow NetworkLinks
except ImportError:
print >> sys.stderr, "ERROR: lxml module needed for XML handling"
raise
KML_NAMESPACE = "http://earth.google.com/kml/2.2"
try:
import json # try stdlib (Python 2.6)
except ImportError:
try:
import simplejson as json # try external module
except:
import gluon.contrib.simplejson as json # fallback to pure-Python module
try:
# Python 2.7
from collections import OrderedDict
except:
# Python 2.6
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from gluon import *
# Here are dependencies listed for reference:
#from gluon import current
#from gluon.html import *
#from gluon.http import HTTP, redirect
from gluon.dal import Rows
from gluon.storage import Storage
from s3fields import s3_all_meta_field_names
from s3rest import S3Method
from s3track import S3Trackable
from s3utils import s3_include_ext, s3_unicode
# Debug switch: when enabled, _debug() traces to stderr; otherwise it is a no-op.
DEBUG = False
if DEBUG:
    import datetime
    print >> sys.stderr, "S3GIS: DEBUG MODE"
    def _debug(m):
        print >> sys.stderr, m
else:
    _debug = lambda m: None

# Map WKT types to db types
GEOM_TYPES = {"point": 1,
              "linestring": 2,
              "polygon": 3,
              "multipoint": 4,
              "multilinestring": 5,
              "multipolygon": 6,
              "geometrycollection": 7,
              }

# Mean Earth radius, km
RADIUS_EARTH = 6371.01

# Compact JSON encoding
SEPARATORS = (",", ":")

# Map Defaults
# Also in static/S3/s3.gis.js
# http://dev.openlayers.org/docs/files/OpenLayers/Strategy/Cluster-js.html
CLUSTER_ATTRIBUTE = "colour"
CLUSTER_DISTANCE = 20   # pixels
CLUSTER_THRESHOLD = 2   # minimum # of features to form a cluster
# Garmin GPS Symbols
# BUG FIX: a missing comma after "Amusement Park" silently concatenated it
# with "Ball Park" into a single bogus entry "Amusement ParkBall Park"
# (adjacent string literals are concatenated in Python).
GPS_SYMBOLS = ["Airport",
               "Amusement Park",
               "Ball Park",
               "Bank",
               "Bar",
               "Beach",
               "Bell",
               "Boat Ramp",
               "Bowling",
               "Bridge",
               "Building",
               "Campground",
               "Car",
               "Car Rental",
               "Car Repair",
               "Cemetery",
               "Church",
               "Circle with X",
               "City (Capitol)",
               "City (Large)",
               "City (Medium)",
               "City (Small)",
               "Civil",
               "Contact, Dreadlocks",
               "Controlled Area",
               "Convenience Store",
               "Crossing",
               "Dam",
               "Danger Area",
               "Department Store",
               "Diver Down Flag 1",
               "Diver Down Flag 2",
               "Drinking Water",
               "Exit",
               "Fast Food",
               "Fishing Area",
               "Fitness Center",
               "Flag",
               "Forest",
               "Gas Station",
               "Geocache",
               "Geocache Found",
               "Ghost Town",
               "Glider Area",
               "Golf Course",
               "Green Diamond",
               "Green Square",
               "Heliport",
               "Horn",
               "Hunting Area",
               "Information",
               "Levee",
               "Light",
               "Live Theater",
               "Lodging",
               "Man Overboard",
               "Marina",
               "Medical Facility",
               "Mile Marker",
               "Military",
               "Mine",
               "Movie Theater",
               "Museum",
               "Navaid, Amber",
               "Navaid, Black",
               "Navaid, Blue",
               "Navaid, Green",
               "Navaid, Green/Red",
               "Navaid, Green/White",
               "Navaid, Orange",
               "Navaid, Red",
               "Navaid, Red/Green",
               "Navaid, Red/White",
               "Navaid, Violet",
               "Navaid, White",
               "Navaid, White/Green",
               "Navaid, White/Red",
               "Oil Field",
               "Parachute Area",
               "Park",
               "Parking Area",
               "Pharmacy",
               "Picnic Area",
               "Pizza",
               "Post Office",
               "Private Field",
               "Radio Beacon",
               "Red Diamond",
               "Red Square",
               "Residence",
               "Restaurant",
               "Restricted Area",
               "Restroom",
               "RV Park",
               "Scales",
               "Scenic Area",
               "School",
               "Seaplane Base",
               "Shipwreck",
               "Shopping Center",
               "Short Tower",
               "Shower",
               "Skiing Area",
               "Skull and Crossbones",
               "Soft Field",
               "Stadium",
               "Summit",
               "Swimming Area",
               "Tall Tower",
               "Telephone",
               "Toll Booth",
               "TracBack Point",
               "Trail Head",
               "Truck Stop",
               "Tunnel",
               "Ultralight Area",
               "Water Hydrant",
               "Waypoint",
               "White Buoy",
               "White Dot",
               "Zoo"
               ]
# -----------------------------------------------------------------------------
class GIS(object):
"""
GeoSpatial functions
"""
    def __init__(self):
        """
            Initialise validation/error messages and the
            location-hierarchy defaults used throughout the module.
        """
        messages = current.messages
        #messages.centroid_error = str(A("Shapely", _href="http://pypi.python.org/pypi/Shapely/", _target="_blank")) + " library not found, so can't find centroid!"
        messages.centroid_error = "Shapely library not functional, so can't find centroid! Install Geos & Shapely for Line/Polygon support"
        messages.unknown_type = "Unknown Type!"
        messages.invalid_wkt_point = "Invalid WKT: must be like POINT(3 4)"
        messages.invalid_wkt = "Invalid WKT: see http://en.wikipedia.org/wiki/Well-known_text"
        messages.lon_empty = "Invalid: Longitude can't be empty if Latitude specified!"
        messages.lat_empty = "Invalid: Latitude can't be empty if Longitude specified!"
        messages.unknown_parent = "Invalid: %(parent_id)s is not a known Location"
        # GPS symbol used when a feature doesn't define one of its own
        self.DEFAULT_SYMBOL = "White Dot"
        # Supported location-hierarchy levels (L0 = country .. L5)
        self.hierarchy_level_keys = ["L0", "L1", "L2", "L3", "L4", "L5"]
        self.hierarchy_levels = {}
        self.max_allowed_level_num = 4
# -------------------------------------------------------------------------
    @staticmethod
    def gps_symbols():
        """ Return the list of known Garmin GPS symbol names """
        return GPS_SYMBOLS
# -------------------------------------------------------------------------
    def download_kml(self, record_id, filename, session_id_name, session_id):
        """
            Download a KML file:
                - unzip it if-required
                - follow NetworkLinks recursively if-required

            Save the file to the /uploads folder

            Designed to be called asynchronously using:
                current.s3task.async("download_kml", [record_id, filename])

            @param record_id: id of the record in db.gis_layer_kml
            @param filename: name to save the file as
            @param session_id_name: name of the session
            @param session_id: id of the session

            @ToDo: Pass error messages to Result & have JavaScript listen for these
        """
        request = current.request

        # The layer record holds the source URL to fetch from
        table = current.s3db.gis_layer_kml
        record = current.db(table.id == record_id).select(table.url,
                                                          limitby=(0, 1)
                                                          ).first()
        url = record.url

        # Cache inside uploads/gis_cache so the file can be served back
        filepath = os.path.join(request.global_settings.applications_parent,
                                request.folder,
                                "uploads",
                                "gis_cache",
                                filename)

        # fetch_kml returns a string of warning markers ("" if all OK)
        warning = self.fetch_kml(url, filepath, session_id_name, session_id)

        # @ToDo: Handle errors
        #query = (cachetable.name == name)
        if "URLError" in warning or "HTTPError" in warning:
            # URL inaccessible
            if os.access(filepath, os.R_OK):
                statinfo = os.stat(filepath)
                if statinfo.st_size:
                    # Use cached version
                    #date = db(query).select(cachetable.modified_on,
                    #                        limitby=(0, 1)).first().modified_on
                    #response.warning += "%s %s %s\n" % (url,
                    #                                    T("not accessible - using cached version from"),
                    #                                    str(date))
                    #url = URL(c="default", f="download",
                    #          args=[filename])
                    pass
                else:
                    # 0k file is all that is available
                    #response.warning += "%s %s\n" % (url,
                    #                                 T("not accessible - no cached version available!"))
                    # skip layer
                    return
            else:
                # No cached version available
                #response.warning += "%s %s\n" % (url,
                #                                 T("not accessible - no cached version available!"))
                # skip layer
                return
        else:
            # Download was successful
            #db(query).update(modified_on=request.utcnow)
            if "ParseError" in warning:
                # @ToDo Parse detail
                #response.warning += "%s: %s %s\n" % (T("Layer"),
                #                                     name,
                #                                     T("couldn't be parsed so NetworkLinks not followed."))
                pass
            if "GroundOverlay" in warning or "ScreenOverlay" in warning:
                #response.warning += "%s: %s %s\n" % (T("Layer"),
                #                                     name,
                #                                     T("includes a GroundOverlay or ScreenOverlay which aren't supported in OpenLayers yet, so it may not work properly."))
                # Code to support GroundOverlay:
                # https://github.com/openlayers/openlayers/pull/759
                pass
# -------------------------------------------------------------------------
def fetch_kml(self, url, filepath, session_id_name, session_id):
"""
Fetch a KML file:
- unzip it if-required
- follow NetworkLinks recursively if-required
Returns a file object
Designed as a helper function for download_kml()
"""
from gluon.tools import fetch
response = current.response
public_url = current.deployment_settings.get_base_public_url()
warning = ""
local = False
if not url.startswith("http"):
local = True
url = "%s%s" % (public_url, url)
elif len(url) > len(public_url) and url[:len(public_url)] == public_url:
local = True
if local:
# Keep Session for local URLs
import Cookie
cookie = Cookie.SimpleCookie()
cookie[session_id_name] = session_id
# For sync connections
current.session._unlock(response)
try:
file = fetch(url, cookie=cookie)
except urllib2.URLError:
warning = "URLError"
return warning
except urllib2.HTTPError:
warning = "HTTPError"
return warning
else:
try:
file = fetch(url)
except urllib2.URLError:
warning = "URLError"
return warning
except urllib2.HTTPError:
warning = "HTTPError"
return warning
filenames = []
if file[:2] == "PK":
# Unzip
fp = StringIO(file)
import zipfile
myfile = zipfile.ZipFile(fp)
files = myfile.infolist()
main = None
candidates = []
for _file in files:
filename = _file.filename
if filename == "doc.kml":
main = filename
elif filename[-4:] == ".kml":
candidates.append(filename)
if not main:
if candidates:
# Any better way than this to guess which KML file is the main one?
main = candidates[0]
else:
response.error = "KMZ contains no KML Files!"
return ""
# Write files to cache (other than the main one)
request = current.request
path = os.path.join(request.folder, "static", "cache", "kml")
if not os.path.exists(path):
os.makedirs(path)
for _file in files:
filename = _file.filename
if filename != main:
if "/" in filename:
_filename = filename.split("/")
dir = os.path.join(path, _filename[0])
if not os.path.exists(dir):
os.mkdir(dir)
_filepath = os.path.join(path, *_filename)
else:
_filepath = os.path.join(path, filename)
try:
f = open(_filepath, "wb")
except:
# Trying to write the Folder
pass
else:
filenames.append(filename)
__file = myfile.read(filename)
f.write(__file)
f.close()
# Now read the main one (to parse)
file = myfile.read(main)
myfile.close()
# Check for NetworkLink
if "<NetworkLink>" in file:
try:
# Remove extraneous whitespace
parser = etree.XMLParser(recover=True, remove_blank_text=True)
tree = etree.XML(file, parser)
# Find contents of href tag (must be a better way?)
url = ""
for element in tree.iter():
if element.tag == "{%s}href" % KML_NAMESPACE:
url = element.text
if url:
# Follow NetworkLink (synchronously)
warning2 = self.fetch_kml(url, filepath)
warning += warning2
except (etree.XMLSyntaxError,):
e = sys.exc_info()[1]
warning += "<ParseError>%s %s</ParseError>" % (e.line, e.errormsg)
# Check for Overlays
if "<GroundOverlay>" in file:
warning += "GroundOverlay"
if "<ScreenOverlay>" in file:
warning += "ScreenOverlay"
for filename in filenames:
replace = "%s/%s" % (URL(c="static", f="cache", args=["kml"]),
filename)
# Rewrite all references to point to the correct place
# need to catch <Icon><href> (which could be done via lxml)
# & also <description><![CDATA[<img src=" (which can't)
file = file.replace(filename, replace)
# Write main file to cache
f = open(filepath, "w")
f.write(file)
f.close()
return warning
# -------------------------------------------------------------------------
@staticmethod
def geocode(address, postcode=None, Lx_ids=None, geocoder="google"):
"""
Geocode an Address
- used by S3LocationSelectorWidget2
settings.get_gis_geocode_imported_addresses
@param address: street address
@param postcode: postcode
@param Lx_ids: list of ancestor IDs
@param geocoder: which geocoder service to use
"""
from geopy import geocoders
if geocoder == "google":
g = geocoders.GoogleV3()
elif geocoder == "yahoo":
apikey = current.deployment_settings.get_gis_api_yahoo()
g = geocoders.Yahoo(apikey)
else:
# @ToDo
raise NotImplementedError
location = address
if postcode:
location = "%s,%s" % (location, postcode)
Lx = L5 = L4 = L3 = L2 = L1 = L0 = None
if Lx_ids:
# Convert Lx IDs to Names
table = current.s3db.gis_location
limit = len(Lx_ids)
if limit > 1:
query = (table.id.belongs(Lx_ids))
else:
query = (table.id == Lx_ids[0])
db = current.db
Lx = db(query).select(table.id,
table.name,
table.level,
table.gis_feature_type,
# Better as separate query
#table.lon_min,
#table.lat_min,
#table.lon_max,
#table.lat_max,
# Better as separate query
#table.wkt,
limitby=(0, limit),
orderby=~table.level
)
if Lx:
Lx_names = ",".join([l.name for l in Lx])
location = "%s,%s" % (location, Lx_names)
for l in Lx:
if l.level == "L0":
L0 = l.id
continue
elif l.level == "L1":
L1 = l.id
continue
elif l.level == "L2":
L2 = l.id
continue
elif l.level == "L3":
L3 = l.id
continue
elif l.level == "L4":
L4 = l.id
continue
elif l.level == "L5":
L5 = l.id
Lx = Lx.as_dict()
try:
results = g.geocode(location, exactly_one=False)
if len(results) == 1:
place, (lat, lon) = results[0]
if Lx:
output = None
# Check Results are for a specific address & not just that for the City
results = g.geocode(Lx_names, exactly_one=False)
if not results:
output = "Can't check that these results are specific enough"
for result in results:
place2, (lat2, lon2) = result
if place == place2:
output = "We can only geocode to the Lx"
break
if not output:
# Check Results are within relevant bounds
L0_row = None
wkt = None
if L5 and Lx[L5]["gis_feature_type"] != 1:
wkt = db(table.id == L5).select(table.wkt,
limitby=(0, 1)
).first().wkt
used_Lx = L5
elif L4 and Lx[L4]["gis_feature_type"] != 1:
wkt = db(table.id == L4).select(table.wkt,
limitby=(0, 1)
).first().wkt
used_Lx = L4
elif L3 and Lx[L3]["gis_feature_type"] != 1:
wkt = db(table.id == L3).select(table.wkt,
limitby=(0, 1)
).first().wkt
used_Lx = L3
elif L2 and Lx[L2]["gis_feature_type"] != 1:
wkt = db(table.id == L2).select(table.wkt,
limitby=(0, 1)
).first().wkt
used_Lx = L2
elif L1 and Lx[L1]["gis_feature_type"] != 1:
wkt = db(table.id == L1).select(table.wkt,
limitby=(0, 1)
).first().wkt
used_Lx = L1
elif L0:
L0_row = db(table.id == L0).select(table.wkt,
table.lon_min,
table.lat_min,
table.lon_max,
table.lat_max,
limitby=(0, 1)
).first()
if not L0_row.wkt.startswith("POI"): # Point
wkt = L0_row.wkt
used_Lx = L0
if wkt:
from shapely.geometry import point
from shapely.wkt import loads as wkt_loads
try:
# Enable C-based speedups available from 1.2.10+
from shapely import speedups
speedups.enable()
except:
current.log.info("S3GIS",
"Upgrade Shapely for Performance enhancements")
test = point.Point(lon, lat)
shape = wkt_loads(wkt)
ok = test.intersects(shape)
if not ok:
output = "Returned value not within %s" % Lx[used_Lx]["name"]
elif L0:
# Check within country at least
if not L0_row:
L0_row = db(table.id == L0).select(table.lon_min,
table.lat_min,
table.lon_max,
table.lat_max,
limitby=(0, 1)
).first()
if lat < L0_row["lat_max"] and \
lat > L0_row["lat_min"] and \
lon < L0_row["lon_max"] and \
lon > L0_row["lon_min"]:
ok = True
else:
ok = False
output = "Returned value not within %s" % Lx["name"]
else:
# We'll just have to trust it!
ok = True
if ok:
output = dict(lat=lat, lon=lon)
else:
# We'll just have to trust it!
output = dict(lat=lat, lon=lon)
elif len(results):
output = "Multiple results found"
# @ToDo: Iterate through the results to see if just 1 is within the right bounds
else:
output = "No results found"
except:
error = sys.exc_info()[1]
output = str(error)
return output
# -------------------------------------------------------------------------
    @staticmethod
    def geocode_r(lat, lon):
        """
            Reverse Geocode a Lat/Lon
            - find which location-hierarchy regions contain the point

            (The previous docstring was copy-pasted from geocode() and
            documented parameters this function does not take.)

            @param lat: latitude (float, or string convertible to float)
            @param lon: longitude (float, or string convertible to float)

            @return: an error message string, or a dict mapping
                     hierarchy level -> location id
        """
        # NOTE(review): a legal lat or lon of exactly 0 is rejected by this
        # truthiness test — confirm whether that is intended.
        if not lat or not lon:
            return "Need Lat & Lon"
        results = ""
        # Check vaguely valid
        try:
            lat = float(lat)
        except ValueError:
            results = "Latitude is Invalid!"
        try:
            lon = float(lon)
        except ValueError:
            results += "Longitude is Invalid!"
        if not results:
            if lon > 180 or lon < -180:
                results = "Longitude must be between -180 & 180!"
            elif lat > 90 or lat < -90:
                results = "Latitude must be between -90 & 90!"
            else:
                table = current.s3db.gis_location
                query = (table.level != None) & \
                        (table.deleted != True)
                if current.deployment_settings.get_gis_spatialdb():
                    # The spatial DB can do the point-in-polygon test for us
                    point = "POINT(%s %s)" % (lon, lat)
                    query &= (table.the_geom.st_intersects(point))
                    rows = current.db(query).select(table.id,
                                                    table.level,
                                                    )
                    results = {}
                    for row in rows:
                        results[row.level] = row.id
                else:
                    # Oh dear, this is going to be slow :/
                    # Pre-filter on the bounding box, then do the exact
                    # point-in-polygon test with Shapely
                    query &= (table.lat_min < lat) & \
                             (table.lat_max > lat) & \
                             (table.lon_min < lon) & \
                             (table.lon_max > lon)
                    rows = current.db(query).select(table.id,
                                                    table.level,
                                                    table.wkt,
                                                    )
                    from shapely.geometry import point
                    from shapely.wkt import loads as wkt_loads
                    test = point.Point(lon, lat)
                    results = {}
                    for row in rows:
                        shape = wkt_loads(row.wkt)
                        ok = test.intersects(shape)
                        if ok:
                            results[row.level] = row.id
        return results
# -------------------------------------------------------------------------
@staticmethod
def get_bearing(lat_start, lon_start, lat_end, lon_end):
"""
Given a Start & End set of Coordinates, return a Bearing
Formula from: http://www.movable-type.co.uk/scripts/latlong.html
"""
import math
# shortcuts
cos = math.cos
sin = math.sin
delta_lon = lon_start - lon_end
bearing = math.atan2(sin(delta_lon) * cos(lat_end),
(cos(lat_start) * sin(lat_end)) - \
(sin(lat_start) * cos(lat_end) * cos(delta_lon))
)
# Convert to a compass bearing
bearing = (bearing + 360) % 360
return bearing
# -------------------------------------------------------------------------
    def get_bounds(self, features=None, parent=None):
        """
            Calculate the Bounds of a list of Point Features
            e.g. When a map is displayed that focuses on a collection of
                 points, the map is zoomed to show just the region bounding
                 the points.
            e.g. To use in GPX export for correct zooming

            Ensure a minimum size of bounding box, and that the points
            are inset from the border.

            @param features: A list of point features
            @param parent: A location_id to provide a polygonal bounds suitable
                           for validating child locations
            @return: with parent: (lat_min, lon_min, lat_max, lon_max, name);
                     otherwise: dict(lon_min=..., lat_min=..., lon_max=..., lat_max=...)

            @ToDo: Support Polygons (separate function?)
        """
        if parent:
            table = current.s3db.gis_location
            db = current.db
            parent = db(table.id == parent).select(table.id,
                                                   table.level,
                                                   table.name,
                                                   table.parent,
                                                   table.path,
                                                   table.lon,
                                                   table.lat,
                                                   table.lon_min,
                                                   table.lat_min,
                                                   table.lon_max,
                                                   table.lat_max).first()
            # Bounds are unusable when missing or degenerate (point == bounds)
            if parent.lon_min is None or \
               parent.lon_max is None or \
               parent.lat_min is None or \
               parent.lat_max is None or \
               parent.lon == parent.lon_min or \
               parent.lon == parent.lon_max or \
               parent.lat == parent.lat_min or \
               parent.lat == parent.lat_max:
                # This is unsuitable - try higher parent
                if parent.level == "L1":
                    if parent.parent:
                        # We can trust that L0 should have the data from prepop
                        L0 = db(table.id == parent.parent).select(table.name,
                                                                  table.lon_min,
                                                                  table.lat_min,
                                                                  table.lon_max,
                                                                  table.lat_max).first()
                        return L0.lat_min, L0.lon_min, L0.lat_max, L0.lon_max, L0.name
                if parent.path:
                    path = parent.path
                else:
                    # Path not yet built, so build it now
                    path = GIS.update_location_tree(dict(id=parent.id))
                path_list = map(int, path.split("/"))
                rows = db(table.id.belongs(path_list)).select(table.level,
                                                              table.name,
                                                              table.lat,
                                                              table.lon,
                                                              table.lon_min,
                                                              table.lat_min,
                                                              table.lon_max,
                                                              table.lat_max,
                                                              orderby=table.level)
                row_list = rows.as_list()
                row_list.reverse()
                ok = False
                # Walk up the ancestry until a row with usable bounds is found
                for row in row_list:
                    if row["lon_min"] is not None and row["lon_max"] is not None and \
                       row["lat_min"] is not None and row["lat_max"] is not None and \
                       row["lon"] != row["lon_min"] != row["lon_max"] and \
                       row["lat"] != row["lat_min"] != row["lat_max"]:
                        ok = True
                        break

                if ok:
                    # This level is suitable
                    return row["lat_min"], row["lon_min"], row["lat_max"], row["lon_max"], row["name"]
            else:
                # This level is suitable
                return parent.lat_min, parent.lon_min, parent.lat_max, parent.lon_max, parent.name

            # No ancestor had usable bounds: fall back to the whole world
            return -90, -180, 90, 180, None

        # Minimum Bounding Box
        # - gives a minimum width and height in degrees for the region shown.
        # Without this, a map showing a single point would not show any extent around that point.
        bbox_min_size = 0.05
        # Bounding Box Insets
        # - adds a small amount of distance outside the points.
        # Without this, the outermost points would be on the bounding box, and might not be visible.
        bbox_inset = 0.007

        if features:
            lon_min = 180
            lat_min = 90
            lon_max = -180
            lat_max = -90

            # Is this a simple feature set or the result of a join?
            try:
                lon = features[0].lon
                simple = True
            except (AttributeError, KeyError):
                simple = False

            # @ToDo: Optimised Geospatial routines rather than this crude hack
            for feature in features:

                try:
                    if simple:
                        lon = feature.lon
                        lat = feature.lat
                    else:
                        # A Join
                        lon = feature.gis_location.lon
                        lat = feature.gis_location.lat
                except AttributeError:
                    # Skip any rows without the necessary lat/lon fields
                    continue

                # Also skip those set to None. Note must use explicit test,
                # as zero is a legal value.
                if lon is None or lat is None:
                    continue

                lon_min = min(lon, lon_min)
                lat_min = min(lat, lat_min)
                lon_max = max(lon, lon_max)
                lat_max = max(lat, lat_max)

            # Assure a reasonable-sized box.
            delta_lon = (bbox_min_size - (lon_max - lon_min)) / 2.0
            if delta_lon > 0:
                lon_min -= delta_lon
                lon_max += delta_lon
            delta_lat = (bbox_min_size - (lat_max - lat_min)) / 2.0
            if delta_lat > 0:
                lat_min -= delta_lat
                lat_max += delta_lat

            # Move bounds outward by specified inset.
            lon_min -= bbox_inset
            lon_max += bbox_inset
            lat_min -= bbox_inset
            lat_max += bbox_inset

        else:
            # no features: use the configured default extents (or the world)
            config = GIS.get_config()
            if config.lat_min is not None:
                lat_min = config.lat_min
            else:
                lat_min = -90
            if config.lon_min is not None:
                lon_min = config.lon_min
            else:
                lon_min = -180
            if config.lat_max is not None:
                lat_max = config.lat_max
            else:
                lat_max = 90
            if config.lon_max is not None:
                lon_max = config.lon_max
            else:
                lon_max = 180

        return dict(lon_min=lon_min, lat_min=lat_min,
                    lon_max=lon_max, lat_max=lat_max)
# -------------------------------------------------------------------------
@staticmethod
def _lookup_parent_path(feature_id):
"""
Helper that gets parent and path for a location.
"""
db = current.db
table = db.gis_location
feature = db(table.id == feature_id).select(table.id,
table.name,
table.level,
table.path,
table.parent,
limitby=(0, 1)).first()
return feature
# -------------------------------------------------------------------------
@staticmethod
def get_children(id, level=None):
"""
Return a list of IDs of all GIS Features which are children of
the requested feature, using Materialized path for retrieving
the children
This has been chosen over Modified Preorder Tree Traversal for
greater efficiency:
http://eden.sahanafoundation.org/wiki/HaitiGISToDo#HierarchicalTrees
@param: level - optionally filter by level
@return: Rows object containing IDs & Names
Note: This does NOT include the parent location itself
"""
db = current.db
try:
table = db.gis_location
except:
# Being run from CLI for debugging
table = current.s3db.gis_location
query = (table.deleted == False)
if level:
query &= (table.level == level)
term = str(id)
path = table.path
query &= ((path.like(term + "/%")) | \
(path.like("%/" + term + "/%")))
children = db(query).select(table.id,
table.name)
return children
# -------------------------------------------------------------------------
@staticmethod
def get_parents(feature_id, feature=None, ids_only=False):
"""
Returns a list containing ancestors of the requested feature.
If the caller already has the location row, including path and
parent fields, they can supply it via feature to avoid a db lookup.
If ids_only is false, each element in the list is a gluon.sql.Row
containing the gis_location record of an ancestor of the specified
location.
If ids_only is true, just returns a list of ids of the parents.
This avoids a db lookup for the parents if the specified feature
has a path.
List elements are in the opposite order as the location path and
exclude the specified location itself, i.e. element 0 is the parent
and the last element is the most distant ancestor.
Assists lazy update of a database without location paths by calling
update_location_tree to get the path.
"""
if not feature or "path" not in feature or "parent" not in feature:
feature = GIS._lookup_parent_path(feature_id)
if feature and (feature.path or feature.parent):
if feature.path:
path = feature.path
else:
path = GIS.update_location_tree(feature)
path_list = map(int, path.split("/"))
if len(path_list) == 1:
# No parents - path contains only this feature.
return None
# Get path in the desired order, without current feature.
reverse_path = path_list[:-1]
reverse_path.reverse()
# If only ids are wanted, stop here.
if ids_only:
return reverse_path
# Retrieve parents - order in which they're returned is arbitrary.
s3db = current.s3db
table = s3db.gis_location
query = (table.id.belongs(reverse_path))
fields = [table.id, table.name, table.level, table.lat, table.lon]
unordered_parents = current.db(query).select(cache=s3db.cache,
*fields)
# Reorder parents in order of reversed path.
unordered_ids = [row.id for row in unordered_parents]
parents = [unordered_parents[unordered_ids.index(path_id)]
for path_id in reverse_path if path_id in unordered_ids]
return parents
else:
return None
# -------------------------------------------------------------------------
    def get_parent_per_level(self, results, feature_id,
                             feature=None,
                             ids=True,
                             names=True):
        """
            Adds ancestor of requested feature for each level to supplied dict.

            If the caller already has the location row, including path and
            parent fields, they can supply it via feature to avoid a db lookup.

            If a dict is not supplied in results, one is created. The results
            dict is returned in either case.

            If ids=True and names=False (used by old S3LocationSelectorWidget):
            For each ancestor, an entry is added to results, like
            ancestor.level : ancestor.id

            If ids=False and names=True (used by address_onvalidation):
            For each ancestor, an entry is added to results, like
            ancestor.level : ancestor.name

            If ids=True and names=True (used by new S3LocationSelectorWidget):
            For each ancestor, an entry is added to results, like
            ancestor.level : {name : ancestor.name, id: ancestor.id}
        """
        if not results:
            results = {}
        # id is the feature whose path we walk; may be replaced by the
        # parent's id below when the feature itself isn't saved yet
        id = feature_id
        # if we don't have a feature or a feature id return the dict as-is
        if not feature_id and not feature:
            return results
        if not feature_id and "path" not in feature and "parent" in feature:
            # gis_location_onvalidation on a Create => no ID yet
            # Read the Parent's path instead
            feature = self._lookup_parent_path(feature.parent)
            id = feature.id
        elif not feature or "path" not in feature or "parent" not in feature:
            feature = self._lookup_parent_path(feature_id)
        if feature and (feature.path or feature.parent):
            if feature.path:
                path = feature.path
            else:
                # Lazily build the materialized path
                path = self.update_location_tree(feature)
            # Get ids of ancestors at each level.
            if feature.parent:
                strict = self.get_strict_hierarchy(feature.parent)
            else:
                strict = self.get_strict_hierarchy(id)
            if path and strict and not names:
                # No need to do a db lookup for parents in this case -- we
                # know the levels of the parents from their position in path.
                # Note ids returned from db are ints, not strings, so be
                # consistent with that.
                path_ids = map(int, path.split("/"))
                # This skips the last path element, which is the supplied
                # location.
                for (i, id) in enumerate(path_ids[:-1]):
                    results["L%i" % i] = id
            elif path:
                # Non-strict hierarchy (or names wanted): need the full rows
                ancestors = self.get_parents(id, feature=feature)
                if ancestors:
                    for ancestor in ancestors:
                        # Only report ancestors at recognised hierarchy levels
                        if ancestor.level and ancestor.level in self.hierarchy_level_keys:
                            if names and ids:
                                results[ancestor.level] = Storage()
                                results[ancestor.level].name = ancestor.name
                                results[ancestor.level].id = ancestor.id
                            elif names:
                                results[ancestor.level] = ancestor.name
                            else:
                                results[ancestor.level] = ancestor.id
            if not feature_id:
                # Add the Parent in (we only need the version required for gis_location onvalidation here)
                results[feature.level] = feature.name
            if names:
                # We need to have entries for all levels
                # (both for address onvalidation & new LocationSelector)
                hierarchy_level_keys = self.hierarchy_level_keys
                for key in hierarchy_level_keys:
                    if not results.has_key(key):
                        results[key] = None
        return results
# -------------------------------------------------------------------------
def update_table_hierarchy_labels(self, tablename=None):
"""
Re-set table options that depend on location_hierarchy
Only update tables which are already defined
"""
levels = ("L1", "L2", "L3", "L4", "L5")
labels = self.get_location_hierarchy()
db = current.db
if tablename and tablename in db:
# Update the specific table which has just been defined
table = db[tablename]
if tablename == "gis_location":
labels["L0"] = current.messages.COUNTRY
table.level.requires = \
IS_NULL_OR(IS_IN_SET(labels))
else:
for level in levels:
table[level].label = labels[level]
else:
# Do all Tables which are already defined
# gis_location
if "gis_location" in db:
table = db.gis_location
table.level.requires = \
IS_NULL_OR(IS_IN_SET(labels))
# These tables store location hierarchy info for XSLT export.
# Labels are used for PDF & XLS Reports
tables = ["org_office",
#"pr_person",
"pr_address",
"cr_shelter",
"asset_asset",
#"hms_hospital",
]
for tablename in tables:
if tablename in db:
table = db[tablename]
for level in levels:
table[level].label = labels[level]
# -------------------------------------------------------------------------
@staticmethod
def set_config(config_id=None, force_update_cache=False):
"""
Reads the specified GIS config from the DB, caches it in response.
Passing in a false or non-existent id will cause the personal config,
if any, to be used, else the site config (uuid SITE_DEFAULT), else
their fallback values defined in this class.
If force_update_cache is true, the config will be read and cached in
response even if the specified config is the same as what's already
cached. Used when the config was just written.
The config itself will be available in response.s3.gis.config.
Scalar fields from the gis_config record and its linked
gis_projection record have the same names as the fields in their
tables and can be accessed as response.s3.gis.<fieldname>.
Returns the id of the config it actually used, if any.
@param: config_id. use '0' to set the SITE_DEFAULT
@ToDo: Merge configs for Event
"""
_gis = current.response.s3.gis
# If an id has been supplied, try it first. If it matches what's in
# response, there's no work to do.
if config_id and not force_update_cache and \
_gis.config and \
_gis.config.id == config_id:
return
db = current.db
s3db = current.s3db
ctable = s3db.gis_config
mtable = s3db.gis_marker
ptable = s3db.gis_projection
stable = s3db.gis_symbology
fields = [ctable.id,
ctable.default_location_id,
ctable.region_location_id,
ctable.geocoder,
ctable.lat_min,
ctable.lat_max,
ctable.lon_min,
ctable.lon_max,
ctable.zoom,
ctable.lat,
ctable.lon,
ctable.pe_id,
ctable.symbology_id,
ctable.wmsbrowser_url,
ctable.wmsbrowser_name,
ctable.zoom_levels,
mtable.image,
mtable.height,
mtable.width,
ptable.epsg,
ptable.proj4js,
ptable.maxExtent,
ptable.units,
]
cache = Storage()
row = None
rows = None
if config_id:
# Merge this one with the Site Default
query = (ctable.id == config_id) | \
(ctable.uuid == "SITE_DEFAULT")
# May well not be complete, so Left Join
left = [ptable.on(ptable.id == ctable.projection_id),
stable.on(stable.id == ctable.symbology_id),
mtable.on(mtable.id == stable.marker_id),
]
rows = db(query).select(*fields,
left=left,
orderby=ctable.pe_type,
limitby=(0, 2))
if len(rows) == 1:
# The requested config must be invalid, so just use site default
row = rows.first()
elif config_id is 0:
# Use site default
query = (ctable.uuid == "SITE_DEFAULT") & \
(mtable.id == stable.marker_id) & \
(stable.id == ctable.symbology_id) & \
(ptable.id == ctable.projection_id)
row = db(query).select(*fields,
limitby=(0, 1)).first()
if not row:
# No configs found at all
_gis.config = cache
return cache
# If no id supplied, extend the site config with any personal or OU configs
if not rows and not row:
# Read personalised config, if available.
auth = current.auth
if auth.is_logged_in():
pe_id = auth.user.pe_id
# OU configs
# List of roles to check (in order)
roles = ["Staff", "Volunteer"]
role_paths = s3db.pr_get_role_paths(pe_id, roles=roles)
# Unordered list of PEs
pes = []
for role in roles:
if role in role_paths:
# @ToDo: Allow selection of which OU a person's config should inherit from for disambiguation
# - store in s3db.gis_config?
# - needs a method in gis_config_form_setup() to populate the dropdown from the OUs (in this person's Path for this person's, would have to be a dynamic lookup for Admins)
pes = role_paths[role].nodes()
# Staff don't check Volunteer's OUs
break
query = (ctable.uuid == "SITE_DEFAULT") | \
((ctable.pe_id == pe_id) & \
(ctable.pe_default != False))
len_pes = len(pes)
if len_pes == 1:
query |= (ctable.pe_id == pes[0])
elif len_pes:
query |= (ctable.pe_id.belongs(pes))
# Personal may well not be complete, so Left Join
left = [ptable.on(ptable.id == ctable.projection_id),
stable.on(stable.id == ctable.symbology_id),
mtable.on(mtable.id == stable.marker_id),
]
# Order by pe_type (defined in gis_config)
# @ToDo: Do this purely from the hierarchy
rows = db(query).select(*fields,
left=left,
orderby=ctable.pe_type)
if len(rows) == 1:
row = rows.first()
if rows and not row:
# Merge Configs
cache["ids"] = []
for row in rows:
config = row["gis_config"]
if not config_id:
config_id = config.id
cache["ids"].append(config.id)
for key in config:
if key in ["delete_record", "gis_layer_config", "gis_menu", "update_record"]:
continue
if key not in cache or cache[key] is None:
cache[key] = config[key]
if "epsg" not in cache or cache["epsg"] is None:
projection = row["gis_projection"]
for key in ["epsg", "units", "maxExtent", "proj4js"]:
cache[key] = projection[key] if key in projection \
else None
if "marker_image" not in cache or \
cache["marker_image"] is None:
marker = row["gis_marker"]
for key in ["image", "height", "width"]:
cache["marker_%s" % key] = marker[key] if key in marker \
else None
# Add NULL values for any that aren't defined, to avoid KeyErrors
for key in ["epsg", "units", "proj4js", "maxExtent",
"marker_image", "marker_height", "marker_width",
]:
if key not in cache:
cache[key] = None
if not row:
# No personal config or not logged in. Use site default.
query = (ctable.uuid == "SITE_DEFAULT") & \
(mtable.id == stable.marker_id) & \
(stable.id == ctable.symbology_id) & \
(ptable.id == ctable.projection_id)
row = db(query).select(*fields,
limitby=(0, 1)).first()
if not row:
# No configs found at all
_gis.config = cache
return cache
if not cache:
# We had a single row
config = row["gis_config"]
config_id = config.id
cache["ids"] = [config_id]
projection = row["gis_projection"]
marker = row["gis_marker"]
for key in config:
cache[key] = config[key]
for key in ["epsg", "maxExtent", "proj4js", "units"]:
cache[key] = projection[key] if key in projection else None
for key in ["image", "height", "width"]:
cache["marker_%s" % key] = marker[key] if key in marker \
else None
# Store the values
_gis.config = cache
return cache
# -------------------------------------------------------------------------
@staticmethod
def get_config():
"""
Returns the current GIS config structure.
@ToDo: Config() class
"""
gis = current.response.s3.gis
if not gis.config:
# Ask set_config to put the appropriate config in response.
if current.session.s3.gis_config_id:
GIS.set_config(current.session.s3.gis_config_id)
else:
GIS.set_config()
return gis.config
# -------------------------------------------------------------------------
    def get_location_hierarchy(self, level=None, location=None):
        """
            Returns the location hierarchy and it's labels

            @param: level - a specific level for which to lookup the label
            @param: location - the location_id to lookup the location for
                               currently only the actual location is supported
                               @ToDo: Do a search of parents to allow this
                                      lookup for any location
        """
        # Cached levels (populated at the bottom of this method)
        _levels = self.hierarchy_levels
        _location = location
        if not location and _levels:
            # Use cached value
            if level:
                if level in _levels:
                    return _levels[level]
                else:
                    # Unknown level: echo the key back
                    return level
            else:
                return _levels
        COUNTRY = current.messages.COUNTRY
        if level == "L0":
            # L0 label is fixed, no db lookup needed
            return COUNTRY
        db = current.db
        s3db = current.s3db
        table = s3db.gis_hierarchy
        fields = [table.uuid,
                  table.L1,
                  table.L2,
                  table.L3,
                  table.L4,
                  table.L5,
                  ]
        query = (table.uuid == "SITE_DEFAULT")
        if not location:
            # Fall back to the active config's Region
            config = GIS.get_config()
            location = config.region_location_id
        if location:
            # Try the Region, but ensure we have the fallback available in a single query
            query = query | (table.location_id == location)
        rows = db(query).select(cache=s3db.cache,
                                *fields)
        if len(rows) > 1:
            # Remove the Site Default
            filter = lambda row: row.uuid == "SITE_DEFAULT"
            rows.exclude(filter)
        elif not rows:
            # prepop hasn't run yet
            if level:
                return level
            # Build a placeholder hierarchy using the raw level keys
            levels = OrderedDict()
            hierarchy_level_keys = self.hierarchy_level_keys
            for key in hierarchy_level_keys:
                if key == "L0":
                    levels[key] = COUNTRY
                else:
                    levels[key] = key
            return levels
        T = current.T
        row = rows.first()
        if level:
            try:
                return T(row[level])
            except:
                # Level not present on the row: echo the key back
                return level
        else:
            levels = OrderedDict()
            hierarchy_level_keys = self.hierarchy_level_keys
            for key in hierarchy_level_keys:
                if key == "L0":
                    levels[key] = COUNTRY
                elif key in row and row[key]:
                    # Only include rows with values
                    levels[key] = str(T(row[key]))
            if not _location:
                # Cache the value
                self.hierarchy_levels = levels
            if level:
                return levels[level]
            else:
                return levels
# -------------------------------------------------------------------------
def get_strict_hierarchy(self, location=None):
"""
Returns the strict hierarchy value from the current config.
@param: location - the location_id of the record to check
"""
s3db = current.s3db
table = s3db.gis_hierarchy
# Read the system default
# @ToDo: Check for an active gis_config region?
query = (table.uuid == "SITE_DEFAULT")
if location:
# Try the Location's Country, but ensure we have the fallback available in a single query
query = query | (table.location_id == self.get_parent_country(location))
rows = current.db(query).select(table.uuid,
table.strict_hierarchy,
cache=s3db.cache)
if len(rows) > 1:
# Remove the Site Default
filter = lambda row: row.uuid == "SITE_DEFAULT"
rows.exclude(filter)
row = rows.first()
if row:
strict = row.strict_hierarchy
else:
# Pre-pop hasn't run yet
return False
return strict
# -------------------------------------------------------------------------
def get_max_hierarchy_level(self):
"""
Returns the deepest level key (i.e. Ln) in the current hierarchy.
- used by gis_location_onvalidation()
"""
location_hierarchy = self.get_location_hierarchy()
return max(location_hierarchy)
# -------------------------------------------------------------------------
def get_all_current_levels(self, level=None):
"""
Get the current hierarchy levels plus non-hierarchy levels.
"""
all_levels = OrderedDict()
all_levels.update(self.get_location_hierarchy())
#T = current.T
#all_levels["GR"] = T("Location Group")
#all_levels["XX"] = T("Imported")
if level:
try:
return all_levels[level]
except Exception, e:
return level
else:
return all_levels
# -------------------------------------------------------------------------
@staticmethod
def get_countries(key_type="id"):
"""
Returns country code or L0 location id versus name for all countries.
The lookup is cached in the session
If key_type is "code", these are returned as an OrderedDict with
country code as the key. If key_type is "id", then the location id
is the key. In all cases, the value is the name.
"""
session = current.session
if "gis" not in session:
session.gis = Storage()
gis = session.gis
if gis.countries_by_id:
cached = True
else:
cached = False
if not cached:
s3db = current.s3db
table = s3db.gis_location
ttable = s3db.gis_location_tag
query = (table.level == "L0") & \
(ttable.tag == "ISO2") & \
(ttable.location_id == table.id)
countries = current.db(query).select(table.id,
table.name,
ttable.value,
orderby=table.name)
if not countries:
return []
countries_by_id = OrderedDict()
countries_by_code = OrderedDict()
for row in countries:
location = row["gis_location"]
countries_by_id[location.id] = location.name
countries_by_code[row["gis_location_tag"].value] = location.name
# Cache in the session
gis.countries_by_id = countries_by_id
gis.countries_by_code = countries_by_code
if key_type == "id":
return countries_by_id
else:
return countries_by_code
elif key_type == "id":
return gis.countries_by_id
else:
return gis.countries_by_code
# -------------------------------------------------------------------------
@staticmethod
def get_country(key, key_type="id"):
"""
Returns country name for given code or id from L0 locations.
The key can be either location id or country code, as specified
by key_type.
"""
if key:
if current.gis.get_countries(key_type):
if key_type == "id":
return current.session.gis.countries_by_id[key]
else:
return current.session.gis.countries_by_code[key]
return None
# -------------------------------------------------------------------------
    def get_parent_country(self, location, key_type="id"):
        """
            Returns the parent country for a given record

            @param: location: the location or id to search for
            @param: key_type: whether to return an id or code

            @ToDo: Optimise to not use try/except
        """
        if not location:
            return None
        db = current.db
        s3db = current.s3db
        # @ToDo: Avoid try/except here!
        # - separate parameters best as even isinstance is expensive
        try:
            # location is passed as integer (location_id)
            table = s3db.gis_location
            location = db(table.id == location).select(table.id,
                                                       table.path,
                                                       table.level,
                                                       limitby=(0, 1),
                                                       cache=s3db.cache).first()
        except:
            # location is passed as record
            pass
        # NOTE(review): if the id lookup finds no record, location is None
        # here and .level raises AttributeError - presumably callers always
        # pass valid ids; confirm
        if location.level == "L0":
            # Location is itself a country
            if key_type == "id":
                return location.id
            elif key_type == "code":
                # Look up the ISO2 code from the location tags
                ttable = s3db.gis_location_tag
                query = (ttable.tag == "ISO2") & \
                        (ttable.location_id == location.id)
                tag = db(query).select(ttable.value,
                                       limitby=(0, 1)).first()
                try:
                    return tag.value
                except:
                    # No ISO2 tag set for this country
                    return None
        else:
            # Walk the ancestors looking for the L0
            parents = self.get_parents(location.id,
                                       feature=location)
            if parents:
                for row in parents:
                    if row.level == "L0":
                        if key_type == "id":
                            return row.id
                        elif key_type == "code":
                            ttable = s3db.gis_location_tag
                            query = (ttable.tag == "ISO2") & \
                                    (ttable.location_id == row.id)
                            tag = db(query).select(ttable.value,
                                                   limitby=(0, 1)).first()
                            try:
                                return tag.value
                            except:
                                # No ISO2 tag set for this country
                                return None
        return None
# -------------------------------------------------------------------------
def get_default_country(self, key_type="id"):
"""
Returns the default country for the active gis_config
@param: key_type: whether to return an id or code
"""
config = GIS.get_config()
if config.default_location_id:
return self.get_parent_country(config.default_location_id,
key_type=key_type)
return None
# -------------------------------------------------------------------------
    def get_features_in_polygon(self, location, tablename=None, category=None):
        """
            Returns a gluon.sql.Rows of Features within a Polygon.
            The Polygon can be either a WKT string or the ID of a record in the
            gis_location table

            Currently unused.
            @ToDo: Optimise to not use try/except
        """
        from shapely.geos import ReadingError
        from shapely.wkt import loads as wkt_loads
        try:
            # Enable C-based speedups available from 1.2.10+
            from shapely import speedups
            speedups.enable()
        except:
            current.log.info("S3GIS",
                             "Upgrade Shapely for Performance enhancements")
        db = current.db
        s3db = current.s3db
        locations = s3db.gis_location
        try:
            # location is assumed to be an id if it coerces to int
            location_id = int(location)
            # Check that the location is a polygon
            query = (locations.id == location_id)
            location = db(query).select(locations.wkt,
                                        locations.lon_min,
                                        locations.lon_max,
                                        locations.lat_min,
                                        locations.lat_max,
                                        limitby=(0, 1)).first()
            # NOTE(review): if no record matches the id, lon_min is never
            # assigned and the "if lon_min is None" below raises NameError -
            # presumably ids are validated upstream; confirm
            if location:
                wkt = location.wkt
                if wkt and (wkt.startswith("POLYGON") or \
                            wkt.startswith("MULTIPOLYGON")):
                    # ok
                    lon_min = location.lon_min
                    lon_max = location.lon_max
                    lat_min = location.lat_min
                    lat_max = location.lat_max
                else:
                    current.log.error("Location searched within isn't a Polygon!")
                    return None
        except: # @ToDo: need specific exception
            # location is a raw WKT string; no BBOX available
            wkt = location
            if (wkt.startswith("POLYGON") or wkt.startswith("MULTIPOLYGON")):
                # ok
                lon_min = None
            else:
                current.log.error("This isn't a Polygon!")
                return None
        try:
            polygon = wkt_loads(wkt)
        except: # @ToDo: need specific exception
            current.log.error("Invalid Polygon!")
            return None
        table = s3db[tablename]
        if "location_id" not in table.fields():
            # @ToDo: Add any special cases to be able to find the linked location
            current.log.error("This table doesn't have a location_id!")
            return None
        query = (table.location_id == locations.id)
        if "deleted" in table.fields:
            query &= (table.deleted == False)
        # @ToDo: Check AAA (do this as a resource filter?)
        features = db(query).select(locations.wkt,
                                    locations.lat,
                                    locations.lon,
                                    table.ALL)
        output = Rows()
        # @ToDo: provide option to use PostGIS/Spatialite
        # settings = current.deployment_settings
        # if settings.gis.spatialdb and settings.database.db_type == "postgres":
        if lon_min is None:
            # We have no BBOX so go straight to the full geometry check
            for row in features:
                _location = row.gis_location
                wkt = _location.wkt
                if wkt is None:
                    # Fall back to a point geometry from lat/lon
                    lat = _location.lat
                    lon = _location.lon
                    if lat is not None and lon is not None:
                        wkt = self.latlon_to_wkt(lat, lon)
                    else:
                        continue
                try:
                    shape = wkt_loads(wkt)
                    if shape.intersects(polygon):
                        # Save Record
                        output.records.append(row)
                except ReadingError:
                    current.log.error("Error reading wkt of location with id",
                                      value=row.id)
        else:
            # 1st check for Features included within the bbox (faster)
            def in_bbox(row):
                _location = row.gis_location
                return (_location.lon > lon_min) & \
                       (_location.lon < lon_max) & \
                       (_location.lat > lat_min) & \
                       (_location.lat < lat_max)
            for row in features.find(lambda row: in_bbox(row)):
                # Search within this subset with a full geometry check
                # Uses Shapely.
                _location = row.gis_location
                wkt = _location.wkt
                if wkt is None:
                    # Fall back to a point geometry from lat/lon
                    lat = _location.lat
                    lon = _location.lon
                    if lat is not None and lon is not None:
                        wkt = self.latlon_to_wkt(lat, lon)
                    else:
                        continue
                try:
                    shape = wkt_loads(wkt)
                    if shape.intersects(polygon):
                        # Save Record
                        output.records.append(row)
                except ReadingError:
                    current.log.error("Error reading wkt of location with id",
                                      value = row.id)
        return output
# -------------------------------------------------------------------------
def get_features_in_radius(self, lat, lon, radius, tablename=None, category=None):
"""
Returns Features within a Radius (in km) of a LatLon Location
Unused
"""
import math
db = current.db
settings = current.deployment_settings
if settings.gis.spatialdb and settings.database.db_type == "postgres":
# Use PostGIS routine
# The ST_DWithin function call will automatically include a bounding box comparison that will make use of any indexes that are available on the geometries.
# @ToDo: Support optional Category (make this a generic filter?)
import psycopg2
import psycopg2.extras
dbname = settings.database.database
username = settings.database.username
password = <PASSWORD>
host = settings.database.host
port = settings.database.port or "5432"
# Convert km to degrees (since we're using the_geom not the_geog)
radius = math.degrees(float(radius) / RADIUS_EARTH)
connection = psycopg2.connect("dbname=%s user=%s password=%s host=%s port=%s" % (dbname, username, password, host, port))
cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
info_string = "SELECT column_name, udt_name FROM information_schema.columns WHERE table_name = 'gis_location' or table_name = '%s';" % tablename
cursor.execute(info_string)
# @ToDo: Look at more optimal queries for just those fields we need
if tablename:
# Lookup the resource
query_string = cursor.mogrify("SELECT * FROM gis_location, %s WHERE %s.location_id = gis_location.id and ST_DWithin (ST_GeomFromText ('POINT (%s %s)', 4326), the_geom, %s);" % (tablename, tablename, lat, lon, radius))
else:
# Lookup the raw Locations
query_string = cursor.mogrify("SELECT * FROM gis_location WHERE ST_DWithin (ST_GeomFromText ('POINT (%s %s)', 4326), the_geom, %s);" % (lat, lon, radius))
cursor.execute(query_string)
# @ToDo: Export Rows?
features = []
for record in cursor:
d = dict(record.items())
row = Storage()
# @ToDo: Optional support for Polygons
if tablename:
row.gis_location = Storage()
row.gis_location.id = d["id"]
row.gis_location.lat = d["lat"]
row.gis_location.lon = d["lon"]
row.gis_location.lat_min = d["lat_min"]
row.gis_location.lon_min = d["lon_min"]
row.gis_location.lat_max = d["lat_max"]
row.gis_location.lon_max = d["lon_max"]
row[tablename] = Storage()
row[tablename].id = d["id"]
row[tablename].name = d["name"]
else:
row.name = d["name"]
row.id = d["id"]
row.lat = d["lat"]
row.lon = d["lon"]
row.lat_min = d["lat_min"]
row.lon_min = d["lon_min"]
row.lat_max = d["lat_max"]
row.lon_max = d["lon_max"]
features.append(row)
return features
#elif settings.database.db_type == "mysql":
# Do the calculation in MySQL to pull back only the relevant rows
# Raw MySQL Formula from: http://blog.peoplesdns.com/archives/24
# PI = 3.141592653589793, mysql's pi() function returns 3.141593
#pi = math.pi
#query = """SELECT name, lat, lon, acos(SIN( PI()* 40.7383040 /180 )*SIN( PI()*lat/180 ))+(cos(PI()* 40.7383040 /180)*COS( PI()*lat/180) *COS(PI()*lon/180-PI()* -73.99319 /180))* 3963.191
#AS distance
#FROM gis_location
#WHERE 1=1
#AND 3963.191 * ACOS( (SIN(PI()* 40.7383040 /180)*SIN(PI() * lat/180)) + (COS(PI()* 40.7383040 /180)*cos(PI()*lat/180)*COS(PI() * lon/180-PI()* -73.99319 /180))) < = 1.5
#ORDER BY 3963.191 * ACOS((SIN(PI()* 40.7383040 /180)*SIN(PI()*lat/180)) + (COS(PI()* 40.7383040 /180)*cos(PI()*lat/180)*COS(PI() * lon/180-PI()* -73.99319 /180)))"""
# db.executesql(query)
else:
# Calculate in Python
# Pull back all the rows within a square bounding box (faster than checking all features manually)
# Then check each feature within this subset
# http://janmatuschek.de/LatitudeLongitudeBoundingCoordinates
# @ToDo: Support optional Category (make this a generic filter?)
# shortcuts
radians = math.radians
degrees = math.degrees
MIN_LAT = radians(-90) # -PI/2
MAX_LAT = radians(90) # PI/2
MIN_LON = radians(-180) # -PI
MAX_LON = radians(180) # PI
# Convert to radians for the calculation
r = float(radius) / RADIUS_EARTH
radLat = radians(lat)
radLon = radians(lon)
# Calculate the bounding box
minLat = radLat - r
maxLat = radLat + r
if (minLat > MIN_LAT) and (maxLat < MAX_LAT):
deltaLon = math.asin(math.sin(r) / math.cos(radLat))
minLon = radLon - deltaLon
if (minLon < MIN_LON):
minLon += 2 * math.pi
maxLon = radLon + deltaLon
if (maxLon > MAX_LON):
maxLon -= 2 * math.pi
else:
# Special care for Poles & 180 Meridian:
# http://janmatuschek.de/LatitudeLongitudeBoundingCoordinates#PolesAnd180thMeridian
minLat = max(minLat, MIN_LAT)
maxLat = min(maxLat, MAX_LAT)
minLon = MIN_LON
maxLon = MAX_LON
# Convert back to degrees
minLat = degrees(minLat)
minLon = degrees(minLon)
maxLat = degrees(maxLat)
maxLon = degrees(maxLon)
# shortcut
locations = db.gis_location
query = (locations.lat > minLat) & (locations.lat < maxLat) & (locations.lon > minLon) & (locations.lon < maxLon)
deleted = (locations.deleted == False)
empty = (locations.lat != None) & (locations.lon != None)
query = deleted & empty & query
if tablename:
# Lookup the resource
table = current.s3db[tablename]
query &= (table.location_id == locations.id)
records = db(query).select(table.ALL,
locations.id,
locations.name,
locations.level,
locations.lat,
locations.lon,
locations.lat_min,
locations.lon_min,
locations.lat_max,
locations.lon_max)
else:
# Lookup the raw Locations
records = db(query).select(locations.id,
locations.name,
locations.level,
locations.lat,
locations.lon,
locations.lat_min,
locations.lon_min,
locations.lat_max,
locations.lon_max)
features = Rows()
for record in records:
# Calculate the Great Circle distance
if tablename:
distance = self.greatCircleDistance(lat,
lon,
record.gis_location.lat,
record.gis_location.lon)
else:
distance = self.greatCircleDistance(lat,
lon,
record.lat,
record.lon)
if distance < radius:
features.records.append(record)
else:
# skip
continue
return features
# -------------------------------------------------------------------------
def get_latlon(self, feature_id, filter=False):
"""
Returns the Lat/Lon for a Feature
used by display_feature() in gis controller
@param feature_id: the feature ID
@param filter: Filter out results based on deployment_settings
"""
db = current.db
table = db.gis_location
feature = db(table.id == feature_id).select(table.id,
table.lat,
table.lon,
table.parent,
table.path,
limitby=(0, 1)).first()
# Zero is an allowed value, hence explicit test for None.
if "lon" in feature and "lat" in feature and \
(feature.lat is not None) and (feature.lon is not None):
return dict(lon=feature.lon, lat=feature.lat)
else:
# Step through ancestors to first with lon, lat.
parents = self.get_parents(feature.id, feature=feature)
if parents:
for row in parents:
lon = row.get("lon", None)
lat = row.get("lat", None)
if (lon is not None) and (lat is not None):
return dict(lon=lon, lat=lat)
# Invalid feature_id
return None
# -------------------------------------------------------------------------
@staticmethod
def get_marker(controller=None,
function=None,
filter=None,
):
"""
Returns a Marker dict
- called by S3REST: S3Resource.export_tree() for non-geojson resources
"""
marker = None
if controller and function:
# Lookup marker in the gis_feature table
db = current.db
s3db = current.s3db
ftable = s3db.gis_layer_feature
ltable = s3db.gis_layer_symbology
mtable = s3db.gis_marker
try:
symbology_id = current.response.s3.gis.config.symbology_id
except:
# Config not initialised yet
config = GIS.get_config()
symbology_id = config.symbology_id
query = (ftable.controller == controller) & \
(ftable.function == function) & \
(ftable.layer_id == ltable.layer_id) & \
(ltable.symbology_id == symbology_id) & \
(ltable.marker_id == mtable.id)
if filter:
query &= (ftable.filter == filter)
marker = db(query).select(mtable.image,
mtable.height,
mtable.width,
ltable.gps_marker).first()
if marker:
_marker = marker["gis_marker"]
marker = dict(image=_marker.image,
height=_marker.height,
width=_marker.width,
gps_marker=marker["gis_layer_symbology"].gps_marker
)
if not marker:
# Default
marker = Marker().as_dict()
return marker
# -------------------------------------------------------------------------
@staticmethod
def get_location_data(resource):
"""
Returns the locations, markers and popup tooltips for an XML export
e.g. Feature Layers or Search results (Feature Resources)
e.g. Exports in KML, GeoRSS or GPX format
Called by S3REST: S3Resource.export_tree()
@param: resource - S3Resource instance (required)
"""
NONE = current.messages["NONE"]
#if DEBUG:
# start = datetime.datetime.now()
db = current.db
s3db = current.s3db
request = current.request
get_vars = request.get_vars
format = current.auth.permission.format
ftable = s3db.gis_layer_feature
layer = None
layer_id = get_vars.get("layer", None)
if layer_id:
# Feature Layer
query = (ftable.id == layer_id)
layer = db(query).select(ftable.trackable,
ftable.polygons,
ftable.popup_label,
ftable.popup_fields,
ftable.attr_fields,
limitby=(0, 1)).first()
else:
# e.g. KML, geoRSS or GPX export
# e.g. Search results loaded as a Feature Resource layer
# e.g. Volunteer Layer in Vulnerability module
controller = request.controller
function = request.function
query = (ftable.controller == controller) & \
(ftable.function == function)
layers = db(query).select(ftable.style_default,
ftable.trackable,
ftable.polygons,
ftable.popup_label,
ftable.popup_fields,
ftable.attr_fields,
)
if len(layers) > 1:
layers.exclude(lambda row: row.style_default == False)
if len(layers) > 1:
# We can't provide details for the whole layer, but need to do a per-record check
return None
if layers:
layer = layers.first()
attr_fields = get_vars.get("attr", [])
if attr_fields:
attr_fields = attr_fields.split(",")
popup_fields = get_vars.get("popup", [])
if popup_fields:
popup_fields = popup_fields.split(",")
if layer:
popup_label = layer.popup_label
if not popup_fields:
popup_fields = layer.popup_fields or []
if not attr_fields:
attr_fields = layer.attr_fields or []
trackable = layer.trackable
polygons = layer.polygons
else:
popup_label = ""
popup_fields = ["name"]
trackable = False
polygons = False
table = resource.table
tablename = resource.tablename
pkey = table._id.name
markers = {}
tooltips = {}
attributes = {}
_pkey = table[pkey]
# Ensure there are no ID represents to confuse things
_pkey.represent = None
if format == "geojson":
if popup_fields or attr_fields:
# Build the Attributes &/Popup Tooltips now so that representations can be
# looked-up in bulk rather than as a separate lookup per record
if popup_fields:
tips = {}
label_off = get_vars.get("label_off", None)
if popup_label and not label_off:
_tooltip = " (%s)" % current.T(popup_label)
else:
_tooltip = ""
attr = {}
fields = list(set(popup_fields + attr_fields))
if pkey not in fields:
fields.insert(0, pkey)
data = resource.select(fields,
limit=None,
represent=True)
rfields = data["rfields"]
attr_cols = {}
_popup_cols = {}
for f in rfields:
fname = f.fname
selector = f.selector
if fname in popup_fields:
_popup_cols[fname] = f.colname
elif selector in popup_fields:
_popup_cols[selector] = f.colname
if fname in attr_fields or selector in attr_fields:
fieldname = f.colname
tname, fname = fieldname.split(".")
try:
ftype = db[tname][fname].type
except AttributeError:
# FieldMethod
ftype = None
attr_cols[fieldname] = (ftype, fname)
# Want to control sort order
popup_cols = []
for f in popup_fields:
colname = _popup_cols.get(f, None)
if colname:
popup_cols.append(colname)
rows = data["rows"]
_pkey = str(_pkey)
for row in rows:
record_id = int(row[_pkey])
if attr_cols:
attribute = {}
for fieldname in attr_cols:
represent = row[fieldname]
if represent and represent != NONE:
# Skip empty fields
_attr = attr_cols[fieldname]
ftype = _attr[0]
if ftype == "integer":
# Attributes should be numbers not Strings
try:
represent = int(represent.replace(",", ""))
except:
# @ToDo: Don't assume this i18n formatting...better to have no represent & then bypass the s3_unicode in select too
# (although we *do* want the represent in the tooltips!)
pass
if ftype == "double":
# Attributes should be numbers not Strings
try:
float_represent = float(represent.replace(",", ""))
int_represent = int(float_represent)
if int_represent == float_represent:
represent = int_represent
else:
represent = float_represent
except:
# @ToDo: Don't assume this i18n formatting...better to have no represent & then bypass the s3_unicode in select too
# (although we *do* want the represent in the tooltips!)
pass
attribute[_attr[1]] = represent
attr[record_id] = attribute
if popup_cols:
tooltip = _tooltip
first = True
for fieldname in popup_cols:
represent = row[fieldname]
if represent and represent != NONE:
represent = s3_unicode(represent)
# Skip empty fields
if first:
tooltip = "%s%s" % (represent, tooltip)
first = False
else:
tooltip = "%s<br />%s" % (tooltip, represent)
tips[record_id] = tooltip
if attr_fields:
attributes[tablename] = attr
if popup_fields:
tooltips[tablename] = tips
#if DEBUG:
# end = datetime.datetime.now()
# duration = end - start
# duration = "{:.2f}".format(duration.total_seconds())
# if layer_id:
# query = (ftable.id == layer_id)
# layer_name = db(query).select(ftable.name,
# limitby=(0, 1)).first().name
# else:
# layer_name = "Unknown"
# _debug("Attributes/Tooltip lookup of layer %s completed in %s seconds" % \
# (layer_name, duration))
_markers = get_vars.get("markers", None)
if _markers:
# Add a per-feature Marker
marker_fn = s3db.get_config(tablename, "marker_fn")
if marker_fn:
for record in resource:
markers[record[pkey]] = marker_fn(record)
else:
# No configuration found so use default marker for all
c, f = tablename.split("_", 1)
markers = GIS.get_marker(c, f)
markers[tablename] = markers
else:
# KML, GeoRSS or GPX
marker_fn = s3db.get_config(tablename, "marker_fn")
if marker_fn:
# Add a per-feature Marker
for record in resource:
markers[record[pkey]] = marker_fn(record)
else:
# No configuration found so use default marker for all
c, f = tablename.split("_", 1)
markers = GIS.get_marker(c, f)
markers[tablename] = markers
# Lookup the LatLons now so that it can be done as a single
# query rather than per record
#if DEBUG:
# start = datetime.datetime.now()
latlons = {}
wkts = {}
geojsons = {}
gtable = s3db.gis_location
if trackable:
# Use S3Track
ids = resource._ids
# Ensure IDs in ascending order
ids.sort()
try:
tracker = S3Trackable(table, record_ids=ids)
except SyntaxError:
# This table isn't trackable
pass
else:
_latlons = tracker.get_location(_fields=[gtable.lat,
gtable.lon])
index = 0
for _id in ids:
_location = _latlons[index]
latlons[_id] = (_location.lat, _location.lon)
index += 1
if not latlons:
if "location_id" in table.fields:
join = True
query = (table.id.belongs(resource._ids)) & \
(table.location_id == gtable.id)
elif "site_id" in table.fields:
join = True
stable = s3db.org_site
query = (table.id.belongs(resource._ids)) & \
(table.site_id == stable.site_id) & \
(stable.location_id == gtable.id)
elif tablename == "gis_location":
join = False
query = (table.id.belongs(resource._ids))
else:
# Can't display this resource on the Map
return None
if polygons:
settings = current.deployment_settings
tolerance = settings.get_gis_simplify_tolerance()
if settings.get_gis_spatialdb():
if format == "geojson":
# Do the Simplify & GeoJSON direct from the DB
rows = db(query).select(table.id,
gtable.the_geom.st_simplify(tolerance).st_asgeojson(precision=4).with_alias("geojson"))
for row in rows:
geojsons[row[tablename].id] = row.geojson
else:
# Do the Simplify direct from the DB
rows = db(query).select(table.id,
gtable.the_geom.st_simplify(tolerance).st_astext().with_alias("wkt"))
for row in rows:
wkts[row[tablename].id] = row.wkt
else:
rows = db(query).select(table.id,
gtable.wkt)
simplify = GIS.simplify
if format == "geojson":
# Simplify the polygon to reduce download size
if join:
for row in rows:
geojson = simplify(row["gis_location"].wkt,
tolerance=tolerance,
output="geojson")
if geojson:
geojsons[row[tablename].id] = geojson
else:
for row in rows:
geojson = simplify(row.wkt,
tolerance=tolerance,
output="geojson")
if geojson:
geojsons[row.id] = geojson
else:
# Simplify the polygon to reduce download size
# & also to work around the recursion limit in libxslt
# http://blog.gmane.org/gmane.comp.python.lxml.devel/day=20120309
if join:
for row in rows:
wkt = simplify(row["gis_location"].wkt)
if wkt:
wkts[row[tablename].id] = wkt
else:
for row in rows:
wkt = simplify(row.wkt)
if wkt:
wkts[row.id] = wkt
else:
# Points
rows = db(query).select(table.id,
gtable.lat,
gtable.lon)
if join:
for row in rows:
_location = row["gis_location"]
latlons[row[tablename].id] = (_location.lat, _location.lon)
else:
for row in rows:
latlons[row.id] = (row.lat, row.lon)
_latlons = {}
if latlons:
_latlons[tablename] = latlons
_wkts = {}
_wkts[tablename] = wkts
_geojsons = {}
_geojsons[tablename] = geojsons
#if DEBUG:
# end = datetime.datetime.now()
# duration = end - start
# duration = "{:.2f}".format(duration.total_seconds())
# _debug("latlons lookup of layer %s completed in %s seconds" % \
# (layer_name, duration))
# Used by S3XML's gis_encode()
return dict(latlons = _latlons,
wkts = _wkts,
geojsons = _geojsons,
markers = markers,
tooltips = tooltips,
attributes = attributes,
)
# -------------------------------------------------------------------------
@staticmethod
def get_shapefile_geojson(resource):
"""
Lookup Shapefile Layer polygons once per layer and not per-record
Called by S3REST: S3Resource.export_tree()
@ToDo: Vary simplification level & precision by Zoom level
- store this in the style?
"""
db = current.db
id = resource._ids[0]
tablename = "gis_layer_shapefile_%s" % id
table = db[tablename]
query = resource.get_query()
fields = []
fappend = fields.append
for f in table.fields:
if f not in ("layer_id", "lat", "lon"):
fappend(f)
attributes = {}
geojsons = {}
settings = current.deployment_settings
tolerance = settings.get_gis_simplify_tolerance()
if settings.get_gis_spatialdb():
# Do the Simplify & GeoJSON direct from the DB
fields.remove("the_geom")
fields.remove("wkt")
_fields = [table[f] for f in fields]
rows = db(query).select(table.the_geom.st_simplify(tolerance).st_asgeojson(precision=4).with_alias("geojson"),
*_fields)
for row in rows:
_row = row[tablename]
id = _row.id
geojsons[id] = row.geojson
_attributes = {}
for f in fields:
if f not in ("id"):
_attributes[f] = _row[f]
attributes[id] = _attributes
else:
_fields = [table[f] for f in fields]
rows = db(query).select(*_fields)
simplify = GIS.simplify
for row in rows:
# Simplify the polygon to reduce download size
geojson = simplify(row.wkt, tolerance=tolerance,
output="geojson")
id = row.id
if geojson:
geojsons[id] = geojson
_attributes = {}
for f in fields:
if f not in ("id", "wkt"):
_attributes[f] = row[f]
attributes[id] = _attributes
_attributes = {}
_attributes[tablename] = attributes
_geojsons = {}
_geojsons[tablename] = geojsons
# return 'locations'
return dict(attributes = _attributes,
geojsons = _geojsons)
# -------------------------------------------------------------------------
@staticmethod
def get_theme_geojson(resource):
"""
Lookup Theme Layer polygons once per layer and not per-record
Called by S3REST: S3Resource.export_tree()
@ToDo: Vary precision by Lx
- store this (& tolerance map) in the style?
"""
s3db = current.s3db
tablename = "gis_theme_data"
table = s3db.gis_theme_data
gtable = s3db.gis_location
query = (table.id.belongs(resource._ids)) & \
(table.location_id == gtable.id)
geojsons = {}
# @ToDo: How to get the tolerance to vary by level?
# - add Stored Procedure?
#if current.deployment_settings.get_gis_spatialdb():
# # Do the Simplify & GeoJSON direct from the DB
# rows = current.db(query).select(table.id,
# gtable.the_geom.st_simplify(0.01).st_asgeojson(precision=4).with_alias("geojson"))
# for row in rows:
# geojsons[row["gis_theme_data.id"]] = row.geojson
#else:
rows = current.db(query).select(table.id,
gtable.level,
gtable.wkt)
simplify = GIS.simplify
tolerance = {"L0": 0.01,
"L1": 0.005,
"L2": 0.00125,
"L3": 0.000625,
"L4": 0.0003125,
"L5": 0.00015625,
}
for row in rows:
grow = row.gis_location
# Simplify the polygon to reduce download size
geojson = simplify(grow.wkt,
tolerance=tolerance[grow.level],
output="geojson")
if geojson:
geojsons[row["gis_theme_data.id"]] = geojson
_geojsons = {}
_geojsons[tablename] = geojsons
# return 'locations'
return dict(geojsons = _geojsons)
# -------------------------------------------------------------------------
@staticmethod
def greatCircleDistance(lat1, lon1, lat2, lon2, quick=True):
"""
Calculate the shortest distance (in km) over the earth's sphere between 2 points
Formulae from: http://www.movable-type.co.uk/scripts/latlong.html
(NB We could also use PostGIS functions, where possible, instead of this query)
"""
import math
# shortcuts
cos = math.cos
sin = math.sin
radians = math.radians
if quick:
# Spherical Law of Cosines (accurate down to around 1m & computationally quick)
lat1 = radians(lat1)
lat2 = radians(lat2)
lon1 = radians(lon1)
lon2 = radians(lon2)
distance = math.acos(sin(lat1) * sin(lat2) + cos(lat1) * cos(lat2) * cos(lon2 - lon1)) * RADIUS_EARTH
return distance
else:
# Haversine
#asin = math.asin
sqrt = math.sqrt
pow = math.pow
dLat = radians(lat2 - lat1)
dLon = radians(lon2 - lon1)
a = pow(sin(dLat / 2), 2) + cos(radians(lat1)) * cos(radians(lat2)) * pow(sin(dLon / 2), 2)
c = 2 * math.atan2(sqrt(a), sqrt(1 - a))
#c = 2 * asin(sqrt(a)) # Alternate version
# Convert radians to kilometers
distance = RADIUS_EARTH * c
return distance
# -------------------------------------------------------------------------
    @staticmethod
    def create_poly(feature):
        """
        Create a .poly file for OpenStreetMap exports
        http://wiki.openstreetmap.org/wiki/Osmosis/Polygon_Filter_File_Format

        @param feature: a gis_location Row; .name and .id are read, and
                        .wkt is used if present (otherwise looked up from
                        the gis_location table)
        @return: None on success, or an error message string on failure
                 (the error is also logged)

        NOTE(review): only exterior rings are written - interior rings
        (holes) are ignored; confirm this is acceptable for the use case
        """

        from shapely.wkt import loads as wkt_loads

        try:
            # Enable C-based speedups available from 1.2.10+
            from shapely import speedups
            speedups.enable()
        except:
            current.log.info("S3GIS",
                             "Upgrade Shapely for Performance enhancements")

        name = feature.name

        if "wkt" in feature:
            wkt = feature.wkt
        else:
            # WKT not included by default in feature, so retrieve this now
            table = current.s3db.gis_location
            wkt = current.db(table.id == feature.id).select(table.wkt,
                                                            limitby=(0, 1)
                                                            ).first().wkt

        try:
            shape = wkt_loads(wkt)
        except:
            error = "Invalid WKT: %s" % name
            current.log.error(error)
            return error

        # Only (Multi)Polygons can be expressed as .poly files
        geom_type = shape.geom_type
        if geom_type == "MultiPolygon":
            polygons = shape.geoms
        elif geom_type == "Polygon":
            polygons = [shape]
        else:
            error = "Unsupported Geometry: %s, %s" % (name, geom_type)
            current.log.error(error)
            return error

        # Write to web2py/temp if it exists, else the system temp folder
        if os.path.exists(os.path.join(os.getcwd(), "temp")): # use web2py/temp
            TEMP = os.path.join(os.getcwd(), "temp")
        else:
            import tempfile
            TEMP = tempfile.gettempdir()
        filename = "%s.poly" % name
        filepath = os.path.join(TEMP, filename)
        File = open(filepath, "w")
        # 1st line of a .poly file is its name
        File.write("%s\n" % filename)
        count = 1
        # Each polygon section: numeric header, tab-indented lon/lat pairs, END
        for polygon in polygons:
            File.write("%s\n" % count)
            points = polygon.exterior.coords
            for point in points:
                File.write("\t%s\t%s\n" % (point[0], point[1]))
            File.write("END\n")
            count += 1
        # Final END closes the file
        File.write("END\n")
        File.close()

        return None
# -------------------------------------------------------------------------
    @staticmethod
    def export_admin_areas(countries=[],
                           levels=("L0", "L1", "L2", "L3"),
                           format="geojson",
                           simplify=0.01,
                           decimals=4,
                           ):
        """
        Export admin areas to /static/cache for use by interactive web-mapping services
        - designed for use by the Vulnerability Mapping

        @param countries: list of ISO2 country codes
        @param levels: list of which Lx levels to export
        @param format: Only GeoJSON supported for now (may add KML &/or OSM later)
        @param simplify: tolerance for the simplification algorithm. False to disable simplification
        @param decimals: number of decimal points to include in the coordinates

        Output files: countries.geojson for L0, then 1_<L0 id>.geojson,
        2_<L1 id>.geojson, 3_<L2 id>.geojson, 4_<L3 id>.geojson per parent.

        NOTE(review): the default countries=[] list is never mutated, so the
        mutable-default is harmless here; `countries` and `simplify` are
        deliberately rebound between sections and reused by the later ones.
        """

        db = current.db
        s3db = current.s3db
        table = s3db.gis_location
        ifield = table.id
        if countries:
            # Restrict to the requested countries via their ISO2 tag
            ttable = s3db.gis_location_tag
            cquery = (table.level == "L0") & \
                     (table.end_date == None) & \
                     (ttable.location_id == ifield) & \
                     (ttable.tag == "ISO2") & \
                     (ttable.value.belongs(countries))
        else:
            # All countries
            cquery = (table.level == "L0") & \
                     (table.end_date == None) & \
                     (table.deleted != True)

        if current.deployment_settings.get_gis_spatialdb():
            spatial = True
            _field = table.the_geom
            if simplify:
                # Do the Simplify & GeoJSON direct from the DB
                field = _field.st_simplify(simplify).st_asgeojson(precision=decimals).with_alias("geojson")
            else:
                # Do the GeoJSON direct from the DB
                field = _field.st_asgeojson(precision=decimals).with_alias("geojson")
        else:
            spatial = False
            field = table.wkt
            if simplify:
                _simplify = GIS.simplify
            else:
                # No simplification: convert WKT to GeoJSON in Python
                from shapely.wkt import loads as wkt_loads
                from ..geojson import dumps
                try:
                    # Enable C-based speedups available from 1.2.10+
                    from shapely import speedups
                    speedups.enable()
                except:
                    current.log.info("S3GIS",
                                     "Upgrade Shapely for Performance enhancements")

        folder = os.path.join(current.request.folder, "static", "cache")

        features = []
        append = features.append

        # ---- L0: all countries into a single countries.geojson ----
        if "L0" in levels:
            # Reduce the decimals in output by 1
            _decimals = decimals -1
            if spatial:
                if simplify:
                    field = _field.st_simplify(simplify).st_asgeojson(precision=_decimals).with_alias("geojson")
                else:
                    field = _field.st_asgeojson(precision=_decimals).with_alias("geojson")

            # NB rebinds `countries` to a Rows object, reused by L1-L4 below
            countries = db(cquery).select(ifield,
                                          field)
            for row in countries:
                if spatial:
                    id = row["gis_location"].id
                    geojson = row.geojson
                elif simplify:
                    id = row.id
                    wkt = row.wkt
                    if wkt:
                        geojson = _simplify(wkt, tolerance=simplify,
                                            decimals=_decimals,
                                            output="geojson")
                    else:
                        name = db(table.id == id).select(table.name,
                                                         limitby=(0, 1)).first().name
                        print >> sys.stderr, "No WKT: L0 %s %s" % (name, id)
                        continue
                else:
                    id = row.id
                    shape = wkt_loads(row.wkt)
                    # Compact Encoding
                    geojson = dumps(shape, separators=SEPARATORS)
                if geojson:
                    f = dict(type = "Feature",
                             properties = {"id": id},
                             geometry = json.loads(geojson)
                             )
                    append(f)

            if features:
                data = dict(type = "FeatureCollection",
                            features = features
                            )
                # Output to file
                filename = os.path.join(folder, "countries.geojson")
                File = open(filename, "w")
                File.write(json.dumps(data, separators=SEPARATORS))
                File.close()

        # Level-filter queries shared by the L1-L4 sections
        q1 = (table.level == "L1") & \
             (table.deleted != True) & \
             (table.end_date == None)
        q2 = (table.level == "L2") & \
             (table.deleted != True) & \
             (table.end_date == None)
        q3 = (table.level == "L3") & \
             (table.deleted != True) & \
             (table.end_date == None)
        q4 = (table.level == "L4") & \
             (table.deleted != True) & \
             (table.end_date == None)

        # ---- L1: one file per country ----
        if "L1" in levels:
            if "L0" not in levels:
                countries = db(cquery).select(ifield)
            if simplify:
                # We want greater precision when zoomed-in more
                simplify = simplify / 2 # 0.005 with default setting
                if spatial:
                    field = _field.st_simplify(simplify).st_asgeojson(precision=decimals).with_alias("geojson")
            for country in countries:
                if not spatial or "L0" not in levels:
                    _id = country.id
                else:
                    _id = country["gis_location"].id
                query = q1 & (table.parent == _id)
                features = []
                append = features.append
                rows = db(query).select(ifield,
                                        field)
                for row in rows:
                    if spatial:
                        id = row["gis_location"].id
                        geojson = row.geojson
                    elif simplify:
                        id = row.id
                        wkt = row.wkt
                        if wkt:
                            geojson = _simplify(wkt, tolerance=simplify,
                                                decimals=decimals,
                                                output="geojson")
                        else:
                            name = db(table.id == id).select(table.name,
                                                             limitby=(0, 1)).first().name
                            print >> sys.stderr, "No WKT: L1 %s %s" % (name, id)
                            continue
                    else:
                        id = row.id
                        shape = wkt_loads(row.wkt)
                        # Compact Encoding
                        geojson = dumps(shape, separators=SEPARATORS)
                    if geojson:
                        f = dict(type = "Feature",
                                 properties = {"id": id},
                                 geometry = json.loads(geojson)
                                 )
                        append(f)

                if features:
                    data = dict(type = "FeatureCollection",
                                features = features
                                )
                    # Output to file
                    filename = os.path.join(folder, "1_%s.geojson" % _id)
                    File = open(filename, "w")
                    File.write(json.dumps(data, separators=SEPARATORS))
                    File.close()
                else:
                    current.log.debug("No L1 features in %s" % _id)

        # ---- L2: one file per L1 ----
        if "L2" in levels:
            if "L0" not in levels and "L1" not in levels:
                countries = db(cquery).select(ifield)
            if simplify:
                # We want greater precision when zoomed-in more
                simplify = simplify / 4 # 0.00125 with default setting
                if spatial:
                    field = _field.st_simplify(simplify).st_asgeojson(precision=decimals).with_alias("geojson")
            for country in countries:
                if not spatial or "L0" not in levels:
                    id = country.id
                else:
                    id = country["gis_location"].id
                query = q1 & (table.parent == id)
                l1s = db(query).select(ifield)
                for l1 in l1s:
                    query = q2 & (table.parent == l1.id)
                    features = []
                    append = features.append
                    rows = db(query).select(ifield,
                                            field)
                    for row in rows:
                        if spatial:
                            id = row["gis_location"].id
                            geojson = row.geojson
                        elif simplify:
                            id = row.id
                            wkt = row.wkt
                            if wkt:
                                geojson = _simplify(wkt, tolerance=simplify,
                                                    decimals=decimals,
                                                    output="geojson")
                            else:
                                name = db(table.id == id).select(table.name,
                                                                 limitby=(0, 1)).first().name
                                print >> sys.stderr, "No WKT: L2 %s %s" % (name, id)
                                continue
                        else:
                            id = row.id
                            shape = wkt_loads(row.wkt)
                            # Compact Encoding
                            geojson = dumps(shape, separators=SEPARATORS)
                        if geojson:
                            f = dict(type = "Feature",
                                     properties = {"id": id},
                                     geometry = json.loads(geojson)
                                     )
                            append(f)

                    if features:
                        data = dict(type = "FeatureCollection",
                                    features = features
                                    )
                        # Output to file
                        filename = os.path.join(folder, "2_%s.geojson" % l1.id)
                        File = open(filename, "w")
                        File.write(json.dumps(data, separators=SEPARATORS))
                        File.close()
                    else:
                        current.log.debug("No L2 features in %s" % l1.id)

        # ---- L3: one file per L2 ----
        if "L3" in levels:
            if "L0" not in levels and "L1" not in levels and "L2" not in levels:
                countries = db(cquery).select(ifield)
            if simplify:
                # We want greater precision when zoomed-in more
                simplify = simplify / 2 # 0.000625 with default setting
                if spatial:
                    field = _field.st_simplify(simplify).st_asgeojson(precision=decimals).with_alias("geojson")
            for country in countries:
                if not spatial or "L0" not in levels:
                    id = country.id
                else:
                    id = country["gis_location"].id
                query = q1 & (table.parent == id)
                l1s = db(query).select(ifield)
                for l1 in l1s:
                    query = q2 & (table.parent == l1.id)
                    l2s = db(query).select(ifield)
                    for l2 in l2s:
                        query = q3 & (table.parent == l2.id)
                        features = []
                        append = features.append
                        rows = db(query).select(ifield,
                                                field)
                        for row in rows:
                            if spatial:
                                id = row["gis_location"].id
                                geojson = row.geojson
                            elif simplify:
                                id = row.id
                                wkt = row.wkt
                                if wkt:
                                    geojson = _simplify(wkt, tolerance=simplify,
                                                        decimals=decimals,
                                                        output="geojson")
                                else:
                                    name = db(table.id == id).select(table.name,
                                                                     limitby=(0, 1)).first().name
                                    print >> sys.stderr, "No WKT: L3 %s %s" % (name, id)
                                    continue
                            else:
                                id = row.id
                                shape = wkt_loads(row.wkt)
                                # Compact Encoding
                                geojson = dumps(shape, separators=SEPARATORS)
                            if geojson:
                                f = dict(type = "Feature",
                                         properties = {"id": id},
                                         geometry = json.loads(geojson)
                                         )
                                append(f)

                        if features:
                            data = dict(type = "FeatureCollection",
                                        features = features
                                        )
                            # Output to file
                            filename = os.path.join(folder, "3_%s.geojson" % l2.id)
                            File = open(filename, "w")
                            File.write(json.dumps(data, separators=SEPARATORS))
                            File.close()
                        else:
                            current.log.debug("No L3 features in %s" % l2.id)

        # ---- L4: one file per L3 ----
        if "L4" in levels:
            if "L0" not in levels and "L1" not in levels and "L2" not in levels and "L3" not in levels:
                countries = db(cquery).select(ifield)
            if simplify:
                # We want greater precision when zoomed-in more
                simplify = simplify / 2 # 0.0003125 with default setting
                if spatial:
                    field = _field.st_simplify(simplify).st_asgeojson(precision=decimals).with_alias("geojson")
            for country in countries:
                if not spatial or "L0" not in levels:
                    id = country.id
                else:
                    id = country["gis_location"].id
                query = q1 & (table.parent == id)
                l1s = db(query).select(ifield)
                for l1 in l1s:
                    query = q2 & (table.parent == l1.id)
                    l2s = db(query).select(ifield)
                    for l2 in l2s:
                        query = q3 & (table.parent == l2.id)
                        l3s = db(query).select(ifield)
                        for l3 in l3s:
                            query = q4 & (table.parent == l3.id)
                            features = []
                            append = features.append
                            rows = db(query).select(ifield,
                                                    field)
                            for row in rows:
                                if spatial:
                                    id = row["gis_location"].id
                                    geojson = row.geojson
                                elif simplify:
                                    id = row.id
                                    wkt = row.wkt
                                    if wkt:
                                        geojson = _simplify(wkt, tolerance=simplify,
                                                            decimals=decimals,
                                                            output="geojson")
                                    else:
                                        name = db(table.id == id).select(table.name,
                                                                         limitby=(0, 1)).first().name
                                        print >> sys.stderr, "No WKT: L4 %s %s" % (name, id)
                                        continue
                                else:
                                    id = row.id
                                    shape = wkt_loads(row.wkt)
                                    # Compact Encoding
                                    geojson = dumps(shape, separators=SEPARATORS)
                                if geojson:
                                    f = dict(type = "Feature",
                                             properties = {"id": id},
                                             geometry = json.loads(geojson)
                                             )
                                    append(f)

                            if features:
                                data = dict(type = "FeatureCollection",
                                            features = features
                                            )
                                # Output to file
                                filename = os.path.join(folder, "4_%s.geojson" % l3.id)
                                File = open(filename, "w")
                                File.write(json.dumps(data, separators=SEPARATORS))
                                File.close()
                            else:
                                current.log.debug("No L4 features in %s" % l3.id)
# -------------------------------------------------------------------------
def import_admin_areas(self,
source="gadmv1",
countries=[],
levels=["L0", "L1", "L2"]
):
"""
Import Admin Boundaries into the Locations table
@param source - Source to get the data from.
Currently only GADM is supported: http://gadm.org
@param countries - List of ISO2 countrycodes to download data for
defaults to all countries
@param levels - Which levels of the hierarchy to import.
defaults to all 3 supported levels
"""
if source == "gadmv1":
try:
from osgeo import ogr
except:
current.log.error("Unable to import ogr. Please install python-gdal bindings: GDAL-1.8.1+")
return
if "L0" in levels:
self.import_gadm1_L0(ogr, countries=countries)
if "L1" in levels:
self.import_gadm1(ogr, "L1", countries=countries)
if "L2" in levels:
self.import_gadm1(ogr, "L2", countries=countries)
current.log.debug("All done!")
elif source == "gadmv1":
try:
from osgeo import ogr
except:
current.log.error("Unable to import ogr. Please install python-gdal bindings: GDAL-1.8.1+")
return
if "L0" in levels:
self.import_gadm2(ogr, "L0", countries=countries)
if "L1" in levels:
self.import_gadm2(ogr, "L1", countries=countries)
if "L2" in levels:
self.import_gadm2(ogr, "L2", countries=countries)
current.log.debug("All done!")
else:
current.log.warning("Only GADM is currently supported")
return
return
# -------------------------------------------------------------------------
@staticmethod
def import_gadm1_L0(ogr, countries=[]):
"""
Import L0 Admin Boundaries into the Locations table from GADMv1
- designed to be called from import_admin_areas()
- assumes that basic prepop has been done, so that no new records need to be created
@param ogr - The OGR Python module
@param countries - List of ISO2 countrycodes to download data for
defaults to all countries
"""
db = current.db
s3db = current.s3db
ttable = s3db.gis_location_tag
table = db.gis_location
layer = {
"url" : "http://gadm.org/data/gadm_v1_lev0_shp.zip",
"zipfile" : "gadm_v1_lev0_shp.zip",
"shapefile" : "gadm1_lev0",
"codefield" : "ISO2", # This field is used to uniquely identify the L0 for updates
"code2field" : "ISO" # This field is used to uniquely identify the L0 for parenting the L1s
}
# Copy the current working directory to revert back to later
cwd = os.getcwd()
# Create the working directory
TEMP = os.path.join(cwd, "temp")
if not os.path.exists(TEMP): # use web2py/temp/GADMv1 as a cache
import tempfile
TEMP = tempfile.gettempdir()
tempPath = os.path.join(TEMP, "GADMv1")
if not os.path.exists(tempPath):
try:
os.mkdir(tempPath)
except OSError:
current.log.error("Unable to create temp folder %s!" % tempPath)
return
# Set the current working directory
os.chdir(tempPath)
layerName = layer["shapefile"]
# Check if file has already been downloaded
fileName = layer["zipfile"]
if not os.path.isfile(fileName):
# Download the file
from gluon.tools import fetch
url = layer["url"]
current.log.debug("Downloading %s" % url)
try:
file = fetch(url)
except urllib2.URLError, exception:
current.log.error(exception)
return
fp = StringIO(file)
else:
current.log.debug("Using existing file %s" % fileName)
fp = open(fileName)
# Unzip it
current.log.debug("Unzipping %s" % layerName)
import zipfile
myfile = zipfile.ZipFile(fp)
for ext in ["dbf", "prj", "sbn", "sbx", "shp", "shx"]:
fileName = "%s.%s" % (layerName, ext)
file = myfile.read(fileName)
f = open(fileName, "w")
f.write(file)
f.close()
myfile.close()
# Use OGR to read Shapefile
current.log.debug("Opening %s.shp" % layerName)
ds = ogr.Open("%s.shp" % layerName)
if ds is None:
current.log.error("Open failed.\n")
return
lyr = ds.GetLayerByName(layerName)
lyr.ResetReading()
codeField = layer["codefield"]
code2Field = layer["code2field"]
for feat in lyr:
code = feat.GetField(codeField)
if not code:
# Skip the entries which aren't countries
continue
if countries and code not in countries:
# Skip the countries which we're not interested in
continue
geom = feat.GetGeometryRef()
if geom is not None:
if geom.GetGeometryType() == ogr.wkbPoint:
pass
else:
query = (table.id == ttable.location_id) & \
(ttable.tag == "ISO2") & \
(ttable.value == code)
wkt = geom.ExportToWkt()
if wkt.startswith("LINESTRING"):
gis_feature_type = 2
elif wkt.startswith("POLYGON"):
gis_feature_type = 3
elif wkt.startswith("MULTIPOINT"):
gis_feature_type = 4
elif wkt.startswith("MULTILINESTRING"):
gis_feature_type = 5
elif wkt.startswith("MULTIPOLYGON"):
gis_feature_type = 6
elif wkt.startswith("GEOMETRYCOLLECTION"):
gis_feature_type = 7
code2 = feat.GetField(code2Field)
#area = feat.GetField("Shape_Area")
try:
id = db(query).select(table.id,
limitby=(0, 1)).first().id
query = (table.id == id)
db(query).update(gis_feature_type=gis_feature_type,
wkt=wkt)
ttable.insert(location_id = id,
tag = "ISO3",
value = code2)
#ttable.insert(location_id = location_id,
# tag = "area",
# value = area)
except db._adapter.driver.OperationalError, exception:
current.log.error(sys.exc_info[1])
else:
current.log.debug("No geometry\n")
# Close the shapefile
ds.Destroy()
db.commit()
# Revert back to the working directory as before.
os.chdir(cwd)
return
# -------------------------------------------------------------------------
def import_gadm1(self, ogr, level="L1", countries=[]):
"""
Import L1 Admin Boundaries into the Locations table from GADMv1
- designed to be called from import_admin_areas()
- assumes a fresh database with just Countries imported
@param ogr - The OGR Python module
@param level - "L1" or "L2"
@param countries - List of ISO2 countrycodes to download data for
defaults to all countries
"""
if level == "L1":
layer = {
"url" : "http://gadm.org/data/gadm_v1_lev1_shp.zip",
"zipfile" : "gadm_v1_lev1_shp.zip",
"shapefile" : "gadm1_lev1",
"namefield" : "NAME_1",
# Uniquely identify the L1 for updates
"sourceCodeField" : "ID_1",
"edenCodeField" : "GADM1",
# Uniquely identify the L0 for parenting the L1s
"parent" : "L0",
"parentSourceCodeField" : "ISO",
"parentEdenCodeField" : "ISO3",
}
elif level == "L2":
layer = {
"url" : "http://biogeo.ucdavis.edu/data/gadm/gadm_v1_lev2_shp.zip",
"zipfile" : "gadm_v1_lev2_shp.zip",
"shapefile" : "gadm_v1_lev2",
"namefield" : "NAME_2",
# Uniquely identify the L2 for updates
"sourceCodeField" : "ID_2",
"edenCodeField" : "GADM2",
# Uniquely identify the L0 for parenting the L1s
"parent" : "L1",
"parentSourceCodeField" : "ID_1",
"parentEdenCodeField" : "GADM1",
}
else:
current.log.warning("Level %s not supported!" % level)
return
import csv
import shutil
import zipfile
db = current.db
s3db = current.s3db
cache = s3db.cache
table = s3db.gis_location
ttable = s3db.gis_location_tag
csv.field_size_limit(2**20 * 100) # 100 megs
# Not all the data is encoded like this
# (unable to determine encoding - appears to be damaged in source):
# Azerbaijan L1
# Vietnam L1 & L2
ENCODING = "cp1251"
# from http://docs.python.org/library/csv.html#csv-examples
def latin_csv_reader(unicode_csv_data, dialect=csv.excel, **kwargs):
for row in csv.reader(unicode_csv_data):
yield [unicode(cell, ENCODING) for cell in row]
def latin_dict_reader(data, dialect=csv.excel, **kwargs):
reader = latin_csv_reader(data, dialect=dialect, **kwargs)
headers = reader.next()
for r in reader:
yield dict(zip(headers, r))
# Copy the current working directory to revert back to later
cwd = os.getcwd()
# Create the working directory
TEMP = os.path.join(cwd, "temp")
if not os.path.exists(TEMP): # use web2py/temp/GADMv1 as a cache
import tempfile
TEMP = tempfile.gettempdir()
tempPath = os.path.join(TEMP, "GADMv1")
if not os.path.exists(tempPath):
try:
os.mkdir(tempPath)
except OSError:
current.log.error("Unable to create temp folder %s!" % tempPath)
return
# Set the current working directory
os.chdir(tempPath)
# Remove any existing CSV folder to allow the new one to be created
try:
shutil.rmtree("CSV")
except OSError:
# Folder doesn't exist, so should be creatable
pass
layerName = layer["shapefile"]
# Check if file has already been downloaded
fileName = layer["zipfile"]
if not os.path.isfile(fileName):
# Download the file
from gluon.tools import fetch
url = layer["url"]
current.log.debug("Downloading %s" % url)
try:
file = fetch(url)
except urllib2.URLError, exception:
current.log.error(exception)
# Revert back to the working directory as before.
os.chdir(cwd)
return
fp = StringIO(file)
else:
current.log.debug("Using existing file %s" % fileName)
fp = open(fileName)
# Unzip it
current.log.debug("Unzipping %s" % layerName)
myfile = zipfile.ZipFile(fp)
for ext in ["dbf", "prj", "sbn", "sbx", "shp", "shx"]:
fileName = "%s.%s" % (layerName, ext)
file = myfile.read(fileName)
f = open(fileName, "w")
f.write(file)
f.close()
myfile.close()
# Convert to CSV
current.log.debug("Converting %s.shp to CSV" % layerName)
# Simplified version of generic Shapefile Importer:
# http://svn.osgeo.org/gdal/trunk/gdal/swig/python/samples/ogr2ogr.py
bSkipFailures = False
nGroupTransactions = 200
nFIDToFetch = ogr.NullFID
inputFileName = "%s.shp" % layerName
inputDS = ogr.Open(inputFileName, False)
outputFileName = "CSV"
outputDriver = ogr.GetDriverByName("CSV")
outputDS = outputDriver.CreateDataSource(outputFileName, options=[])
# GADM only has 1 layer/source
inputLayer = inputDS.GetLayer(0)
inputFDefn = inputLayer.GetLayerDefn()
# Create the output Layer
outputLayer = outputDS.CreateLayer(layerName)
# Copy all Fields
#papszFieldTypesToString = []
inputFieldCount = inputFDefn.GetFieldCount()
panMap = [-1 for i in range(inputFieldCount)]
outputFDefn = outputLayer.GetLayerDefn()
nDstFieldCount = 0
if outputFDefn is not None:
nDstFieldCount = outputFDefn.GetFieldCount()
for iField in range(inputFieldCount):
inputFieldDefn = inputFDefn.GetFieldDefn(iField)
oFieldDefn = ogr.FieldDefn(inputFieldDefn.GetNameRef(),
inputFieldDefn.GetType())
oFieldDefn.SetWidth(inputFieldDefn.GetWidth())
oFieldDefn.SetPrecision(inputFieldDefn.GetPrecision())
# The field may have been already created at layer creation
iDstField = -1;
if outputFDefn is not None:
iDstField = outputFDefn.GetFieldIndex(oFieldDefn.GetNameRef())
if iDstField >= 0:
panMap[iField] = iDstField
elif outputLayer.CreateField(oFieldDefn) == 0:
# now that we've created a field, GetLayerDefn() won't return NULL
if outputFDefn is None:
outputFDefn = outputLayer.GetLayerDefn()
panMap[iField] = nDstFieldCount
nDstFieldCount = nDstFieldCount + 1
# Transfer features
nFeaturesInTransaction = 0
#iSrcZField = -1
inputLayer.ResetReading()
if nGroupTransactions > 0:
outputLayer.StartTransaction()
while True:
poDstFeature = None
if nFIDToFetch != ogr.NullFID:
# Only fetch feature on first pass.
if nFeaturesInTransaction == 0:
poFeature = inputLayer.GetFeature(nFIDToFetch)
else:
poFeature = None
else:
poFeature = inputLayer.GetNextFeature()
if poFeature is None:
break
nParts = 0
nIters = 1
for iPart in range(nIters):
nFeaturesInTransaction = nFeaturesInTransaction + 1
if nFeaturesInTransaction == nGroupTransactions:
outputLayer.CommitTransaction()
outputLayer.StartTransaction()
nFeaturesInTransaction = 0
poDstFeature = ogr.Feature(outputLayer.GetLayerDefn())
if poDstFeature.SetFromWithMap(poFeature, 1, panMap) != 0:
if nGroupTransactions > 0:
outputLayer.CommitTransaction()
current.log.error("Unable to translate feature %d from layer %s" % \
(poFeature.GetFID(), inputFDefn.GetName()))
# Revert back to the working directory as before.
os.chdir(cwd)
return
poDstGeometry = poDstFeature.GetGeometryRef()
if poDstGeometry is not None:
if nParts > 0:
# For -explodecollections, extract the iPart(th) of the geometry
poPart = poDstGeometry.GetGeometryRef(iPart).Clone()
poDstFeature.SetGeometryDirectly(poPart)
poDstGeometry = poPart
if outputLayer.CreateFeature(poDstFeature) != 0 and \
not bSkipFailures:
if nGroupTransactions > 0:
outputLayer.RollbackTransaction()
# Revert back to the working directory as before.
os.chdir(cwd)
return
if nGroupTransactions > 0:
outputLayer.CommitTransaction()
# Cleanup
outputDS.Destroy()
inputDS.Destroy()
fileName = "%s.csv" % layerName
filePath = os.path.join("CSV", fileName)
os.rename(filePath, fileName)
os.removedirs("CSV")
# Use OGR to read SHP for geometry
current.log.debug("Opening %s.shp" % layerName)
ds = ogr.Open("%s.shp" % layerName)
if ds is None:
current.log.debug("Open failed.\n")
# Revert back to the working directory as before.
os.chdir(cwd)
return
lyr = ds.GetLayerByName(layerName)
lyr.ResetReading()
# Use CSV for Name
current.log.debug("Opening %s.csv" % layerName)
rows = latin_dict_reader(open("%s.csv" % layerName))
nameField = layer["namefield"]
sourceCodeField = layer["sourceCodeField"]
edenCodeField = layer["edenCodeField"]
parentSourceCodeField = layer["parentSourceCodeField"]
parentLevel = layer["parent"]
parentEdenCodeField = layer["parentEdenCodeField"]
parentCodeQuery = (ttable.tag == parentEdenCodeField)
count = 0
for row in rows:
# Read Attributes
feat = lyr[count]
parentCode = feat.GetField(parentSourceCodeField)
query = (table.level == parentLevel) & \
parentCodeQuery & \
(ttable.value == parentCode)
parent = db(query).select(table.id,
ttable.value,
limitby=(0, 1),
cache=cache).first()
if not parent:
# Skip locations for which we don't have a valid parent
current.log.warning("Skipping - cannot find parent with key: %s, value: %s" % \
(parentEdenCodeField, parentCode))
count += 1
continue
if countries:
# Skip the countries which we're not interested in
if level == "L1":
if parent["gis_location_tag"].value not in countries:
#current.log.warning("Skipping %s as not in countries list" % parent["gis_location_tag"].value)
count += 1
continue
else:
# Check grandparent
country = self.get_parent_country(parent.id,
key_type="code")
if country not in countries:
count += 1
continue
# This is got from CSV in order to be able to handle the encoding
name = row.pop(nameField)
name.encode("utf8")
code = feat.GetField(sourceCodeField)
#area = feat.GetField("Shape_Area")
geom = feat.GetGeometryRef()
if geom is not None:
if geom.GetGeometryType() == ogr.wkbPoint:
lat = geom.GetX()
lon = geom.GetY()
id = table.insert(name=name,
level=level,
gis_feature_type=1,
lat=lat,
lon=lon,
parent=parent.id)
ttable.insert(location_id = id,
tag = edenCodeField,
value = code)
# ttable.insert(location_id = id,
# tag = "area",
# value = area)
else:
wkt = geom.ExportToWkt()
if wkt.startswith("LINESTRING"):
gis_feature_type = 2
elif wkt.startswith("POLYGON"):
gis_feature_type = 3
elif wkt.startswith("MULTIPOINT"):
gis_feature_type = 4
elif wkt.startswith("MULTILINESTRING"):
gis_feature_type = 5
elif wkt.startswith("MULTIPOLYGON"):
gis_feature_type = 6
elif wkt.startswith("GEOMETRYCOLLECTION"):
gis_feature_type = 7
id = table.insert(name=name,
level=level,
gis_feature_type=gis_feature_type,
wkt=wkt,
parent=parent.id)
ttable.insert(location_id = id,
tag = edenCodeField,
value = code)
# ttable.insert(location_id = id,
# tag = "area",
# value = area)
else:
current.log.debug("No geometry\n")
count += 1
# Close the shapefile
ds.Destroy()
db.commit()
current.log.debug("Updating Location Tree...")
try:
self.update_location_tree()
except MemoryError:
# If doing all L2s, it can break memory limits
# @ToDo: Check now that we're doing by level
current.log.critical("Memory error when trying to update_location_tree()!")
db.commit()
# Revert back to the working directory as before.
os.chdir(cwd)
return
# -------------------------------------------------------------------------
@staticmethod
def import_gadm2(ogr, level="L0", countries=None):
    """
        Import Admin Boundaries into the Locations table from GADMv2
        - designed to be called from import_admin_areas()
        - assumes that basic prepop has been done, so that no new L0 records
          need to be created

        @param ogr - The OGR Python module
        @param level - "L0", "L1" or "L2"
        @param countries - List of ISO2 countrycodes to download data for
                           defaults to all countries

        @ToDo: Complete this
            - not currently possible to get all data from the 1 file easily
            - no ISO2
            - needs updating for gis_location_tag model
            - only the lowest available levels accessible
            - use GADMv1 for L0, L1, L2 & GADMv2 for specific lower?
    """

    if level == "L0":
        codeField = "ISO2"   # Uniquely identifies the L0 for updates
        code2Field = "ISO"   # Uniquely identifies the L0 for parenting the L1s
    elif level == "L1":
        #nameField = "NAME_1"
        codeField = "ID_1"   # Uniquely identifies the L1 for updates
        code2Field = "ISO"   # Uniquely identifies the L0 for parenting the L1s
        #parent = "L0"
        #parentCode = "code2"
    elif level == "L2":
        #nameField = "NAME_2"
        codeField = "ID_2"   # Uniquely identifies the L2 for updates
        code2Field = "ID_1"  # Uniquely identifies the L1 for parenting the L2s
        #parent = "L1"
        #parentCode = "code"
    else:
        current.log.error("Level %s not supported!" % level)
        return

    db = current.db
    s3db = current.s3db
    table = s3db.gis_location

    url = "http://gadm.org/data2/gadm_v2_shp.zip"
    # NB "zipfile" here is a plain filename string; the stdlib zipfile
    # module is imported further down (after this name is consumed)
    zipfile = "gadm_v2_shp.zip"
    shapefile = "gadm2"

    # Copy the current working directory to revert back to later
    old_working_directory = os.getcwd()

    # Create the working directory
    if os.path.exists(os.path.join(os.getcwd(), "temp")): # use web2py/temp/GADMv2 as a cache
        TEMP = os.path.join(os.getcwd(), "temp")
    else:
        import tempfile
        TEMP = tempfile.gettempdir()
    tempPath = os.path.join(TEMP, "GADMv2")
    try:
        os.mkdir(tempPath)
    except OSError:
        # Folder already exists - reuse
        pass

    # Set the current working directory
    os.chdir(tempPath)

    layerName = shapefile

    # Check if file has already been downloaded
    fileName = zipfile
    if not os.path.isfile(fileName):
        # Download the file
        from gluon.tools import fetch
        current.log.debug("Downloading %s" % url)
        try:
            file = fetch(url)
        except urllib2.URLError as exception:
            current.log.error(exception)
            return
        fp = StringIO(file)
    else:
        current.log.debug("Using existing file %s" % fileName)
        fp = open(fileName)

    # Unzip it
    current.log.debug("Unzipping %s" % layerName)
    import zipfile
    myfile = zipfile.ZipFile(fp)
    for ext in ["dbf", "prj", "sbn", "sbx", "shp", "shx"]:
        fileName = "%s.%s" % (layerName, ext)
        file = myfile.read(fileName)
        f = open(fileName, "w")
        f.write(file)
        f.close()
    myfile.close()

    # Use OGR to read Shapefile
    current.log.debug("Opening %s.shp" % layerName)
    ds = ogr.Open("%s.shp" % layerName)
    if ds is None:
        current.log.debug("Open failed.\n")
        return

    lyr = ds.GetLayerByName(layerName)
    lyr.ResetReading()

    for feat in lyr:
        code = feat.GetField(codeField)
        if not code:
            # Skip the entries which aren't countries
            continue
        if countries and code not in countries:
            # Skip the countries which we're not interested in
            continue
        geom = feat.GetGeometryRef()
        if geom is not None:
            if geom.GetGeometryType() == ogr.wkbPoint:
                pass
            else:
                ## FIXME
                ##query = (table.code == code)
                wkt = geom.ExportToWkt()
                if wkt.startswith("LINESTRING"):
                    gis_feature_type = 2
                elif wkt.startswith("POLYGON"):
                    gis_feature_type = 3
                elif wkt.startswith("MULTIPOINT"):
                    gis_feature_type = 4
                elif wkt.startswith("MULTILINESTRING"):
                    gis_feature_type = 5
                elif wkt.startswith("MULTIPOLYGON"):
                    gis_feature_type = 6
                elif wkt.startswith("GEOMETRYCOLLECTION"):
                    gis_feature_type = 7
                #code2 = feat.GetField(code2Field)
                #area = feat.GetField("Shape_Area")
                try:
                    # FIXME: "query" is never defined in this function (the
                    # intended lookup is commented out above), so this raises
                    # NameError at runtime - needs the gis_location_tag model
                    # update noted in the @ToDo before it can work
                    db(query).update(gis_feature_type=gis_feature_type,
                                     wkt=wkt)
                    #code2=code2,
                    #area=area
                except db._adapter.driver.OperationalError as exception:
                    current.log.error(exception)
        else:
            current.log.debug("No geometry\n")

    # Close the shapefile
    ds.Destroy()

    db.commit()

    # Revert back to the working directory as before.
    os.chdir(old_working_directory)

    return
# -------------------------------------------------------------------------
def import_geonames(self, country, level=None):
    """
        Import Locations from the Geonames database

        @param country: the 2-letter country code
        @param level: the ADM level to import

        Designed to be run from the CLI
        Levels should be imported sequentially.
        It is assumed that L0 exists in the DB already
        L1-L3 may have been imported from Shapefiles with Polygon info
        Geonames can then be used to populate the lower levels of hierarchy
    """

    import codecs

    from shapely.geometry import point
    from shapely.geos import ReadingError
    from shapely.wkt import loads as wkt_loads

    try:
        # Enable C-based speedups available from 1.2.10+
        from shapely import speedups
        speedups.enable()
    except:
        current.log.info("S3GIS",
                         "Upgrade Shapely for Performance enhancements")

    db = current.db
    s3db = current.s3db
    #cache = s3db.cache
    request = current.request
    #settings = current.deployment_settings
    table = s3db.gis_location
    ttable = s3db.gis_location_tag

    url = "http://download.geonames.org/export/dump/" + country + ".zip"

    cachepath = os.path.join(request.folder, "cache")
    filename = country + ".txt"
    filepath = os.path.join(cachepath, filename)
    if os.access(filepath, os.R_OK):
        cached = True
    else:
        cached = False
        if not os.access(cachepath, os.W_OK):
            current.log.error("Folder not writable", cachepath)
            return

    if not cached:
        # Download File
        from gluon.tools import fetch
        try:
            f = fetch(url)
        except (urllib2.URLError,):
            e = sys.exc_info()[1]
            current.log.error("URL Error", e)
            return
        except (urllib2.HTTPError,):
            e = sys.exc_info()[1]
            current.log.error("HTTP Error", e)
            return

        # Unzip File
        if f[:2] == "PK":
            # Unzip
            fp = StringIO(f)
            import zipfile
            myfile = zipfile.ZipFile(fp)
            try:
                # Python 2.6+ only :/
                # For now, 2.5 users need to download/unzip manually to cache folder
                myfile.extract(filename, cachepath)
                myfile.close()
            except IOError:
                current.log.error("Zipfile contents don't seem correct!")
                myfile.close()
                return

    f = codecs.open(filepath, encoding="utf-8")
    # Downloaded file is worth keeping
    #os.remove(filepath)

    if level == "L1":
        fc = "ADM1"
        parent_level = "L0"
    elif level == "L2":
        fc = "ADM2"
        parent_level = "L1"
    elif level == "L3":
        fc = "ADM3"
        parent_level = "L2"
    elif level == "L4":
        fc = "ADM4"
        parent_level = "L3"
    else:
        # 5 levels of hierarchy or 4?
        # @ToDo make more extensible still
        #gis_location_hierarchy = self.get_location_hierarchy()
        try:
            #label = gis_location_hierarchy["L5"]
            level = "L5"
            parent_level = "L4"
        except:
            # ADM4 data in Geonames isn't always good (e.g. PK bad)
            level = "L4"
            parent_level = "L3"
        finally:
            fc = "PPL"

    deleted = (table.deleted == False)
    query = deleted & (table.level == parent_level)
    # Do the DB query once (outside loop)
    all_parents = db(query).select(table.wkt,
                                   table.lon_min,
                                   table.lon_max,
                                   table.lat_min,
                                   table.lat_max,
                                   table.id)
    if not all_parents:
        # No locations in the parent level found
        # - use the one higher instead
        parent_level = "L" + str(int(parent_level[1:]) + 1)
        query = deleted & (table.level == parent_level)
        all_parents = db(query).select(table.wkt,
                                       table.lon_min,
                                       table.lon_max,
                                       table.lat_min,
                                       table.lat_max,
                                       table.id)

    # Parse File
    current_row = 0
    for line in f:
        current_row += 1
        # Format of file: http://download.geonames.org/export/dump/readme.txt
        geonameid, \
        name, \
        asciiname, \
        alternatenames, \
        lat, \
        lon, \
        feature_class, \
        feature_code, \
        country_code, \
        cc2, \
        admin1_code, \
        admin2_code, \
        admin3_code, \
        admin4_code, \
        population, \
        elevation, \
        gtopo30, \
        timezone, \
        modification_date = line.split("\t")

        if feature_code == fc:
            # Add WKT
            lat = float(lat)
            lon = float(lon)
            wkt = self.latlon_to_wkt(lat, lon)

            shape = point.Point(lon, lat)

            # Add Bounds
            lon_min = lon_max = lon
            lat_min = lat_max = lat

            # Locate Parent
            parent = ""
            # 1st check for Parents whose bounds include this location (faster)
            def in_bbox(row):
                return (row.lon_min < lon_min) & \
                       (row.lon_max > lon_max) & \
                       (row.lat_min < lat_min) & \
                       (row.lat_max > lat_max)
            for row in all_parents.find(lambda row: in_bbox(row)):
                # Search within this subset with a full geometry check
                # Uses Shapely.
                # @ToDo provide option to use PostGIS/Spatialite
                try:
                    parent_shape = wkt_loads(row.wkt)
                    if parent_shape.intersects(shape):
                        parent = row.id
                        # Should be just a single parent
                        break
                except ReadingError:
                    current.log.error("Error reading wkt of location with id", row.id)

            # Add entry to database
            new_id = table.insert(name=name,
                                  level=level,
                                  parent=parent,
                                  lat=lat,
                                  lon=lon,
                                  wkt=wkt,
                                  lon_min=lon_min,
                                  lon_max=lon_max,
                                  lat_min=lat_min,
                                  lat_max=lat_max)
            # Fixed: was "value=geoname_id" (undefined name) - the variable
            # unpacked from the dump line above is "geonameid"
            ttable.insert(location_id=new_id,
                          tag="geonames",
                          value=geonameid)
        else:
            continue

    current.log.debug("All done!")
    return
# -------------------------------------------------------------------------
@staticmethod
def latlon_to_wkt(lat, lon):
"""
Convert a LatLon to a WKT string
>>> s3gis.latlon_to_wkt(6, 80)
'POINT(80 6)'
"""
WKT = "POINT(%f %f)" % (lon, lat)
return WKT
# -------------------------------------------------------------------------
@staticmethod
def parse_location(wkt, lon=None, lat=None):
    """
        Parses a location from wkt, returning wkt, lat, lon, bounding box and type.
        For points, wkt may be None if lat and lon are provided; wkt will be generated.
        For lines and polygons, the lat, lon returned represent the shape's centroid.
        Centroid and bounding box will be None if Shapely is not available.

        @param wkt: WKT string to parse (may be None for points)
        @param lon: longitude, used only when wkt is None
        @param lat: latitude, used only when wkt is None

        @raise RuntimeError: if neither wkt nor both lon & lat are provided
    """

    if not wkt:
        # Fixed guard: the original condition
        # ("if not lon is not None and lat is not None") did not raise when
        # both coordinates were missing, leading to a TypeError below
        if lon is None or lat is None:
            raise RuntimeError("Need wkt or lon+lat to parse a location")
        wkt = "POINT(%f %f)" % (lon, lat)
        # GEOM_TYPES is a module-level mapping defined elsewhere in this file
        geom_type = GEOM_TYPES["point"]
        bbox = (lon, lat, lon, lat)
    else:
        try:
            from shapely.wkt import loads as wkt_loads
            SHAPELY = True
        except:
            SHAPELY = False

        if SHAPELY:
            shape = wkt_loads(wkt)
            centroid = shape.centroid
            lat = centroid.y
            lon = centroid.x
            geom_type = GEOM_TYPES[shape.type.lower()]
            bbox = shape.bounds
        else:
            # Without Shapely we can only infer the type from the WKT prefix
            lat = None
            lon = None
            geom_type = GEOM_TYPES[wkt.split("(")[0].lower()]
            bbox = None

    res = {"wkt": wkt, "lat": lat, "lon": lon, "gis_feature_type": geom_type}
    if bbox:
        res["lon_min"], res["lat_min"], res["lon_max"], res["lat_max"] = bbox

    return res
# -------------------------------------------------------------------------
@staticmethod
def update_location_tree(feature=None):
"""
Update GIS Locations' Materialized path, Lx locations, Lat/Lon & the_geom
@param feature: a feature dict to update the tree for
- if not provided then update the whole tree
returns the path of the feature
Called onaccept for locations (async, where-possible)
"""
db = current.db
try:
table = db.gis_location
except:
table = current.s3db.gis_location
spatial = current.deployment_settings.get_gis_spatialdb()
wkt_centroid = GIS.wkt_centroid
def bounds_centroid_wkt(feature):
form = Storage()
form.vars = feature
form.errors = Storage()
wkt_centroid(form)
form_vars = form.vars
if "lat_max" in form_vars:
wkt = form_vars.wkt
_vars = dict(gis_feature_type = form_vars.gis_feature_type,
lat = form_vars.lat,
lon = form_vars.lon,
wkt = wkt,
lat_max = form_vars.lat_max,
lat_min = form_vars.lat_min,
lon_min = form_vars.lon_min,
lon_max = form_vars.lon_max)
if wkt:
if not wkt.startswith("POI"):
# Polygons aren't inherited
_vars.update(inherited = False)
if spatial:
_vars.update(the_geom = wkt)
db(table.id == feature.id).update(**_vars)
if not feature:
# Do the whole database
# Do in chunks to save memory and also do in correct order
fields = [table.id, table.name, table.gis_feature_type,
table.L0, table.L1, table.L2, table.L3, table.L4,
table.lat, table.lon, table.wkt, table.inherited,
# Handle Countries which start with Bounds set, yet are Points
table.lat_min, table.lon_min, table.lat_max, table.lon_max,
table.path, table.parent]
update_location_tree = GIS.update_location_tree
for level in ["L0", "L1", "L2", "L3", "L4", "L5", None]:
query = (table.level == level) & (table.deleted == False)
features = db(query).select(*fields)
for feature in features:
feature["level"] = level
wkt = feature["wkt"]
if wkt and not wkt.startswith("POI"):
# Polygons aren't inherited
feature["inherited"] = False
update_location_tree(feature)
# Also do the Bounds/Centroid/WKT
bounds_centroid_wkt(feature)
return
# Single Feature
id = str(feature["id"]) if "id" in feature else None
if not id:
# Nothing we can do
raise ValueError
# L0
name = feature.get("name", False)
level = feature.get("level", False)
path = feature.get("path", False)
L0 = feature.get("L0", False)
if level == "L0":
if name:
if path == id and L0 == name:
# No action required
return path
else:
db(table.id == id).update(L0=name,
path=id)
else:
# Look this up
feature = db(table.id == id).select(table.name,
table.path,
table.L0,
limitby=(0, 1)).first()
if feature:
name = feature["name"]
path = feature["path"]
L0 = feature["L0"]
if path == id and L0 == name:
# No action required
return path
else:
db(table.id == id).update(L0=name,
path=id)
return id
# L1
parent = feature.get("parent", False)
L1 = feature.get("L1", False)
lat = feature.get("lat", False)
lon = feature.get("lon", False)
wkt = feature.get("wkt", False)
inherited = feature.get("inherited", None)
if level == "L1":
if name is False or lat is False or lon is False or \
wkt is False or inherited is None or parent is False or \
path is False or L0 is False or L1 is False:
# Get the whole feature
feature = db(table.id == id).select(table.id,
table.name,
table.parent,
table.path,
table.lat,
table.lon,
table.wkt,
table.inherited,
table.L0,
table.L1,
limitby=(0, 1)).first()
name = feature.name
parent = feature.parent
path = feature.path
lat = feature.lat
lon = feature.lon
wkt = feature.wkt
inherited = feature.inherited
L0 = feature.L0
L1 = feature.L1
if parent:
_path = "%s/%s" % (parent, id)
_L0 = db(table.id == parent).select(table.name,
table.lat,
table.lon,
limitby=(0, 1)).first()
L0_name = _L0.name
L0_lat = _L0.lat
L0_lon = _L0.lon
else:
_path = id
L0_name = None
L0_lat = None
L0_lon = None
if path == _path and L1 == name and L0 == L0_name:
if inherited and lat == L0_lat and lon == L0_lon:
if wkt:
# No action required
return path
else:
# Do the Bounds/Centroid/WKT
feature.update(gis_feature_type="1")
bounds_centroid_wkt(feature)
return path
elif inherited or lat is None or lon is None:
vars = dict(inherited=True,
lat=L0_lat,
lon=L0_lon,
L2=None,
L3=None,
L4=None,
L5=None,
)
db(table.id == id).update(**vars)
# Also do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
elif inherited and lat == L0_lat and lon == L0_lon:
vars = dict(path=_path,
L0=L0_name,
L1=name,
L2=None,
L3=None,
L4=None,
L5=None,
)
db(table.id == id).update(**vars)
if wkt:
# No further action required
return _path
else:
# Do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
return _path
elif inherited or lat is None or lon is None:
vars = dict(path=_path,
L0=L0_name,
L1=name,
L2=None,
L3=None,
L4=None,
L5=None,
inherited=True,
lat=L0_lat,
lon=L0_lon,
)
db(table.id == id).update(**vars)
# Also do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
else:
db(table.id == id).update(path=_path,
inherited=False,
L0=L0_name,
L1=name)
# Ensure that any locations which inherit their latlon from this one get updated
query = (table.parent == id) & \
(table.inherited == True)
fields = [table.id, table.name, table.level, table.path, table.parent,
table.L0, table.L1, table.L2, table.L3, table.L4,
table.lat, table.lon, table.wkt, table.inherited]
rows = db(query).select(*fields)
update_location_tree = GIS.update_location_tree
for row in rows:
try:
update_location_tree(row)
except RuntimeError:
current.log.error("Cannot propagate inherited latlon to child %s of L1 location ID %s: too much recursion" % \
(row.id, id))
return _path
# L2
L2 = feature.get("L2", False)
if level == "L2":
if name is False or lat is False or lon is False or \
wkt is False or inherited is None or parent is False or \
path is False or L0 is False or L1 is False or L2 is False:
# Get the whole feature
feature = db(table.id == id).select(table.id,
table.name,
table.parent,
table.path,
table.lat,
table.lon,
table.wkt,
table.inherited,
table.L0,
table.L1,
table.L2,
limitby=(0, 1)).first()
name = feature.name
parent = feature.parent
path = feature.path
lat = feature.lat
lon = feature.lon
wkt = feature.wkt
inherited = feature.inherited
L0 = feature.L0
L1 = feature.L1
L2 = feature.L2
if parent:
Lx = db(table.id == parent).select(table.name,
table.level,
table.parent,
table.lat,
table.lon,
limitby=(0, 1)).first()
if Lx.level == "L1":
L1_name = Lx.name
_parent = Lx.parent
if _parent:
_path = "%s/%s/%s" % (_parent, parent, id)
L0_name = db(table.id == _parent).select(table.name,
limitby=(0, 1),
cache=current.s3db.cache).first().name
else:
_path = "%s/%s" % (parent, id)
L0_name = None
elif Lx.level == "L0":
_path = "%s/%s" % (parent, id)
L0_name = Lx.name
L1_name = None
else:
current.log.error("Parent of L2 Location ID %s has invalid level: %s is %s" % \
(id, parent, Lx.level))
#raise ValueError
return "%s/%s" % (parent, id)
Lx_lat = Lx.lat
Lx_lon = Lx.lon
else:
_path = id
L0_name = None
L1_name = None
Lx_lat = None
Lx_lon = None
if path == _path and L2 == name and L0 == L0_name and \
L1 == L1_name:
if inherited and lat == Lx_lat and lon == Lx_lon:
if wkt:
# No action required
return path
else:
# Do the Bounds/Centroid/WKT
feature.update(gis_feature_type="1")
bounds_centroid_wkt(feature)
return path
elif inherited or lat is None or lon is None:
vars = dict(inherited=True,
lat=Lx_lat,
lon=Lx_lon,
L3=None,
L4=None,
L5=None,
)
db(table.id == id).update(**vars)
# Also do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
elif inherited and lat == Lx_lat and lon == Lx_lon:
vars = dict(path=_path,
L0=L0_name,
L1=L1_name,
L2=name,
L3=None,
L4=None,
L5=None,
)
db(table.id == id).update(**vars)
if wkt:
# No further action required
return _path
else:
# Do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
return _path
elif inherited or lat is None or lon is None:
vars = dict(path=_path,
L0=L0_name,
L1=L1_name,
L2=name,
L3=None,
L4=None,
L5=None,
inherited=True,
lat=Lx_lat,
lon=Lx_lon,
)
db(table.id == id).update(**vars)
# Also do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
else:
db(table.id == id).update(path=_path,
inherited=False,
L0=L0_name,
L1=L1_name,
L2=name)
# Ensure that any locations which inherit their latlon from this one get updated
query = (table.parent == id) & \
(table.inherited == True)
fields = [table.id, table.name, table.level, table.path, table.parent,
table.L0, table.L1, table.L2, table.L3, table.L4,
table.lat, table.lon, table.inherited]
rows = db(query).select(*fields)
update_location_tree = GIS.update_location_tree
for row in rows:
try:
update_location_tree(row)
except RuntimeError:
current.log.error("Cannot propagate inherited latlon to child %s of L2 location ID %s: too much recursion" % \
(row.id, id))
return _path
# L3
L3 = feature.get("L3", False)
if level == "L3":
if name is False or lat is False or lon is False or \
wkt is False or inherited is None or parent is False or \
path is False or L0 is False or L1 is False or L2 is False or \
L3 is False:
# Get the whole feature
feature = db(table.id == id).select(table.id,
table.name,
table.parent,
table.path,
table.lat,
table.lon,
table.wkt,
table.inherited,
table.L0,
table.L1,
table.L2,
table.L3,
limitby=(0, 1)).first()
name = feature.name
parent = feature.parent
path = feature.path
lat = feature.lat
lon = feature.lon
wkt = feature.wkt
inherited = feature.inherited
L0 = feature.L0
L1 = feature.L1
L2 = feature.L2
L3 = feature.L3
if parent:
Lx = db(table.id == parent).select(table.id,
table.name,
table.level,
table.L0,
table.L1,
table.path,
table.lat,
table.lon,
limitby=(0, 1)).first()
if Lx.level == "L2":
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.name
_path = Lx.path
if _path and L0_name and L1_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.L1,
table.lat,
table.lon,
limitby=(0, 1)
).first()
L0_name = Lx.L0
L1_name = Lx.L1
elif Lx.level == "L1":
L0_name = Lx.L0
L1_name = Lx.name
L2_name = None
_path = Lx.path
if _path and L0_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.lat,
table.lon,
limitby=(0, 1)
).first()
L0_name = Lx.L0
elif Lx.level == "L0":
_path = "%s/%s" % (parent, id)
L0_name = Lx.name
L1_name = None
L2_name = None
else:
current.log.error("Parent of L3 Location ID %s has invalid level: %s is %s" % \
(id, parent, Lx.level))
#raise ValueError
return "%s/%s" % (parent, id)
Lx_lat = Lx.lat
Lx_lon = Lx.lon
else:
_path = id
L0_name = None
L1_name = None
L2_name = None
Lx_lat = None
Lx_lon = None
if path == _path and L3 == name and L0 == L0_name and \
L1 == L1_name and L2 == L2_name:
if inherited and lat == Lx_lat and lon == Lx_lon:
if wkt:
# No action required
return path
else:
# Do the Bounds/Centroid/WKT
feature.update(gis_feature_type="1")
bounds_centroid_wkt(feature)
return path
elif inherited or lat is None or lon is None:
vars = dict(inherited=True,
lat=Lx_lat,
lon=Lx_lon,
L4=None,
L5=None,
)
db(table.id == id).update(**vars)
# Also do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
elif inherited and lat == Lx_lat and lon == Lx_lon:
vars = dict(path=_path,
L0=L0_name,
L1=L1_name,
L2=L2_name,
L3=name,
)
db(table.id == id).update(**vars)
if wkt:
# No further action required
return _path
else:
# Do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
return _path
elif inherited or lat is None or lon is None:
vars = dict(path=_path,
L0=L0_name,
L1=L1_name,
L2=L2_name,
L3=name,
L4=None,
L5=None,
inherited=True,
lat=Lx_lat,
lon=Lx_lon)
db(table.id == id).update(**vars)
# Also do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
else:
db(table.id == id).update(path=_path,
inherited=False,
L0=L0_name,
L1=L1_name,
L2=L2_name,
L3=name,
L4=None,
L5=None)
# Ensure that any locations which inherit their latlon from this one get updated
query = (table.parent == id) & \
(table.inherited == True)
fields = [table.id, table.name, table.level, table.path, table.parent,
table.L0, table.L1, table.L2, table.L3, table.L4,
table.lat, table.lon, table.wkt, table.inherited]
rows = db(query).select(*fields)
update_location_tree = GIS.update_location_tree
for row in rows:
try:
update_location_tree(row)
except RuntimeError:
current.log.error("Cannot propagate inherited latlon to child %s of L3 location ID %s: too much recursion" % \
(row.id, id))
return _path
# L4
L4 = feature.get("L4", False)
if level == "L4":
if name is False or lat is False or lon is False or \
wkt is False or inherited is None or parent is False or \
path is False or L0 is False or L1 is False or L2 is False or \
L3 is False or L4 is False:
# Get the whole feature
feature = db(table.id == id).select(table.id,
table.name,
table.parent,
table.path,
table.lat,
table.lon,
table.wkt,
table.inherited,
table.L0,
table.L1,
table.L2,
table.L3,
table.L4,
limitby=(0, 1)).first()
name = feature.name
parent = feature.parent
path = feature.path
lat = feature.lat
lon = feature.lon
wkt = feature.wkt
inherited = feature.inherited
L0 = feature.L0
L1 = feature.L1
L2 = feature.L2
L3 = feature.L3
L4 = feature.L4
if parent:
Lx = db(table.id == parent).select(table.id,
table.name,
table.level,
table.L0,
table.L1,
table.L2,
table.path,
table.lat,
table.lon,
limitby=(0, 1)).first()
if Lx.level == "L3":
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.L2
L3_name = Lx.name
_path = Lx.path
if _path and L0_name and L1_name and L2_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.L1,
table.L2,
table.lat,
table.lon,
limitby=(0, 1)
).first()
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.L2
elif Lx.level == "L2":
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.name
L3_name = None
_path = Lx.path
if _path and L0_name and L1_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.L1,
table.lat,
table.lon,
limitby=(0, 1)
).first()
L0_name = Lx.L0
L1_name = Lx.L1
elif Lx.level == "L1":
L0_name = Lx.L0
L1_name = Lx.name
L2_name = None
L3_name = None
_path = Lx.path
if _path and L0_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.lat,
table.lon,
limitby=(0, 1)
).first()
L0_name = Lx.L0
elif Lx.level == "L0":
_path = "%s/%s" % (parent, id)
L0_name = Lx.name
L1_name = None
L2_name = None
L3_name = None
else:
current.log.error("Parent of L4 Location ID %s has invalid level: %s is %s" % \
(id, parent, Lx.level))
#raise ValueError
return "%s/%s" % (parent, id)
Lx_lat = Lx.lat
Lx_lon = Lx.lon
else:
_path = id
L0_name = None
L1_name = None
L2_name = None
L3_name = None
Lx_lat = None
Lx_lon = None
if path == _path and L4 == name and L0 == L0_name and \
L1 == L1_name and L2 == L2_name and \
L3 == L3_name:
if inherited and lat == Lx_lat and lon == Lx_lon:
if wkt:
# No action required
return path
else:
# Do the Bounds/Centroid/WKT
feature.update(gis_feature_type="1")
bounds_centroid_wkt(feature)
return path
elif inherited or lat is None or lon is None:
vars = dict(inherited=True,
lat=Lx_lat,
lon=Lx_lon,
L5=None,
)
db(table.id == id).update(**vars)
# Also do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
elif inherited and lat == Lx_lat and lon == Lx_lon:
vars = dict(path=_path,
L0=L0_name,
L1=L1_name,
L2=L2_name,
L3=L3_name,
L4=name,
L5=None,
)
db(table.id == id).update(**vars)
if wkt:
# No further action required
return _path
else:
# Do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
return _path
elif inherited or lat is None or lon is None:
vars = dict(path=_path,
L0=L0_name,
L1=L1_name,
L2=L2_name,
L3=L3_name,
L4=name,
L5=None,
inherited=True,
lat=Lx_lat,
lon=Lx_lon)
db(table.id == id).update(**vars)
# Also do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
else:
db(table.id == id).update(path=_path,
inherited=False,
L0=L0_name,
L1=L1_name,
L2=L2_name,
L3=L3_name,
L4=name,
L5=None)
# Ensure that any locations which inherit their latlon from this one get updated
query = (table.parent == id) & \
(table.inherited == True)
fields = [table.id, table.name, table.level, table.path, table.parent,
table.L0, table.L1, table.L2, table.L3, table.L4,
table.lat, table.lon, table.wkt, table.inherited]
rows = db(query).select(*fields)
update_location_tree = GIS.update_location_tree
for row in rows:
try:
update_location_tree(row)
except RuntimeError:
current.log.error("Cannot propagate inherited latlon to child %s of L4 location ID %s: too much recursion" % \
(row.id, id))
return _path
# L5
L5 = feature.get("L5", False)
if level == "L5":
if name is False or lat is False or lon is False or \
wkt is False or inherited is None or parent is False or \
path is False or L0 is False or L1 is False or L2 is False or \
L3 is False or L4 is False or \
L5 is False:
# Get the whole feature
feature = db(table.id == id).select(table.id,
table.name,
table.parent,
table.path,
table.lat,
table.lon,
table.wkt,
table.inherited,
table.L0,
table.L1,
table.L2,
table.L3,
table.L4,
table.L5,
limitby=(0, 1)).first()
name = feature.name
parent = feature.parent
path = feature.path
lat = feature.lat
lon = feature.lon
wkt = feature.wkt
inherited = feature.inherited
L0 = feature.L0
L1 = feature.L1
L2 = feature.L2
L3 = feature.L3
L4 = feature.L4
L5 = feature.L5
if parent:
Lx = db(table.id == parent).select(table.id,
table.name,
table.level,
table.L0,
table.L1,
table.L2,
table.L3,
table.path,
table.lat,
table.lon,
limitby=(0, 1)).first()
if Lx.level == "L4":
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.L2
L3_name = Lx.L3
L4_name = Lx.name
_path = Lx.path
if _path and L0_name and L1_name and L2_name and L3_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.L1,
table.L2,
table.L3,
table.lat,
table.lon,
limitby=(0, 1)
).first()
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.L2
L3_name = Lx.L3
elif Lx.level == "L3":
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.L2
L3_name = Lx.name
L4_name = None
_path = Lx.path
if _path and L0_name and L1_name and L2_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.L1,
table.L2,
table.lat,
table.lon,
limitby=(0, 1)
).first()
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.L2
elif Lx.level == "L2":
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.name
L3_name = None
L4_name = None
_path = Lx.path
if _path and L0_name and L1_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.L1,
table.lat,
table.lon,
limitby=(0, 1)
).first()
L0_name = Lx.L0
L1_name = Lx.L1
elif Lx.level == "L1":
L0_name = Lx.L0
L1_name = Lx.name
L2_name = None
L3_name = None
L4_name = None
_path = Lx.path
if _path and L0_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.lat,
table.lon,
limitby=(0, 1)
).first()
L0_name = Lx.L0
elif Lx.level == "L0":
_path = "%s/%s" % (parent, id)
L0_name = Lx.name
L1_name = None
L2_name = None
L3_name = None
L4_name = None
else:
current.log.error("Parent of L5 Location ID %s has invalid level: %s is %s" % \
(id, parent, Lx.level))
#raise ValueError
return "%s/%s" % (parent, id)
Lx_lat = Lx.lat
Lx_lon = Lx.lon
else:
_path = id
L0_name = None
L1_name = None
L2_name = None
L3_name = None
L4_name = None
Lx_lat = None
Lx_lon = None
if path == _path and L5 == name and L0 == L0_name and \
L1 == L1_name and L2 == L2_name and \
L3 == L3_name and L4 == L4_name:
if inherited and lat == Lx_lat and lon == Lx_lon:
if wkt:
# No action required
return path
else:
# Do the Bounds/Centroid/WKT
feature.update(gis_feature_type="1")
bounds_centroid_wkt(feature)
return path
elif inherited or lat is None or lon is None:
vars = dict(inherited=True,
lat=Lx_lat,
lon=Lx_lon,
)
db(table.id == id).update(**vars)
# Also do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
elif inherited and lat == Lx_lat and lon == Lx_lon:
vars = dict(path=_path,
L0=L0_name,
L1=L1_name,
L2=L2_name,
L3=L3_name,
L4=L4_name,
L5=name,
)
db(table.id == id).update(**vars)
if wkt:
# No further action required
return _path
else:
# Do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
return _path
elif inherited or lat is None or lon is None:
vars = dict(path=_path,
L0=L0_name,
L1=L1_name,
L2=L2_name,
L3=L3_name,
L4=L4_name,
L5=name,
inherited=True,
lat=Lx_lat,
lon=Lx_lon)
db(table.id == id).update(**vars)
# Also do the Bounds/Centroid/WKT
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
else:
db(table.id == id).update(path=_path,
inherited=False,
L0=L0_name,
L1=L1_name,
L2=L2_name,
L3=L3_name,
L4=L4_name,
L5=name)
# Ensure that any locations which inherit their latlon from this one get updated
query = (table.parent == id) & \
(table.inherited == True)
fields = [table.id, table.name, table.level, table.path, table.parent,
table.L0, table.L1, table.L2, table.L3, table.L4, table.L5,
table.lat, table.lon, table.wkt, table.inherited]
rows = db(query).select(*fields)
update_location_tree = GIS.update_location_tree
for row in rows:
try:
update_location_tree(row)
except RuntimeError:
current.log.error("Cannot propagate inherited latlon to child %s of L5 location ID %s: too much recursion" % \
(row.id, id))
return _path
# Specific Location
# - or unspecified (which we should avoid happening)
if name is False or lat is False or lon is False or wkt is False or \
inherited is None or parent is False or path is False or \
L0 is False or L1 is False or L2 is False or L3 is False or \
L4 is False or L5 is False:
# Get the whole feature
feature = db(table.id == id).select(table.id,
table.name,
table.level,
table.parent,
table.path,
table.lat,
table.lon,
table.wkt,
table.inherited,
table.L0,
table.L1,
table.L2,
table.L3,
table.L4,
table.L5,
limitby=(0, 1)).first()
name = feature.name
level = feature.level
parent = feature.parent
path = feature.path
lat = feature.lat
lon = feature.lon
wkt = feature.wkt
inherited = feature.inherited
L0 = feature.L0
L1 = feature.L1
L2 = feature.L2
L3 = feature.L3
L4 = feature.L4
L5 = feature.L5
if parent:
Lx = db(table.id == parent).select(table.id,
table.name,
table.level,
table.L0,
table.L1,
table.L2,
table.L3,
table.L4,
table.path,
table.lat,
table.lon,
limitby=(0, 1)).first()
if Lx.level == "L5":
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.L2
L3_name = Lx.L3
L4_name = Lx.L4
L5_name = Lx.name
_path = Lx.path
if _path and L0_name and L1_name and L2_name and L3_name and L4_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.L1,
table.L2,
table.L3,
table.L4,
table.lat,
table.lon,
limitby=(0, 1)).first()
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.L2
L3_name = Lx.L3
L4_name = Lx.L4
elif Lx.level == "L4":
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.L2
L3_name = Lx.L3
L4_name = Lx.name
L5_name = name if level == "L5" else None
_path = Lx.path
if _path and L0_name and L1_name and L2_name and L3_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.L1,
table.L2,
table.L3,
table.lat,
table.lon,
limitby=(0, 1)).first()
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.L2
L3_name = Lx.L3
elif Lx.level == "L3":
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.L2
L3_name = Lx.name
L4_name = name if level == "L4" else None
L5_name = name if level == "L5" else None
_path = Lx.path
if _path and L0_name and L1_name and L2_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.L1,
table.L2,
table.lat,
table.lon,
limitby=(0, 1)).first()
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.L2
elif Lx.level == "L2":
L0_name = Lx.L0
L1_name = Lx.L1
L2_name = Lx.name
L3_name = name if level == "L3" else None
L4_name = name if level == "L4" else None
L5_name = name if level == "L5" else None
_path = Lx.path
if _path and L0_name and L1_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.L1,
table.lat,
table.lon,
limitby=(0, 1)).first()
L0_name = Lx.L0
L1_name = Lx.L1
elif Lx.level == "L1":
L0_name = Lx.L0
L1_name = Lx.name
L2_name = name if level == "L2" else None
L3_name = name if level == "L3" else None
L4_name = name if level == "L4" else None
L5_name = name if level == "L5" else None
_path = Lx.path
if _path and L0_name:
_path = "%s/%s" % (_path, id)
else:
# This feature needs to be updated
_path = GIS.update_location_tree(Lx)
_path = "%s/%s" % (_path, id)
# Query again
Lx = db(table.id == parent).select(table.L0,
table.lat,
table.lon,
limitby=(0, 1)).first()
L0_name = Lx.L0
elif Lx.level == "L0":
_path = "%s/%s" % (parent, id)
L0_name = Lx.name
L1_name = name if level == "L1" else None
L2_name = name if level == "L2" else None
L3_name = name if level == "L3" else None
L4_name = name if level == "L4" else None
L5_name = name if level == "L5" else None
else:
#raise ValueError
return id
Lx_lat = Lx.lat
Lx_lon = Lx.lon
else:
_path = id
L0_name = name if level == "L0" else None
L1_name = name if level == "L1" else None
L2_name = name if level == "L2" else None
L3_name = name if level == "L3" else None
L4_name = name if level == "L4" else None
L5_name = name if level == "L5" else None
Lx_lat = None
Lx_lon = None
if path == _path and L0 == L0_name and \
L1 == L1_name and L2 == L2_name and \
L3 == L3_name and L4 == L4_name and \
L5 == L5_name:
if inherited and lat == Lx_lat and lon == Lx_lon:
if wkt:
# No action required
return path
else:
# Do the Bounds/Centroid/WKT (below)
vars = dict()
elif inherited or lat is None or lon is None:
vars = dict(inherited=True,
lat=Lx_lat,
lon=Lx_lon,
)
db(table.id == id).update(**vars)
else:
# Do the Bounds/Centroid/WKT (below)
vars = dict()
elif inherited and lat == Lx_lat and lon == Lx_lon:
vars = dict(path=_path,
L0=L0_name,
L1=L1_name,
L2=L2_name,
L3=L3_name,
L4=L4_name,
L5=L5_name,
)
db(table.id == id).update(**vars)
elif inherited or lat is None or lon is None:
vars = dict(path=_path,
L0=L0_name,
L1=L1_name,
L2=L2_name,
L3=L3_name,
L4=L4_name,
L5=L5_name,
inherited=True,
lat=Lx_lat,
lon=Lx_lon
)
db(table.id == id).update(**vars)
else:
# We have a Lat & Lon
vars = dict(path=_path,
inherited=False,
L0=L0_name,
L1=L1_name,
L2=L2_name,
L3=L3_name,
L4=L4_name,
L5=L5_name,
)
db(table.id == id).update(**vars)
# Also do the Bounds/Centroid/WKT
if not wkt or wkt.startswith("POI"):
vars.update(gis_feature_type="1")
feature.update(**vars)
bounds_centroid_wkt(feature)
return _path
# -------------------------------------------------------------------------
@staticmethod
def wkt_centroid(form):
    """
        OnValidation callback:
            If a WKT is defined: validate the format,
                calculate the LonLat of the Centroid, and set bounds
            Else if a LonLat is defined: calculate the WKT for the Point.

        @param form: the FORM being validated; reads/writes form.vars and
                     records failures in form.errors
        NB form.vars is assumed to be a web2py Storage: attribute access for
        a missing key yields None rather than raising — TODO confirm
    """
    messages = current.messages
    form_vars = form.vars
    if form_vars.get("gis_feature_type", None) == "1":
        # Point
        if (form_vars.lon is None and form_vars.lat is None) or \
           (form_vars.lon == "" and form_vars.lat == ""):
            # No Geometry available
            # Don't clobber existing records (e.g. in Prepop)
            #form_vars.gis_feature_type = "0"
            # Cannot create WKT, so Skip
            return
        elif form_vars.lat is None or form_vars.lat == "":
            # Can't just have lon without lat
            form.errors["lat"] = messages.lat_empty
        elif form_vars.lon is None or form_vars.lon == "":
            form.errors["lon"] = messages.lon_empty
        else:
            # Build the WKT for the Point & default the bounds to the point itself
            form_vars.wkt = "POINT(%(lon)s %(lat)s)" % form_vars
            if "lon_min" not in form_vars or form_vars.lon_min is None:
                form_vars.lon_min = form_vars.lon
            if "lon_max" not in form_vars or form_vars.lon_max is None:
                form_vars.lon_max = form_vars.lon
            if "lat_min" not in form_vars or form_vars.lat_min is None:
                form_vars.lat_min = form_vars.lat
            if "lat_max" not in form_vars or form_vars.lat_max is None:
                form_vars.lat_max = form_vars.lat
    elif form_vars.get("wkt", None):
        # Parse WKT for LineString, Polygon, etc
        from shapely.wkt import loads as wkt_loads
        try:
            shape = wkt_loads(form_vars.wkt)
        except:
            try:
                # Perhaps this is really a LINESTRING (e.g. OSM import of an unclosed Way)
                # [8:-1] strips the leading "POLYGON(" and the trailing ")"
                linestring = "LINESTRING%s" % form_vars.wkt[8:-1]
                shape = wkt_loads(linestring)
                form_vars.wkt = linestring
            except:
                form.errors["wkt"] = messages.invalid_wkt
                return
        # Map the Shapely geometry type onto the gis_feature_type code
        gis_feature_type = shape.type
        if gis_feature_type == "Point":
            form_vars.gis_feature_type = 1
        elif gis_feature_type == "LineString":
            form_vars.gis_feature_type = 2
        elif gis_feature_type == "Polygon":
            form_vars.gis_feature_type = 3
        elif gis_feature_type == "MultiPoint":
            form_vars.gis_feature_type = 4
        elif gis_feature_type == "MultiLineString":
            form_vars.gis_feature_type = 5
        elif gis_feature_type == "MultiPolygon":
            form_vars.gis_feature_type = 6
        elif gis_feature_type == "GeometryCollection":
            form_vars.gis_feature_type = 7
        try:
            # Derive LonLat from the geometry's Centroid
            centroid_point = shape.centroid
            form_vars.lon = centroid_point.x
            form_vars.lat = centroid_point.y
            bounds = shape.bounds
            if gis_feature_type != "Point" or \
               "lon_min" not in form_vars or form_vars.lon_min is None or \
               form_vars.lon_min == form_vars.lon_max:
                # Update bounds unless we have a 'Point' which has already got wider Bounds specified (such as a country)
                form_vars.lon_min = bounds[0]
                form_vars.lat_min = bounds[1]
                form_vars.lon_max = bounds[2]
                form_vars.lat_max = bounds[3]
        except:
            form.errors.gis_feature_type = messages.centroid_error
    elif (form_vars.lon is None and form_vars.lat is None) or \
         (form_vars.lon == "" and form_vars.lat == ""):
        # No Geometry available
        # Don't clobber existing records (e.g. in Prepop)
        #form_vars.gis_feature_type = "0"
        # Cannot create WKT, so Skip
        return
    else:
        # Point
        form_vars.gis_feature_type = "1"
        if form_vars.lat is None or form_vars.lat == "":
            form.errors["lat"] = messages.lat_empty
        elif form_vars.lon is None or form_vars.lon == "":
            form.errors["lon"] = messages.lon_empty
        else:
            form_vars.wkt = "POINT(%(lon)s %(lat)s)" % form_vars
            if "lon_min" not in form_vars or form_vars.lon_min is None:
                form_vars.lon_min = form_vars.lon
            if "lon_max" not in form_vars or form_vars.lon_max is None:
                form_vars.lon_max = form_vars.lon
            if "lat_min" not in form_vars or form_vars.lat_min is None:
                form_vars.lat_min = form_vars.lat
            if "lat_max" not in form_vars or form_vars.lat_max is None:
                form_vars.lat_max = form_vars.lat
    if current.deployment_settings.get_gis_spatialdb():
        # Also populate the spatial field
        form_vars.the_geom = form_vars.wkt
    return
# -------------------------------------------------------------------------
@staticmethod
def query_features_by_bbox(lon_min, lat_min, lon_max, lat_max):
    """
        Build a query matching all Locations whose own bounding box
        overlaps the given bounding box.

        @param lon_min: western edge of the bbox
        @param lat_min: southern edge of the bbox
        @param lon_max: eastern edge of the bbox
        @param lat_max: northern edge of the bbox
        @return: a DAL query (not yet executed)
    """
    ltable = current.s3db.gis_location
    # Two ranges overlap iff each one's minimum lies below the other's maximum
    lat_overlaps = (ltable.lat_min <= lat_max) & (ltable.lat_max >= lat_min)
    lon_overlaps = (ltable.lon_min <= lon_max) & (ltable.lon_max >= lon_min)
    return lat_overlaps & lon_overlaps
# -------------------------------------------------------------------------
@staticmethod
def get_features_by_bbox(lon_min, lat_min, lon_max, lat_max):
    """
        Fetch the Rows of all Locations whose shape intersects the
        given bounding box.

        @return: a Rows object of gis_location records
    """
    bbox_query = current.gis.query_features_by_bbox(lon_min, lat_min,
                                                    lon_max, lat_max)
    return current.db(bbox_query).select()
# -------------------------------------------------------------------------
@staticmethod
def get_features_by_shape(shape):
    """
        Generator yielding the location Rows whose geometry intersects
        the given Shapely shape.

        Relies on Shapely for WKT parsing and the intersection test.
        @ToDo: provide an option to use PostGIS/Spatialite
    """
    from shapely.geos import ReadingError
    from shapely.wkt import loads as wkt_loads
    try:
        # Enable C-based speedups available from 1.2.10+
        from shapely import speedups
        speedups.enable()
    except:
        current.log.info("S3GIS",
                         "Upgrade Shapely for Performance enhancements")
    table = current.s3db.gis_location
    # Cheap bounding-box pre-filter before the exact intersection test
    query = current.gis.query_features_by_bbox(*shape.bounds)
    query &= (table.wkt != None) & (table.wkt != "")
    for row in current.db(query).select():
        try:
            geometry = wkt_loads(row.wkt)
            if geometry.intersects(shape):
                yield row
        except ReadingError:
            current.log.error("Error reading wkt of location with id", row.id)
# -------------------------------------------------------------------------
@staticmethod
def get_features_by_latlon(lat, lon):
    """
        Returns a generator of locations whose shape intersects the given LatLon.

        Relies on Shapely.
        @todo: provide an option to use PostGIS/Spatialite
    """
    from shapely.geometry import Point
    return current.gis.get_features_by_shape(Point(lon, lat))
# -------------------------------------------------------------------------
@staticmethod
def get_features_by_feature(feature):
    """
        Returns all Locations whose geometry intersects the given feature.

        Relies on Shapely.
        @ToDo: provide an option to use PostGIS/Spatialite
    """
    from shapely.wkt import loads as wkt_loads
    return current.gis.get_features_by_shape(wkt_loads(feature.wkt))
# -------------------------------------------------------------------------
@staticmethod
def set_all_bounds():
    """
        Sets bounds for all locations without them.

        If shapely is present, and a location has wkt, bounds of the geometry
        are used.  Otherwise, the (lat, lon) are used as bounds.
    """
    try:
        from shapely.wkt import loads as wkt_loads
        SHAPELY = True
    except:
        SHAPELY = False
    db = current.db
    table = current.s3db.gis_location
    # Query to find all locations without bounds set
    no_bounds = (table.lon_min == None) & \
                (table.lat_min == None) & \
                (table.lon_max == None) & \
                (table.lat_max == None) & \
                (table.lat != None) & \
                (table.lon != None)
    if SHAPELY:
        # Refine to those locations with a WKT field
        wkt_no_bounds = no_bounds & (table.wkt != None) & (table.wkt != "")
        # NB table.id must be selected: it is needed below to address the
        # record for the per-row update (the original select omitted it)
        for location in db(wkt_no_bounds).select(table.id,
                                                 table.wkt):
            try:
                shape = wkt_loads(location.wkt)
            except:
                current.log.error("Error reading WKT", location.wkt)
                continue
            # Use the geometry's envelope as the bounds
            bounds = shape.bounds
            table[location.id] = dict(lon_min = bounds[0],
                                      lat_min = bounds[1],
                                      lon_max = bounds[2],
                                      lat_max = bounds[3],
                                      )
    # Anything left, we assume is a Point, so set the bounds to be the same
    db(no_bounds).update(lon_min=table.lon,
                         lat_min=table.lat,
                         lon_max=table.lon,
                         lat_max=table.lat)
# -------------------------------------------------------------------------
@staticmethod
def simplify(wkt,
             tolerance=None,
             preserve_topology=True,
             output="wkt",
             decimals=4
             ):
    """
        Simplify a complex Polygon using the Douglas-Peucker algorithm
        - NB This uses Python, better performance will be gained by doing
          this direct from the database if you are using PostGIS:
          ST_Simplify() is available as
        db(query).select(table.the_geom.st_simplify(tolerance).st_astext().with_alias('wkt')).first().wkt
        db(query).select(table.the_geom.st_simplify(tolerance).st_asgeojson().with_alias('geojson')).first().geojson

        @param wkt: the WKT string to be simplified (usually coming from a gis_location record)
        @param tolerance: how aggressive a simplification to perform
        @param preserve_topology: whether the simplified geometry should be maintained
        @param output: whether to output as WKT or GeoJSON format
        @param decimals: the number of decimal places to include in the output
        @return: the simplified geometry as a string, or None if the WKT
                 could not be parsed
    """
    from shapely.geometry import Point, LineString, Polygon, MultiPolygon
    from shapely.wkt import loads as wkt_loads
    try:
        # Enable C-based speedups available from 1.2.10+
        from shapely import speedups
        speedups.enable()
    except:
        current.log.info("S3GIS",
                         "Upgrade Shapely for Performance enhancements")
    try:
        shape = wkt_loads(wkt)
    except:
        # Log just a short prefix of the (potentially huge) invalid WKT
        # (was wkt[10], which indexed a single character & could IndexError)
        wkt = wkt[:10] if wkt else wkt
        current.log.error("Invalid Shape: %s" % wkt)
        return None
    if not tolerance:
        tolerance = current.deployment_settings.get_gis_simplify_tolerance()
    if tolerance:
        shape = shape.simplify(tolerance, preserve_topology)
    # Limit the number of decimal places
    formatter = ".%sf" % decimals
    def shrink_polygon(shape):
        """ Helper Function: round each exterior coordinate of a Polygon
            NB interior rings (holes) are dropped — assumed absent; confirm
            if donut polygons are expected here
        """
        points = shape.exterior.coords
        coords = []
        cappend = coords.append
        for point in points:
            x = float(format(point[0], formatter))
            y = float(format(point[1], formatter))
            cappend((x, y))
        return Polygon(LineString(coords))
    geom_type = shape.geom_type
    if geom_type == "MultiPolygon":
        polygons = shape.geoms
        p = []
        pappend = p.append
        for polygon in polygons:
            pappend(shrink_polygon(polygon))
        shape = MultiPolygon([s for s in p])
    elif geom_type == "Polygon":
        shape = shrink_polygon(shape)
    elif geom_type == "LineString":
        points = shape.coords
        coords = []
        cappend = coords.append
        for point in points:
            x = float(format(point[0], formatter))
            y = float(format(point[1], formatter))
            cappend((x, y))
        shape = LineString(coords)
    elif geom_type == "Point":
        x = float(format(shape.x, formatter))
        y = float(format(shape.y, formatter))
        shape = Point(x, y)
    else:
        current.log.info("Cannot yet shrink Geometry: %s" % geom_type)
    # Output
    if output == "wkt":
        output = shape.to_wkt()
    elif output == "geojson":
        from ..geojson import dumps
        # Compact Encoding
        output = dumps(shape, separators=SEPARATORS)
    return output
# -------------------------------------------------------------------------
def show_map(self,
             id = "default_map",
             height = None,
             width = None,
             bbox = {},
             lat = None,
             lon = None,
             zoom = None,
             projection = None,
             add_feature = False,
             add_feature_active = False,
             add_line = False,
             add_line_active = False,
             add_polygon = False,
             add_polygon_active = False,
             features = None,
             feature_queries = None,
             feature_resources = None,
             wms_browser = {},
             catalogue_layers = False,
             legend = False,
             toolbar = False,
             area = False,
             nav = None,
             save = False,
             search = False,
             mouse_position = None,
             overview = None,
             permalink = None,
             scaleline = None,
             zoomcontrol = None,
             zoomWheelEnabled = True,
             print_tool = {},
             mgrs = {},
             window = False,
             window_hide = False,
             closable = True,
             maximizable = True,
             collapsed = False,
             callback = "DEFAULT",
             plugins = None,
             ):
    """
        Returns the HTML to display a map

        Normally called in the controller as: map = gis.show_map()
        In the view, put: {{=XML(map)}}

        @param id: ID to uniquely identify this map if there are several on a page
        @param height: Height of viewport (if not provided then the default deployment setting is used)
        @param width: Width of viewport (if not provided then the default deployment setting is used)
        @param bbox: default Bounding Box of viewport (if not provided then the Lat/Lon/Zoom are used) (Dict):
            {"lon_min" : float,
             "lat_min" : float,
             "lon_max" : float,
             "lat_max" : float,
             }
        @param lat: default Latitude of viewport (if not provided then the default setting from the Map Service Catalogue is used)
        @param lon: default Longitude of viewport (if not provided then the default setting from the Map Service Catalogue is used)
        @param zoom: default Zoom level of viewport (if not provided then the default setting from the Map Service Catalogue is used)
        @param projection: EPSG code for the Projection to use (if not provided then the default setting from the Map Service Catalogue is used)
        @param add_feature: Whether to include a DrawFeature control to allow adding a marker to the map
        @param add_feature_active: Whether the DrawFeature control should be active by default
        @param add_line: Whether to include a DrawFeature control to allow drawing a line over the map
        @param add_line_active: Whether the DrawFeature control should be active by default
        @param add_polygon: Whether to include a DrawFeature control to allow drawing a polygon over the map
        @param add_polygon_active: Whether the DrawFeature control should be active by default
        @param features: Simple Features to overlay on Map (no control over appearance & not interactive)
            [wkt]
        @param feature_queries: Feature Queries to overlay onto the map & their options (List of Dicts):
            [{"name"   : T("MyLabel"), # A string: the label for the layer
              "query"  : query,        # A gluon.sql.Rows of gis_locations, which can be from a simple query or a Join.
                                       # Extra fields can be added for 'popup_url', 'popup_label' & either
                                       # 'marker' (url/height/width) or 'shape' (with optional 'colour' & 'size')
              "active" : True,         # Is the feed displayed upon load or needs ticking to load afterwards?
              "marker" : None,         # Optional: A per-Layer marker query or marker_id for the icon used to display the feature
              "opacity" : 1,           # Optional
              "cluster_attribute",     # Optional
              "cluster_distance",      # Optional
              "cluster_threshold"      # Optional
              }]
        @param feature_resources: REST URLs for (filtered) resources to overlay onto the map & their options (List of Dicts):
            [{"name"      : T("MyLabel"), # A string: the label for the layer
              "id"        : "search",     # A string: the id for the layer (for manipulation by JavaScript)
              "active"    : True,         # Is the feed displayed upon load or needs ticking to load afterwards?
              EITHER:
              "layer_id"  : 1,            # An integer: the layer_id to load (optional alternative to specifying URL/tablename/marker)
              "filter"    : "filter",     # A string: an optional URL filter which *replaces* any in the layer
              OR:
              "tablename" : "module_resource", # A string: the tablename (used to determine whether to locate via location_id or site_id)
              "url"       : "/eden/module/resource.geojson?filter", # A URL to load the resource

              "marker"    : None,         # Optional: A per-Layer marker dict for the icon used to display the feature (overrides layer_id if-set)
              "opacity"   : 1,            # Optional (overrides layer_id if-set)
              "cluster_attribute",        # Optional (overrides layer_id if-set)
              "cluster_distance",         # Optional (overrides layer_id if-set)
              "cluster_threshold",        # Optional (overrides layer_id if-set)
              "dir",                      # Optional (overrides layer_id if-set)
              "style",                    # Optional (overrides layer_id if-set)
              }]
        @param wms_browser: WMS Server's GetCapabilities & options (dict)
            {"name": T("MyLabel"),     # Name for the Folder in LayerTree
             "url": string             # URL of GetCapabilities
             }
        @param catalogue_layers: Show all the enabled Layers from the GIS Catalogue
                                 Defaults to False: Just show the default Base layer
        @param legend: True: Show the GeoExt Legend panel, False: No Panel, "floating": New floating Legend Panel
        @param toolbar: Show the Icon Toolbar of Controls
        @param area: Show the Area tool on the Toolbar
        @param nav: Show the Navigation controls on the Toolbar
        @param save: Show the Save tool on the Toolbar
        @param search: Show the Geonames search box
        @param mouse_position: Show the current coordinates in the bottom-right of the map. 3 Options: 'normal', 'mgrs', False (defaults to checking deployment_settings, which defaults to 'normal')
        @param overview: Show the Overview Map (defaults to checking deployment_settings, which defaults to True)
        @param permalink: Show the Permalink control (defaults to checking deployment_settings, which defaults to True)
        @param scaleline: Show the ScaleLine control (defaults to checking deployment_settings, which defaults to True)
        @param zoomcontrol: Show the Zoom control (defaults to checking deployment_settings, which defaults to True)
        @param zoomWheelEnabled: Whether the mouse wheel zooms the map
        @param print_tool: Show a print utility (NB This requires server-side support: http://eden.sahanafoundation.org/wiki/BluePrintGISPrinting)
            {"url": string,            # URL of print service (e.g. http://localhost:8080/geoserver/pdf/)
             "mapTitle": string,       # Title for the Printed Map (optional)
             "subTitle": string        # subTitle for the Printed Map (optional)
             }
        @param mgrs: Use the MGRS Control to select PDFs
            {"name": string,           # Name for the Control
             "url": string             # URL of PDF server
             }
            @ToDo: Also add MGRS Search support: http://gxp.opengeo.org/master/examples/mgrs.html
        @param window: Have viewport pop out of page into a resizable window
        @param window_hide: Have the window hidden by default, ready to appear (e.g. on clicking a button)
        @param closable: In Window mode, whether the window is closable or not
        @param maximizable: In Window mode, whether the window is maximizable or not
        @param collapsed: Start the Tools panel (West region) collapsed
        @param callback: Code to run once the Map JavaScript has loaded
        @param plugins: an iterable of objects which support the following methods:
                            .extend_gis_map(map)
                        Client-side portion supports the following methods:
                            .addToMapWindow(items)
                            .setup(map)
    """
    return MAP(id = id,
               height = height,
               width = width,
               bbox = bbox,
               lat = lat,
               lon = lon,
               zoom = zoom,
               projection = projection,
               add_feature = add_feature,
               add_feature_active = add_feature_active,
               add_line = add_line,
               add_line_active = add_line_active,
               add_polygon = add_polygon,
               add_polygon_active = add_polygon_active,
               features = features,
               feature_queries = feature_queries,
               feature_resources = feature_resources,
               wms_browser = wms_browser,
               catalogue_layers = catalogue_layers,
               legend = legend,
               toolbar = toolbar,
               area = area,
               nav = nav,
               save = save,
               search = search,
               mouse_position = mouse_position,
               overview = overview,
               permalink = permalink,
               scaleline = scaleline,
               zoomcontrol = zoomcontrol,
               zoomWheelEnabled = zoomWheelEnabled,
               print_tool = print_tool,
               mgrs = mgrs,
               window = window,
               window_hide = window_hide,
               closable = closable,
               maximizable = maximizable,
               collapsed = collapsed,
               callback = callback,
               plugins = plugins,
               )
# =============================================================================
class MAP(DIV):
"""
HTML Helper to render a Map
- allows the Map to be generated only when being rendered
- used by gis.show_map()
"""
def __init__(self, **opts):
    """
        Constructor

        :param **opts: options to pass to the Map for server-side processing
    """
    # _setup() is deferred until the Map is actually rendered
    self.setup = False
    self.callback = None
    # Options for server-side processing
    self.opts = opts
    self.id = opts.get("id", "default_map")
    # Options for client-side processing
    self.options = {}
    # Component: the Map is embedded (not a Window)
    # Needs to be an ID which means we can't have multiple per page :/
    # - Alternatives are also fragile. See s3.gis.js
    map_panel = DIV(DIV(_class="map_loader"),
                    _id="map_panel")
    self.components = [map_panel]
    self._setnode(map_panel)
    # Other DIV settings
    self.attributes = {"_class": "map_wrapper",
                       "_id": self.id,
                       }
    self.parent = None
# -------------------------------------------------------------------------
def _setup(self):
"""
Setup the Map
- not done during init() to be as Lazy as possible
- separated from xml() in order to be able to read options to put
into scripts (callback or otherwise)
"""
# Default configuration
config = GIS.get_config()
if not config:
# No prepop - Bail
current.session.error = current.T("Map cannot display without prepop data!")
redirect(URL(c="default", f="index"))
opts = self.opts
request = current.request
response = current.response
if not response.warning:
response.warning = ""
s3 = response.s3
T = current.T
s3db = current.s3db
auth = current.auth
settings = current.deployment_settings
MAP_ADMIN = auth.s3_has_role(current.session.s3.system_roles.MAP_ADMIN)
# Support bookmarks (such as from the control)
# - these over-ride the arguments
get_vars = request.get_vars
# JS Globals
globals = {}
# Map Options for client-side processing
options = {}
# Strings used by all Maps
i18n = {"gis_base_layers": T("Base Layers"),
"gis_overlays": T(settings.get_gis_label_overlays()),
"gis_layers": T(settings.get_gis_layers_label()),
"gis_draft_layer": T("Draft Features"),
"gis_cluster_multiple": T("There are multiple records at this location"),
"gis_loading": T("Loading"),
"gis_requires_login": T("Requires Login"),
"gis_too_many_features": T("There are too many features, please Zoom In or Filter"),
"gis_zoomin": T("Zoom In"),
}
############
# Viewport
############
height = opts.get("height", None)
if height:
map_height = height
else:
map_height = settings.get_gis_map_height()
options["map_height"] = map_height
width = opts.get("width", None)
if width:
map_width = width
else:
map_width = settings.get_gis_map_width()
options["map_width"] = map_width
# Bounding Box or Center/Zoom
bbox = opts.get("bbox", None)
if (bbox
and (-90 <= bbox["lat_max"] <= 90)
and (-90 <= bbox["lat_min"] <= 90)
and (-180 <= bbox["lon_max"] <= 180)
and (-180 <= bbox["lon_min"] <= 180)
):
# We have sane Bounds provided, so we should use them
pass
else:
# No bounds or we've been passed bounds which aren't sane
bbox = None
# Use Lat/Lon/Zoom to center instead
if "lat" in get_vars and get_vars.lat:
lat = float(get_vars.lat)
else:
lat = opts.get("lat", None)
if lat is None or lat == "":
lat = config.lat
if "lon" in get_vars and get_vars.lon:
lon = float(get_vars.lon)
else:
lon = opts.get("lon", None)
if lon is None or lon == "":
lon = config.lon
if bbox:
# Calculate from Bounds
options["bbox"] = [bbox["lon_min"], # left
bbox["lat_min"], # bottom
bbox["lon_max"], # right
bbox["lat_max"], # top
]
else:
options["lat"] = lat
options["lon"] = lon
if "zoom" in get_vars:
zoom = int(get_vars.zoom)
else:
zoom = opts.get("zoom", None)
if not zoom:
zoom = config.zoom
options["zoom"] = zoom or 1
options["numZoomLevels"] = config.zoom_levels
############
# Projection
############
projection = opts.get("projection", None)
if not projection:
projection = config.epsg
options["projection"] = projection
if projection not in (900913, 4326):
# Test for Valid Projection file in Proj4JS library
projpath = os.path.join(
request.folder, "static", "scripts", "gis", "proj4js", \
"lib", "defs", "EPSG%s.js" % projection
)
try:
f = open(projpath, "r")
f.close()
except:
if projection:
proj4js = config.proj4js
if proj4js:
# Create it
try:
f = open(projpath, "w")
except IOError, e:
response.error = \
T("Map not available: Cannot write projection file - %s") % e
else:
f.write('''Proj4js.defs["EPSG:4326"]="%s"''' % proj4js)
f.close()
else:
response.warning = \
T("Map not available: Projection %(projection)s not supported - please add definition to %(path)s") % \
dict(projection = "'%s'" % projection,
path= "/static/scripts/gis/proj4js/lib/defs")
else:
response.error = \
T("Map not available: No Projection configured")
return None
options["maxExtent"] = config.maxExtent
options["units"] = config.units
########
# Marker
########
if config.marker_image:
options["marker_default"] = dict(i = config.marker_image,
h = config.marker_height,
w = config.marker_width,
)
# @ToDo: show_map() opts with fallback to settings
# Keep these in sync with scaleImage() in s3.gis.js
marker_max_height = settings.get_gis_marker_max_height()
if marker_max_height != 35:
options["max_h"] = marker_max_height
marker_max_width = settings.get_gis_marker_max_width()
if marker_max_width != 30:
options["max_w"] = marker_max_width
#########
# Colours
#########
# Keep these in sync with s3.gis.js
cluster_fill = settings.get_gis_cluster_fill()
if cluster_fill and cluster_fill != '8087ff':
options["cluster_fill"] = cluster_fill
cluster_stroke = settings.get_gis_cluster_stroke()
if cluster_stroke and cluster_stroke != '2b2f76':
options["cluster_stroke"] = cluster_stroke
select_fill = settings.get_gis_select_fill()
if select_fill and select_fill != 'ffdc33':
options["select_fill"] = select_fill
select_stroke = settings.get_gis_select_stroke()
if select_stroke and select_stroke != 'ff9933':
options["select_stroke"] = select_stroke
if not settings.get_gis_cluster_label():
options["cluster_label"] = False
########
# Layout
########
if not opts.get("closable", False):
options["windowNotClosable"] = True
if opts.get("window", False):
options["window"] = True
if opts.get("window_hide", False):
options["windowHide"] = True
if opts.get("maximizable", False):
options["maximizable"] = True
else:
options["maximizable"] = False
# Collapsed
if opts.get("collapsed", False):
options["west_collapsed"] = True
# LayerTree
if not settings.get_gis_layer_tree_base():
options["hide_base"] = True
if not settings.get_gis_layer_tree_overlays():
options["hide_overlays"] = True
if not settings.get_gis_layer_tree_expanded():
options["folders_closed"] = True
if settings.get_gis_layer_tree_radio():
options["folders_radio"] = True
#######
# Tools
#######
# Toolbar
if opts.get("toolbar", False):
options["toolbar"] = True
i18n["gis_length_message"] = T("The length is")
i18n["gis_length_tooltip"] = T("Measure Length: Click the points along the path & end with a double-click")
i18n["gis_zoomfull"] = T("Zoom to maximum map extent")
i18n["gis_zoominbutton"] = T("Zoom In: click in the map or use the left mouse button and drag to create a rectangle")
i18n["gis_zoomout"] = T("Zoom Out: click in the map or use the left mouse button and drag to create a rectangle")
i18n["gis_geoLocate"] = T("Zoom to Current Location")
# Search
if opts.get("search", False):
# Presence of label adds support JS in Loader and turns feature on in s3.gis.js
# @ToDo: Provide explicit option to support multiple maps in a page with different options
i18n["gis_search"] = T("Search location in Geonames")
#i18n["gis_search_no_internet"] = T("Geonames.org search requires Internet connectivity!")
# Show NAV controls?
# e.g. removed within S3LocationSelectorWidget[2]
nav = opts.get("nav", None)
if nav is None:
nav = settings.get_gis_nav_controls()
if nav:
i18n["gis_pan"] = T("Pan Map: keep the left mouse button pressed and drag the map")
i18n["gis_navPrevious"] = T("Previous View")
i18n["gis_navNext"] = T("Next View")
else:
options["nav"] = False
# Show Area control?
if opts.get("area", False):
options["area"] = True
i18n["gis_area_message"] = T("The area is")
i18n["gis_area_tooltip"] = T("Measure Area: Click the points around the polygon & end with a double-click")
# Show Save control?
# e.g. removed within S3LocationSelectorWidget[2]
if opts.get("save", True) and auth.is_logged_in():
options["save"] = True
i18n["gis_save"] = T("Save: Default Lat, Lon & Zoom for the Viewport")
if MAP_ADMIN or (config.pe_id == auth.user.pe_id):
# Personal config or MapAdmin, so Save Button does Updates
options["config_id"] = config.id
# OSM Authoring
pe_id = auth.user.pe_id if auth.s3_logged_in() else None
if pe_id and s3db.auth_user_options_get_osm(pe_id):
# Presence of label turns feature on in s3.gis.js
# @ToDo: Provide explicit option to support multiple maps in a page with different options
i18n["gis_potlatch"] = T("Edit the OpenStreetMap data for this area")
i18n["gis_osm_zoom_closer"] = T("Zoom in closer to Edit OpenStreetMap layer")
# MGRS PDF Browser
mgrs = opts.get("mgrs", None)
if mgrs:
options["mgrs_name"] = mgrs["name"]
options["mgrs_url"] = mgrs["url"]
else:
# No Toolbar
# Show Save control?
# e.g. removed within S3LocationSelectorWidget[2]
if opts.get("save", True) and auth.is_logged_in():
db = current.db
permit = auth.s3_has_permission
ctable = db.gis_config
if permit("create", ctable):
options["save"] = True
i18n["gis_save_map"] = T("Save Map")
i18n["gis_new_map"] = T("Save as New Map?")
i18n["gis_name_map"] = T("Name of Map")
i18n["save"] = T("Save")
i18n["saved"] = T("Saved")
config_id = config.id
_config = db(ctable.id == config_id).select(ctable.uuid,
ctable.name,
limitby=(0, 1),
).first()
if MAP_ADMIN:
i18n["gis_my_maps"] = T("Saved Maps")
else:
options["pe_id"] = auth.user.pe_id
i18n["gis_my_maps"] = T("My Maps")
if permit("update", ctable, record_id=config_id):
options["config_id"] = config_id
options["config_name"] = _config.name
elif _config.uuid != "SITE_DEFAULT":
options["config_name"] = _config.name
# Legend panel
legend = opts.get("legend", False)
if legend:
i18n["gis_legend"] = T("Legend")
if legend == "float":
options["legend"] = "float"
if settings.get_gis_layer_metadata():
options["metadata"] = True
# MAP_ADMIN better for simpler deployments
#if auth.s3_has_permission("create", "cms_post_layer"):
if MAP_ADMIN:
i18n["gis_metadata_create"] = T("Create 'More Info'")
i18n["gis_metadata_edit"] = T("Edit 'More Info'")
else:
i18n["gis_metadata"] = T("More Info")
else:
options["legend"] = True
# Draw Feature Controls
if opts.get("add_feature", False):
i18n["gis_draw_feature"] = T("Add Point")
if opts.get("add_feature_active", False):
options["draw_feature"] = "active"
else:
options["draw_feature"] = "inactive"
if opts.get("add_line", False):
i18n["gis_draw_line"] = T("Add Line")
if opts.get("add_line_active", False):
options["draw_line"] = "active"
else:
options["draw_line"] = "inactive"
if opts.get("add_polygon", False):
i18n["gis_draw_polygon"] = T("Add Polygon")
if opts.get("add_polygon_active", False):
options["draw_polygon"] = "active"
else:
options["draw_polygon"] = "inactive"
# Layer Properties
if settings.get_gis_layer_properties():
# Presence of label turns feature on in s3.gis.js
i18n["gis_properties"] = T("Layer Properties")
# Upload Layer
if settings.get_gis_geoserver_password():
# Presence of label adds support JS in Loader and turns feature on in s3.gis.js
# @ToDo: Provide explicit option to support multiple maps in a page with different options
i18n["gis_uploadlayer"] = T("Upload Shapefile")
# WMS Browser
wms_browser = opts.get("wms_browser", None)
if wms_browser:
options["wms_browser_name"] = wms_browser["name"]
# urlencode the URL
options["wms_browser_url"] = urllib.quote(wms_browser["url"])
# Mouse Position
# 'normal', 'mgrs' or 'off'
mouse_position = opts.get("mouse_position", None)
if mouse_position is None:
mouse_position = settings.get_gis_mouse_position()
if mouse_position == "mgrs":
options["mouse_position"] = "mgrs"
# Tell loader to load support scripts
globals["mgrs"] = True
elif mouse_position:
options["mouse_position"] = True
# Overview Map
overview = opts.get("overview", None)
if overview is None:
overview = settings.get_gis_overview()
if not overview:
options["overview"] = False
# Permalink
permalink = opts.get("permalink", None)
if permalink is None:
permalink = settings.get_gis_permalink()
if not permalink:
options["permalink"] = False
# ScaleLine
scaleline = opts.get("scaleline", None)
if scaleline is None:
scaleline = settings.get_gis_scaleline()
if not scaleline:
options["scaleline"] = False
# Zoom control
zoomcontrol = opts.get("zoomcontrol", None)
if zoomcontrol is None:
zoomcontrol = settings.get_gis_zoomcontrol()
if not zoomcontrol:
options["zoomcontrol"] = False
zoomWheelEnabled = opts.get("zoomWheelEnabled", True)
if not zoomWheelEnabled:
options["no_zoom_wheel"] = True
########
# Layers
########
# Duplicate Features to go across the dateline?
# @ToDo: Action this again (e.g. for DRRPP)
if settings.get_gis_duplicate_features():
options["duplicate_features"] = True
# Features
features = opts.get("features", None)
if features:
options["features"] = addFeatures(features)
# Feature Queries
feature_queries = opts.get("feature_queries", None)
if feature_queries:
options["feature_queries"] = addFeatureQueries(feature_queries)
# Feature Resources
feature_resources = opts.get("feature_resources", None)
if feature_resources:
options["feature_resources"] = addFeatureResources(feature_resources)
if opts.get("catalogue_layers", False):
# Add all Layers from the Catalogue
layer_types = [LayerArcREST,
LayerBing,
LayerEmpty,
LayerGoogle,
LayerOSM,
LayerTMS,
LayerWMS,
LayerXYZ,
LayerJS,
LayerTheme,
LayerGeoJSON,
LayerGPX,
LayerCoordinate,
LayerGeoRSS,
LayerKML,
LayerOpenWeatherMap,
LayerShapefile,
LayerWFS,
LayerFeature,
]
else:
# Add just the default Base Layer
s3.gis.base = True
layer_types = []
db = current.db
ltable = s3db.gis_layer_config
etable = db.gis_layer_entity
query = (etable.id == ltable.layer_id) & \
(ltable.config_id == config["id"]) & \
(ltable.base == True) & \
(ltable.enabled == True)
layer = db(query).select(etable.instance_type,
limitby=(0, 1)).first()
if not layer:
# Use Site Default
ctable = db.gis_config
query = (etable.id == ltable.layer_id) & \
(ltable.config_id == ctable.id) & \
(ctable.uuid == "SITE_DEFAULT") & \
(ltable.base == True) & \
(ltable.enabled == True)
layer = db(query).select(etable.instance_type,
limitby=(0, 1)).first()
if layer:
layer_type = layer.instance_type
if layer_type == "gis_layer_openstreetmap":
layer_types = [LayerOSM]
elif layer_type == "gis_layer_google":
# NB v3 doesn't work when initially hidden
layer_types = [LayerGoogle]
elif layer_type == "gis_layer_arcrest":
layer_types = [LayerArcREST]
elif layer_type == "gis_layer_bing":
layer_types = [LayerBing]
elif layer_type == "gis_layer_tms":
layer_types = [LayerTMS]
elif layer_type == "gis_layer_wms":
layer_types = [LayerWMS]
elif layer_type == "gis_layer_xyz":
layer_types = [LayerXYZ]
elif layer_type == "gis_layer_empty":
layer_types = [LayerEmpty]
if not layer_types:
layer_types = [LayerEmpty]
scripts = []
scripts_append = scripts.append
for LayerType in layer_types:
try:
# Instantiate the Class
layer = LayerType()
layer.as_dict(options)
for script in layer.scripts:
scripts_append(script)
except Exception, exception:
error = "%s not shown: %s" % (LayerType.__name__, exception)
if s3.debug:
raise HTTP(500, error)
else:
response.warning += error
# WMS getFeatureInfo
# (loads conditionally based on whether queryable WMS Layers have been added)
if s3.gis.get_feature_info:
# Presence of label turns feature on
# @ToDo: Provide explicit option to support multiple maps in a page with different options
i18n["gis_get_feature_info"] = T("Get Feature Info")
i18n["gis_feature_info"] = T("Feature Info")
# Callback can be set before _setup()
if not self.callback:
self.callback = opts.get("callback", "DEFAULT")
# These can be read/modified after _setup() & before xml()
self.options = options
self.globals = globals
self.i18n = i18n
self.scripts = scripts
# Set up map plugins
# - currently just used by Climate
# @ToDo: Get these working with new loader
# This, and any code it generates, is done last
# However, map plugin should not assume this.
self.plugin_callbacks = []
plugins = opts.get("plugins", None)
if plugins:
for plugin in plugins:
plugin.extend_gis_map(self)
# Flag to xml() that we've already been run
self.setup = True
return options
# -------------------------------------------------------------------------
def xml(self):
"""
Render the Map
- this is primarily done by inserting a lot of JavaScript
- CSS loaded as-standard to avoid delays in page loading
- HTML added in init() as a component
"""
if not self.setup:
self._setup()
# Add ExtJS
# @ToDo: Do this conditionally on whether Ext UI is used
s3_include_ext()
s3 = current.response.s3
js_global_append = s3.js_global.append
i18n_dict = self.i18n
i18n = []
i18n_append = i18n.append
for key, val in i18n_dict.items():
# @ToDo: Check if already inserted (optimise multiple maps)
i18n_append('''i18n.%s="%s"''' % (key, val))
i18n = '''\n'''.join(i18n)
js_global_append(i18n)
globals_dict = self.globals
globals = []
globals_append = globals.append
dumps = json.dumps
for key, val in globals_dict.items():
# @ToDo: Check if already inserted (optimise multiple maps)
globals_append('''S3.gis.%s=%s''' % (key, dumps(val, separators=SEPARATORS)))
globals = '''\n'''.join(globals)
js_global_append(globals)
scripts = s3.scripts
script = URL(c="static", f="scripts/S3/s3.gis.loader.js")
if script not in scripts:
scripts.append(script)
callback = self.callback
map_id = self.id
options = self.options
projection = options["projection"]
options = dumps(options, separators=SEPARATORS)
plugin_callbacks = '''\n'''.join(self.plugin_callbacks)
if callback:
if callback == "DEFAULT":
if map_id == "default_map":
callback = '''S3.gis.show_map(null,%s)''' % options
else:
callback = '''S3.gis.show_map(%s,%s)''' % (map_id, options)
else:
# Store options where they can be read by a later show_map()
js_global_append('''S3.gis.options["%s"]=%s''' % (map_id, options))
script = URL(c="static", f="scripts/yepnope.1.5.4-min.js")
if script not in scripts:
scripts.append(script)
if plugin_callbacks:
callback = '''%s\n%s''' % (callback, plugin_callbacks)
callback = '''function(){%s}''' % callback
else:
# Store options where they can be read by a later show_map()
js_global_append('''S3.gis.options["%s"]=%s''' % (map_id, options))
if plugin_callbacks:
callback = '''function(){%s}''' % plugin_callbacks
else:
callback = '''null'''
loader = '''s3_gis_loadjs(%(debug)s,%(projection)s,%(callback)s,%(scripts)s)''' \
% dict(debug = "true" if s3.debug else "false",
projection = projection,
callback = callback,
scripts = self.scripts
)
s3.jquery_ready.append(loader)
# Return the HTML
return super(MAP, self).xml()
# =============================================================================
def addFeatures(features):
    """
        Add Simple Features to the Draft layer
        - used by S3LocationSelectorWidget

        :param features: iterable of features understood by GIS.simplify
        :returns: list of GeoJSON Feature dicts (features which simplify
                  to nothing are silently dropped)
    """
    simplify = GIS.simplify
    output = []
    for feature in features:
        geojson = simplify(feature, output="geojson")
        if not geojson:
            # Nothing usable for this feature: skip it
            continue
        output.append({"type": "Feature",
                       "geometry": json.loads(geojson),
                       })
    return output
# =============================================================================
def addFeatureQueries(feature_queries):
    """
        Add Feature Queries to the map
        - These can be Rows or Storage()
        NB These considerations need to be taken care of before arriving here:
           Security of data
           Localisation of name/popup_label

        :param feature_queries: list of layer dicts, each with "name" and
                                "query" (rows with lat/lon either direct or
                                via a gis_location join), plus optional
                                marker/style/cluster settings
        :returns: list of layer definition dicts for the client-side JS
    """
    db = current.db
    s3db = current.s3db
    cache = s3db.cache
    request = current.request
    controller = request.controller
    function = request.function
    fqtable = s3db.gis_feature_query
    mtable = s3db.gis_marker
    auth = current.auth
    auth_user = auth.user
    # Bind unconditionally: this is only ever *called* for anonymous
    # records below, but was previously only bound for logged-in users,
    # which raised a NameError in the anonymous path
    s3_make_session_owner = auth.s3_make_session_owner
    if auth_user:
        created_by = auth_user.id
    else:
        # Anonymous
        # @ToDo: A deployment with many Anonymous Feature Queries being
        #        accessed will need to change this design - e.g. use session ID instead
        created_by = None
    layers_feature_query = []
    append = layers_feature_query.append
    for layer in feature_queries:
        name = str(layer["name"])
        _layer = dict(name=name)
        name_safe = re.sub("\W", "_", name)
        # Lat/Lon via Join or direct?
        try:
            layer["query"][0].gis_location.lat
            join = True
        except:
            join = False
        # Push the Features into a temporary table in order to have them accessible via GeoJSON
        # @ToDo: Maintenance Script to clean out old entries (> 24 hours?)
        cname = "%s_%s_%s" % (name_safe,
                              controller,
                              function)
        # Clear old records
        query = (fqtable.name == cname) & \
                (fqtable.created_by == created_by)
        db(query).delete()
        for row in layer["query"]:
            rowdict = {"name" : cname}
            if join:
                rowdict["lat"] = row.gis_location.lat
                rowdict["lon"] = row.gis_location.lon
            else:
                rowdict["lat"] = row["lat"]
                rowdict["lon"] = row["lon"]
            if "popup_url" in row:
                rowdict["popup_url"] = row["popup_url"]
            if "popup_label" in row:
                rowdict["popup_label"] = row["popup_label"]
            if "marker" in row:
                # Per-feature Marker provided as a Row
                rowdict["marker_url"] = URL(c="static", f="img",
                                            args=["markers",
                                                  row["marker"].image])
                rowdict["marker_height"] = row["marker"].height
                rowdict["marker_width"] = row["marker"].width
            else:
                # Per-feature Marker provided as individual attributes
                if "marker_url" in row:
                    rowdict["marker_url"] = row["marker_url"]
                if "marker_height" in row:
                    rowdict["marker_height"] = row["marker_height"]
                if "marker_width" in row:
                    rowdict["marker_width"] = row["marker_width"]
            if "shape" in row:
                rowdict["shape"] = row["shape"]
            if "size" in row:
                rowdict["size"] = row["size"]
            if "colour" in row:
                rowdict["colour"] = row["colour"]
            if "opacity" in row:
                rowdict["opacity"] = row["opacity"]
            record_id = fqtable.insert(**rowdict)
            if not created_by:
                # Anonymous record: let the session own it
                s3_make_session_owner(fqtable, record_id)
        # URL to retrieve the data
        url = "%s.geojson?feature_query.name=%s&feature_query.created_by=%s" % \
                (URL(c="gis", f="feature_query"),
                 cname,
                 created_by)
        _layer["url"] = url
        if "active" in layer and not layer["active"]:
            _layer["visibility"] = False
        if "marker" in layer:
            # per-Layer Marker
            marker = layer["marker"]
            if isinstance(marker, int):
                # integer (marker_id) not row
                marker = db(mtable.id == marker).select(mtable.image,
                                                        mtable.height,
                                                        mtable.width,
                                                        limitby=(0, 1),
                                                        cache=cache
                                                        ).first()
            if marker:
                # @ToDo: Single option as dict
                _layer["marker_url"] = marker["image"]
                _layer["marker_height"] = marker["height"]
                _layer["marker_width"] = marker["width"]
        if "opacity" in layer and layer["opacity"] != 1:
            _layer["opacity"] = "%.1f" % layer["opacity"]
        # Only pass non-default cluster settings to the client
        if "cluster_attribute" in layer and \
           layer["cluster_attribute"] != CLUSTER_ATTRIBUTE:
            _layer["cluster_attribute"] = layer["cluster_attribute"]
        if "cluster_distance" in layer and \
           layer["cluster_distance"] != CLUSTER_DISTANCE:
            _layer["cluster_distance"] = layer["cluster_distance"]
        if "cluster_threshold" in layer and \
           layer["cluster_threshold"] != CLUSTER_THRESHOLD:
            _layer["cluster_threshold"] = layer["cluster_threshold"]
        append(_layer)
    return layers_feature_query
# =============================================================================
def addFeatureResources(feature_resources):
    """
        Add Feature Resources to the map
        - REST URLs to back-end resources

        :param feature_resources: list of dicts, each describing either a
                                  Catalogue Layer (has "layer_id") or a
                                  simple REST URL (has "url" & "tablename"),
                                  plus optional style/marker/cluster settings
        :returns: list of layer definition dicts for the client-side JS
    """
    db = current.db
    s3db = current.s3db
    config = GIS.get_config()
    ftable = s3db.gis_layer_feature
    ltable = s3db.gis_layer_config
    layers_feature_resource = []
    append = layers_feature_resource.append
    for layer in feature_resources:
        name = str(layer["name"])
        _layer = dict(name=name)
        # Sanitise the DOM id (NB "id" shadows the builtin here)
        id = str(layer["id"])
        id = re.sub("\W", "_", id)
        _layer["id"] = id
        # Are we loading a Catalogue Layer or a simple URL?
        layer_id = layer.get("layer_id", None)
        if layer_id:
            query = (ftable.layer_id == layer_id)
            # Left join to pick up any per-config style
            lquery = (ltable.layer_id == layer_id) & \
                     (ltable.config_id == config.id)
            left = ltable.on(lquery)
            row = db(query).select(ftable.id,
                                   ftable.controller,
                                   ftable.function,
                                   ftable.filter,
                                   ftable.trackable,
                                   ftable.use_site,
                                   ftable.opacity,
                                   ftable.cluster_attribute,
                                   ftable.cluster_distance,
                                   ftable.cluster_threshold,
                                   ftable.dir,
                                   ltable.style,
                                   left=left,
                                   limitby=(0, 1)).first()
            # Explicit style in the call wins over the config's style
            style = layer.get("style", row["gis_layer_config.style"])
            row = row["gis_layer_feature"]
            if row.use_site:
                maxdepth = 1
                show_ids = "&show_ids=true"
            else:
                maxdepth = 0
                show_ids = ""
            url = "%s.geojson?layer=%i&components=None&maxdepth=%s%s" % \
                (URL(row.controller, row.function), row.id, maxdepth, show_ids)
            # Use specified filter or fallback to the one in the layer
            filter = layer.get("filter", row.filter)
            if filter:
                url = "%s&%s" % (url, filter)
            if row.trackable:
                url = "%s&track=1" % url
            # Per-call overrides with fallback to the layer record
            opacity = layer.get("opacity", row.opacity)
            cluster_attribute = layer.get("cluster_attribute",
                                          row.cluster_attribute) or CLUSTER_ATTRIBUTE
            cluster_distance = layer.get("cluster_distance",
                                         row.cluster_distance)
            cluster_threshold = layer.get("cluster_threshold",
                                          row.cluster_threshold)
            dir = layer.get("dir", row.dir)
            if style:
                try:
                    # JSON Object?
                    style = json.loads(style)
                except:
                    style = None
            if not style:
                # No style: fall back to a Marker (per-layer lookup)
                marker = layer.get("marker",
                                   Marker(layer_id=layer_id).as_dict())
        else:
            # URL to retrieve the data
            url = layer["url"]
            tablename = layer["tablename"]
            table = s3db[tablename]
            # Optimise the query & tell back-end not to add the type to the tooltips
            if "location_id" in table.fields:
                maxdepth = 0
                show_ids = ""
            elif "site_id" in table.fields:
                maxdepth = 1
                show_ids = "&show_ids=true"
            elif tablename == "gis_location":
                maxdepth = 0
                show_ids = ""
            else:
                # Not much we can do!
                continue
            options = "components=None&maxdepth=%s%s&label_off=1" % \
                (maxdepth, show_ids)
            if "?" in url:
                url = "%s&%s" % (url, options)
            else:
                url = "%s?%s" % (url, options)
            # No layer record to fall back to: use global defaults
            opacity = layer.get("opacity", 1)
            cluster_attribute = layer.get("cluster_attribute",
                                          CLUSTER_ATTRIBUTE)
            cluster_distance = layer.get("cluster_distance",
                                         CLUSTER_DISTANCE)
            cluster_threshold = layer.get("cluster_threshold",
                                          CLUSTER_THRESHOLD)
            dir = layer.get("dir", None)
            style = layer.get("style", None)
            if style:
                try:
                    # JSON Object?
                    style = json.loads(style)
                except:
                    style = None
            if not style:
                marker = layer.get("marker", None)
        if "active" in layer and not layer["active"]:
            _layer["visibility"] = False
        if opacity != 1:
            _layer["opacity"] = "%.1f" % opacity
        # Only pass non-default cluster settings to the client
        if cluster_attribute != CLUSTER_ATTRIBUTE:
            _layer["cluster_attribute"] = cluster_attribute
        if cluster_distance != CLUSTER_DISTANCE:
            _layer["cluster_distance"] = cluster_distance
        if cluster_threshold != CLUSTER_THRESHOLD:
            _layer["cluster_threshold"] = cluster_threshold
        if dir:
            _layer["dir"] = dir
        if style:
            _layer["style"] = style
        elif marker:
            # Per-layer Marker
            _layer["marker"] = dict(i = marker["image"],
                                    h = marker["height"],
                                    w = marker["width"],
                                    )
        else:
            # Request the server to provide per-feature Markers
            url = "%s&markers=1" % url
        _layer["url"] = url
        append(_layer)
    return layers_feature_resource
# =============================================================================
class Marker(object):
    """
        Represents a Map Marker

        Resolution order: explicit record ID -> per-layer marker from the
        active config's symbology -> None for Polygon/LineString layers ->
        the config's default marker.

        @ToDo: Support Markers in Themes
    """
    def __init__(self, id=None, tablename=None, layer_id=None):
        """
            :param id: gis_marker record ID to load directly
            :param tablename: layer table name - used to detect
                              Polygon/LineString layers which should not
                              fall back to the default marker
            :param layer_id: layer entity ID for a per-layer marker lookup
        """
        db = current.db
        s3db = current.s3db
        mtable = s3db.gis_marker
        marker = None
        config = None
        polygons = False
        if id:
            # Lookup the Marker details from its ID
            marker = db(mtable.id == id).select(mtable.image,
                                                mtable.height,
                                                mtable.width,
                                                limitby=(0, 1),
                                                cache=s3db.cache).first()
        elif layer_id:
            # Check if we have a Marker for this Layer
            config = GIS.get_config()
            ltable = s3db.gis_layer_symbology
            query = (ltable.layer_id == layer_id) & \
                    (ltable.symbology_id == config.symbology_id) & \
                    (ltable.marker_id == mtable.id)
            marker = db(query).select(mtable.image,
                                      mtable.height,
                                      mtable.width,
                                      limitby=(0, 1)).first()
            if not marker:
                # Check to see if we're a Polygon/LineString
                # (& hence shouldn't use a default marker)
                if tablename == "gis_layer_feature":
                    table = db.gis_layer_feature
                    query = (table.layer_id == layer_id)
                    layer = db(query).select(table.polygons,
                                             limitby=(0, 1)).first()
                    if layer and layer.polygons:
                        polygons = True
                elif tablename == "gis_layer_shapefile":
                    table = db.gis_layer_shapefile
                    query = (table.layer_id == layer_id)
                    # gis_feature_type 1 = Point; anything else is styled
                    layer = db(query).select(table.gis_feature_type,
                                             limitby=(0, 1)).first()
                    if layer and layer.gis_feature_type != 1:
                        polygons = True
        if marker:
            self.image = marker.image
            self.height = marker.height
            self.width = marker.width
        elif polygons:
            # Polygon/LineString layers are styled rather than markered
            # NOTE(review): height/width are NOT set in this branch, so
            # as_dict() would raise AttributeError here - verify callers
            # only use add_attributes_to_output() for such layers
            self.image = None
        else:
            # Default Marker
            if not config:
                config = GIS.get_config()
            self.image = config.marker_image
            self.height = config.marker_height
            self.width = config.marker_width
    # -------------------------------------------------------------------------
    def add_attributes_to_output(self, output):
        """
            Called by Layer.as_dict()

            Adds this marker's attributes to the given output dict
            (no-op when image is None, i.e. Polygon/LineString layers)
        """
        if self.image:
            output["marker"] = dict(i = self.image,
                                    h = self.height,
                                    w = self.width,
                                    )
    # -------------------------------------------------------------------------
    def as_dict(self):
        """
            Called by gis.get_marker(), feature_resources & s3profile

            :returns: Storage with image, height & width
        """
        output = Storage(image = self.image,
                         height = self.height,
                         width = self.width,
                         )
        return output
# =============================================================================
class Projection(object):
    """
        Represents a Map Projection

        Resolves the EPSG code either from a gis_projection record
        (when an id is given) or from the active GIS config.
    """
    def __init__(self, id=None):
        """
            :param id: gis_projection record ID (optional - falls back
                       to the projection of the current config)
        """
        if not id:
            # Default projection from the active config
            self.epsg = GIS.get_config().epsg
            return
        s3db = current.s3db
        table = s3db.gis_projection
        record = current.db(table.id == id).select(table.epsg,
                                                   limitby=(0, 1),
                                                   cache=s3db.cache
                                                   ).first()
        self.epsg = record.epsg
# =============================================================================
class Layer(object):
"""
Abstract base class for Layers from Catalogue
"""
    def __init__(self):
        """
            Read all layer records of this layer type which are enabled in
            the active config(s), filter by permission/visibility, and
            instantiate them as self.sublayers (alphasorted by name)
        """
        sublayers = []
        append = sublayers.append
        # List of Scripts to load async with the Map JavaScript
        self.scripts = []
        gis = current.response.s3.gis
        s3db = current.s3db
        s3_has_role = current.auth.s3_has_role
        # Read the Layers enabled in the Active Configs
        if gis.config is None:
            GIS.set_config()
        tablename = self.tablename
        table = s3db[tablename]
        ctable = s3db.gis_config
        ltable = s3db.gis_layer_config
        # Select all non-meta fields of the layer table,
        # plus the per-config settings
        fields = table.fields
        metafields = s3_all_meta_field_names()
        fields = [table[f] for f in fields if f not in metafields]
        fields += [ltable.enabled,
                   ltable.visible,
                   ltable.base,
                   ltable.style,
                   ctable.pe_type,
                   ]
        query = (table.layer_id == ltable.layer_id) & \
                (ltable.config_id == ctable.id) & \
                (ltable.config_id.belongs(gis.config.ids))
        if gis.base == True:
            # Only show the default base layer
            if self.tablename == "gis_layer_empty":
                # Show even if disabled (as fallback)
                query = (table.id > 0)
            else:
                query &= (ltable.base == True)
        if current.deployment_settings.get_gis_layer_metadata():
            # Pull in any CMS 'More Info' post for the layer
            mtable = s3db.cms_post_layer
            left = mtable.on(mtable.layer_id == table.layer_id)
            fields.append(mtable.post_id)
        else:
            left = None
        # Order by pe_type so that more specific configs come first
        rows = current.db(query).select(orderby=ctable.pe_type,
                                        left=left,
                                        *fields)
        layer_ids = []
        lappend = layer_ids.append
        SubLayer = self.SubLayer
        # Flag to show whether we've set the default baselayer
        # (otherwise a config higher in the hierarchy can overrule one lower down)
        base = True
        # Layers requested to be visible via URL (e.g. embedded map)
        visible = current.request.get_vars.get("layers", None)
        if visible:
            visible = visible.split(".")
        else:
            visible = []
        for _record in rows:
            record = _record[tablename]
            # Check if we've already seen this layer
            layer_id = record.layer_id
            if layer_id in layer_ids:
                continue
            # Add layer to list of checked
            lappend(layer_id)
            # Check if layer is enabled
            _config = _record["gis_layer_config"]
            if not _config.enabled:
                continue
            # Check user is allowed to access the layer
            role_required = record.role_required
            if role_required and not s3_has_role(role_required):
                continue
            # All OK - add SubLayer
            record["visible"] = _config.visible or str(layer_id) in visible
            if base and _config.base:
                # var name can't conflict with OSM/WMS/ArcREST layers
                record["_base"] = True
                base = False
            else:
                record["_base"] = False
            if "style" not in record:
                # Take from the layer_config
                record["style"] = _config.style
            if left is not None:
                record["post_id"] = _record["cms_post_layer.post_id"]
            if tablename in ["gis_layer_bing", "gis_layer_google"]:
                # SubLayers handled differently
                append(record)
            else:
                append(SubLayer(tablename, record))
        # Alphasort layers
        # - client will only sort within their type: s3.gis.layers.js
        self.sublayers = sorted(sublayers, key=lambda row: row.name)
# -------------------------------------------------------------------------
def as_dict(self, options=None):
"""
Output the Layers as a Python dict
"""
sublayer_dicts = []
append = sublayer_dicts.append
sublayers = self.sublayers
for sublayer in sublayers:
# Read the output dict for this sublayer
sublayer_dict = sublayer.as_dict()
if sublayer_dict:
# Add this layer to the list of layers for this layer type
append(sublayer_dict)
if sublayer_dicts:
if options:
# Used by Map._setup()
options[self.dictname] = sublayer_dicts
else:
# Used by as_json() and hence as_javascript()
return sublayer_dicts
# -------------------------------------------------------------------------
def as_json(self):
"""
Output the Layers as JSON
"""
result = self.as_dict()
if result:
#return json.dumps(result, indent=4, separators=(",", ": "), sort_keys=True)
return json.dumps(result, separators=SEPARATORS)
# -------------------------------------------------------------------------
def as_javascript(self):
"""
Output the Layers as global Javascript
- suitable for inclusion in the HTML page
"""
result = self.as_json()
if result:
return '''S3.gis.%s=%s\n''' % (self.dictname, result)
# -------------------------------------------------------------------------
class SubLayer(object):
def __init__(self, tablename, record):
# Ensure all attributes available (even if Null)
self.__dict__.update(record)
del record
if current.deployment_settings.get_L10n_translate_gis_layer():
self.safe_name = re.sub('[\\"]', "", s3_unicode(current.T(self.name)))
else:
self.safe_name = re.sub('[\\"]', "", self.name)
if tablename not in ("gis_layer_arcrest",
"gis_layer_coordinate",
"gis_layer_empty",
"gis_layer_js",
"gis_layer_mgrs",
"gis_layer_openstreetmap",
"gis_layer_openweathermap",
"gis_layer_theme",
"gis_layer_tms",
"gis_layer_wms",
"gis_layer_xyz",
):
# Layer uses Markers
self.marker = Marker(tablename=tablename, layer_id=self.layer_id)
if hasattr(self, "projection_id"):
self.projection = Projection(self.projection_id)
def setup_clustering(self, output):
if hasattr(self, "cluster_attribute"):
cluster_attribute = self.cluster_attribute
else:
cluster_attribute = None
cluster_distance = self.cluster_distance
cluster_threshold = self.cluster_threshold
if cluster_attribute and \
cluster_attribute != CLUSTER_ATTRIBUTE:
output["cluster_attribute"] = cluster_attribute
if cluster_distance != CLUSTER_DISTANCE:
output["cluster_distance"] = cluster_distance
if cluster_threshold != CLUSTER_THRESHOLD:
output["cluster_threshold"] = cluster_threshold
def setup_folder(self, output):
if self.dir:
output["dir"] = self.dir
def setup_folder_and_visibility(self, output):
if not self.visible:
output["visibility"] = False
if self.dir:
output["dir"] = self.dir
def setup_folder_visibility_and_opacity(self, output):
if not self.visible:
output["visibility"] = False
if self.opacity != 1:
output["opacity"] = "%.1f" % self.opacity
if self.dir:
output["dir"] = self.dir
# ---------------------------------------------------------------------
@staticmethod
def add_attributes_if_not_default(output, **values_and_defaults):
# could also write values in debug mode, to check if defaults ignored.
# could also check values are not being overwritten.
for key, (value, defaults) in values_and_defaults.iteritems():
if value not in defaults:
output[key] = value
# -----------------------------------------------------------------------------
class LayerArcREST(Layer):
    """
    ArcGIS REST Layers from Catalogue
    """
    tablename = "gis_layer_arcrest"
    dictname = "layers_arcrest"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Return the client-side options for this layer as a dict """
            # Mandatory attributes
            ldict = {"id": self.layer_id,
                     "type": "arcrest",
                     "name": self.safe_name,
                     "url": self.url,
                     }
            # Attributes which are defaulted client-side if not set
            self.setup_folder_and_visibility(ldict)
            self.add_attributes_if_not_default(
                ldict,
                layers = (self.layers, ([0],)),
                transparent = (self.transparent, (True,)),
                base = (self.base, (False,)),
                _base = (self._base, (False,)),
                )
            return ldict
# -----------------------------------------------------------------------------
class LayerBing(Layer):
    """
    Bing Layers from Catalogue
    """
    tablename = "gis_layer_bing"
    dictname = "Bing"

    # -------------------------------------------------------------------------
    def as_dict(self, options=None):
        """
        Output the Bing layer options as a dict (or write into options)

        @raise Exception: if not using Spherical Mercator or no API key
        """
        sublayers = self.sublayers
        if not sublayers:
            return
        if Projection().epsg != 900913:
            raise Exception("Cannot display Bing layers unless we're using the Spherical Mercator Projection\n")
        apikey = current.deployment_settings.get_gis_api_bing()
        if not apikey:
            raise Exception("Cannot display Bing layers unless we have an API key\n")
        # Mandatory attributes
        ldict = {"ApiKey": apikey,
                 }
        # Sublayer type -> (client-side key, fallback name)
        names = {"aerial": ("Aerial", "Bing Satellite"),
                 "road": ("Road", "Bing Roads"),
                 "hybrid": ("Hybrid", "Bing Hybrid"),
                 }
        for sublayer in sublayers:
            # Attributes which are defaulted client-side if not set
            if sublayer._base:
                # Set default Base layer
                ldict["Base"] = sublayer.type
            entry = names.get(sublayer.type)
            if entry:
                key, fallback = entry
                ldict[key] = {"name": sublayer.name or fallback,
                              "id": sublayer.layer_id,
                              }
        if options:
            # Used by Map._setup()
            options[self.dictname] = ldict
        else:
            # Used by as_json() and hence as_javascript()
            return ldict
# -----------------------------------------------------------------------------
class LayerCoordinate(Layer):
    """
    Coordinate Layer from Catalogue
    - there should only be one of these
    """
    tablename = "gis_layer_coordinate"
    dictname = "CoordinateGrid"

    # -------------------------------------------------------------------------
    def as_dict(self, options=None):
        """ Output the layer options as a dict (or write into options) """
        sublayers = self.sublayers
        if not sublayers:
            return
        # Add the client-side script (minified unless debugging)
        if current.response.s3.debug:
            self.scripts.append("gis/cdauth.js")
        else:
            self.scripts.append("gis/cdauth.min.js")
        sublayer = sublayers[0]
        # Strip single-quotes from the name for safe JS embedding
        ldict = {"name": re.sub("'", "", sublayer.name),
                 "visibility": sublayer.visible,
                 "id": sublayer.layer_id,
                 }
        if options:
            # Used by Map._setup()
            options[self.dictname] = ldict
        else:
            # Used by as_json() and hence as_javascript()
            return ldict
# -----------------------------------------------------------------------------
class LayerEmpty(Layer):
    """
    Empty Layer from Catalogue
    - there should only be one of these
    """
    tablename = "gis_layer_empty"
    dictname = "EmptyLayer"

    # -------------------------------------------------------------------------
    def as_dict(self, options=None):
        """ Output the layer options as a dict (or write into options) """
        sublayers = self.sublayers
        if not sublayers:
            return
        sublayer = sublayers[0]
        # Localised name, stripped of single-quotes for safe JS embedding
        name_safe = re.sub("'", "", s3_unicode(current.T(sublayer.name)))
        ldict = {"name": name_safe,
                 "id": sublayer.layer_id,
                 }
        if sublayer._base:
            ldict["base"] = True
        if options:
            # Used by Map._setup()
            options[self.dictname] = ldict
        else:
            # Used by as_json() and hence as_javascript()
            return ldict
# -----------------------------------------------------------------------------
class LayerFeature(Layer):
    """
    Feature Layers from Catalogue
    """
    tablename = "gis_layer_feature"
    dictname = "layers_feature"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def __init__(self, tablename, record):
            """
            Check whether this layer should be skipped for the current
            user/deployment, then do the standard SubLayer setup

            @param tablename: the layer table name
            @param record: the gis_layer_feature record
            @raise Exception: if the record has no controller
            """
            controller = record.controller
            self.skip = False
            if controller is not None:
                if controller not in current.deployment_settings.modules:
                    # Module is disabled
                    self.skip = True
                if not current.auth.permission.has_permission("read",
                                                              c=controller,
                                                              f=record.function):
                    # User has no permission to this resource (in ACL)
                    self.skip = True
            else:
                raise Exception("Feature Layer Record '%s' has no controller" % record.name)
            super(LayerFeature.SubLayer, self).__init__(tablename, record)

        def as_dict(self):
            """
            Return the client-side options for this layer as a dict
            - returns None when the layer is to be skipped
            """
            if self.skip:
                # Skip layer
                return
            if self.use_site:
                maxdepth = 1
                show_ids = "&show_ids=true"
            else:
                maxdepth = 0
                show_ids = ""
            url = "%s.geojson?layer=%i&components=None&maxdepth=%s%s" % \
                  (URL(self.controller, self.function), self.id, maxdepth, show_ids)
            if self.filter:
                url = "%s&%s" % (url, self.filter)
            if self.trackable:
                url = "%s&track=1" % url
            style = self.style
            if style:
                try:
                    # JSON Object?
                    style = json.loads(style)
                except:
                    # Fieldname to pass to URL for server-side lookup
                    url = "%s&style=%s" % (url, style)
                    style = None
            # Mandatory attributes
            output = {"id": self.layer_id,
                      # Defaults client-side if not-provided
                      #"type": "feature",
                      "name": self.safe_name,
                      "url": url,
                      }
            # Attributes which are defaulted client-side if not set
            self.setup_folder_visibility_and_opacity(output)
            self.setup_clustering(output)
            if not self.popup_fields:
                output["no_popups"] = 1
            # Use the style parsed above: re-reading self.style here and
            # calling json.loads() unguarded (as before) crashed whenever
            # the style was a fieldname rather than a JSON object
            if style:
                output["style"] = style
            else:
                self.marker.add_attributes_to_output(output)
            return output
# -----------------------------------------------------------------------------
class LayerGeoJSON(Layer):
    """
    GeoJSON Layers from Catalogue
    """
    tablename = "gis_layer_geojson"
    dictname = "layers_geojson"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Return the client-side options for this layer as a dict """
            # Mandatory attributes
            ldict = {"id": self.layer_id,
                     "type": "geojson",
                     "name": self.safe_name,
                     "url": self.url,
                     }
            # Attributes which are defaulted client-side if not set
            epsg = self.projection.epsg
            if epsg != 4326:
                ldict["projection"] = epsg
            self.setup_folder_visibility_and_opacity(ldict)
            self.setup_clustering(ldict)
            style = self.style
            if style:
                # Styling is defined in the layer record
                ldict["style"] = json.loads(style)
            else:
                # Use the Marker
                self.marker.add_attributes_to_output(ldict)
            return ldict
# -----------------------------------------------------------------------------
class LayerGeoRSS(Layer):
    """
    GeoRSS Layers from Catalogue
    """
    tablename = "gis_layer_georss"
    dictname = "layers_georss"

    def __init__(self):
        super(LayerGeoRSS, self).__init__()
        # Feeds are downloaded into the gis_cache table
        LayerGeoRSS.SubLayer.cachetable = current.s3db.gis_cache

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """
            Return the client-side options for this layer as a dict
            - (re)downloads the feed into the server-side cache when
              stale, then points the client at the cached GeoJSON
            """
            db = current.db
            request = current.request
            response = current.response

            cachetable = self.cachetable

            url = self.url
            # Check to see if we should Download layer to the cache
            download = True
            query = (cachetable.source == url)
            existing_cached_copy = db(query).select(cachetable.modified_on,
                                                    limitby=(0, 1)).first()
            refresh = self.refresh or 900 # 15 minutes set if we have no data (legacy DB)
            if existing_cached_copy:
                modified_on = existing_cached_copy.modified_on
                cutoff = modified_on + timedelta(seconds=refresh)
                if request.utcnow < cutoff:
                    # Cached copy is still fresh enough
                    download = False
            if download:
                # Download layer to the Cache
                from gluon.tools import fetch
                # @ToDo: Call directly without going via HTTP
                # @ToDo: Make this async by using S3Task (also use this for the refresh time)
                fields = ""
                if self.data:
                    fields = "&data_field=%s" % self.data
                if self.image:
                    fields = "%s&image_field=%s" % (fields, self.image)
                _url = "%s%s/update.georss?fetchurl=%s%s" % (current.deployment_settings.get_base_public_url(),
                                                             URL(c="gis", f="cache_feed"),
                                                             url,
                                                             fields)
                # Keep Session for local URLs
                import Cookie
                cookie = Cookie.SimpleCookie()
                cookie[response.session_id_name] = response.session_id
                current.session._unlock(response)
                try:
                    # @ToDo: Need to commit to not have DB locked with SQLite?
                    fetch(_url, cookie=cookie)
                    if existing_cached_copy:
                        # Clear old selfs which are no longer active
                        query = (cachetable.source == url) & \
                                (cachetable.modified_on < cutoff)
                        db(query).delete()
                # "except X as y" is valid on Python >= 2.6 & required for Python 3
                except Exception as exception:
                    current.log.error("GeoRSS %s download error" % url, exception)
                    # Feed down
                    if existing_cached_copy:
                        # Use cached copy
                        # Should we Update timestamp to prevent every
                        # subsequent request attempting the download?
                        #query = (cachetable.source == url)
                        #db(query).update(modified_on=request.utcnow)
                        pass
                    else:
                        response.warning += "%s down & no cached copy available" % url

            name_safe = self.safe_name

            # Pass the GeoJSON URL to the client
            # Filter to the source of this feed
            url = "%s.geojson?cache.source=%s" % (URL(c="gis", f="cache_feed"),
                                                  url)

            # Mandatory attributes
            output = {"id": self.layer_id,
                      "type": "georss",
                      "name": name_safe,
                      "url": url,
                      }
            self.marker.add_attributes_to_output(output)

            # Attributes which are defaulted client-side if not set
            if self.refresh != 900:
                output["refresh"] = self.refresh
            self.setup_folder_visibility_and_opacity(output)
            self.setup_clustering(output)

            return output
# -----------------------------------------------------------------------------
class LayerGoogle(Layer):
    """
    Google Layers/Tools from Catalogue
    """
    tablename = "gis_layer_google"
    dictname = "Google"

    # -------------------------------------------------------------------------
    def as_dict(self, options=None):
        """
        Output the Google layer options as a Python dict
        - also injects the required Google JS API scripts into the page

        @param options: dict to write the options into (used by
                        Map._setup()); when not provided, the dict is
                        returned instead (used by as_json())
        """
        sublayers = self.sublayers
        if sublayers:
            T = current.T
            epsg = (Projection().epsg == 900913)
            settings = current.deployment_settings
            apikey = settings.get_gis_api_google()
            s3 = current.response.s3
            debug = s3.debug
            # Google scripts use document.write so cannot be loaded async via yepnope.js
            add_script = s3.scripts.append

            ldict = {}

            for sublayer in sublayers:
                # Attributes which are defaulted client-side if not set
                if sublayer.type == "earth":
                    ldict["Earth"] = str(T("Switch to 3D"))
                    # URL-encoded autoload config:
                    #{"modules":[{"name":"earth","version":"1"}]}
                    add_script("http://www.google.com/jsapi?key=" + apikey + "&autoload=%7B%22modules%22%3A%5B%7B%22name%22%3A%22earth%22%2C%22version%22%3A%221%22%7D%5D%7D")
                    # Dynamic Loading not supported: https://developers.google.com/loader/#Dynamic
                    #s3.jquery_ready.append('''try{google.load('earth','1')catch(e){}''')
                    if debug:
                        self.scripts.append("gis/gxp/widgets/GoogleEarthPanel.js")
                    else:
                        self.scripts.append("gis/gxp/widgets/GoogleEarthPanel.min.js")
                    s3.js_global.append('''S3.public_url="%s"''' % settings.get_base_public_url())
                elif epsg:
                    # Earth is the only layer which can run in non-Spherical Mercator
                    # @ToDo: Warning?
                    if sublayer._base:
                        # Set default Base layer
                        ldict["Base"] = sublayer.type
                    if sublayer.type == "satellite":
                        ldict["Satellite"] = {"name": sublayer.name or "Google Satellite",
                                              "id": sublayer.layer_id}
                    elif sublayer.type == "maps":
                        ldict["Maps"] = {"name": sublayer.name or "Google Maps",
                                         "id": sublayer.layer_id}
                    elif sublayer.type == "hybrid":
                        ldict["Hybrid"] = {"name": sublayer.name or "Google Hybrid",
                                           "id": sublayer.layer_id}
                    elif sublayer.type == "streetview":
                        # Placeholder: replaced with the localised string below
                        ldict["StreetviewButton"] = "Click where you want to open Streetview"
                    elif sublayer.type == "terrain":
                        ldict["Terrain"] = {"name": sublayer.name or "Google Terrain",
                                            "id": sublayer.layer_id}
                    elif sublayer.type == "mapmaker":
                        ldict["MapMaker"] = {"name": sublayer.name or "Google MapMaker",
                                             "id": sublayer.layer_id}
                    elif sublayer.type == "mapmakerhybrid":
                        ldict["MapMakerHybrid"] = {"name": sublayer.name or "Google MapMaker Hybrid",
                                                   "id": sublayer.layer_id}

            if "MapMaker" in ldict or "MapMakerHybrid" in ldict:
                # Need to use v2 API
                # This should be able to be fixed in OpenLayers now since Google have fixed in v3 API:
                # http://code.google.com/p/gmaps-api-issues/issues/detail?id=2349#c47
                add_script("http://maps.google.com/maps?file=api&v=2&key=%s" % apikey)
            else:
                # v3 API (3.10 is frozen, 3.11 release & 3.12 is nightly)
                add_script("http://maps.google.com/maps/api/js?v=3.11&sensor=false")
                if "StreetviewButton" in ldict:
                    # Streetview doesn't work with v2 API
                    ldict["StreetviewButton"] = str(T("Click where you want to open Streetview"))
                    ldict["StreetviewTitle"] = str(T("Street View"))
                    if debug:
                        self.scripts.append("gis/gxp/widgets/GoogleStreetViewPanel.js")
                    else:
                        self.scripts.append("gis/gxp/widgets/GoogleStreetViewPanel.min.js")

            if options:
                # Used by Map._setup()
                options[self.dictname] = ldict
            else:
                # Used by as_json() and hence as_javascript()
                return ldict
# -----------------------------------------------------------------------------
class LayerGPX(Layer):
    """
    GPX Layers from Catalogue
    """
    tablename = "gis_layer_gpx"
    dictname = "layers_gpx"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Return the client-side options for this layer as a dict """
            # Track files are served by the default download handler
            url = URL(c="default", f="download",
                      args=self.track)
            # Mandatory attributes
            ldict = {"id": self.layer_id,
                     "name": self.safe_name,
                     "url": url,
                     }
            # Attributes which are defaulted client-side if not set
            self.marker.add_attributes_to_output(ldict)
            self.add_attributes_if_not_default(
                ldict,
                waypoints = (self.waypoints, (True,)),
                tracks = (self.tracks, (True,)),
                routes = (self.routes, (True,)),
                )
            self.setup_folder_visibility_and_opacity(ldict)
            self.setup_clustering(ldict)
            return ldict
# -----------------------------------------------------------------------------
class LayerJS(Layer):
    """
    JS Layers from Catalogue
    - these are raw Javascript layers for use by expert OpenLayers people
    to quickly add/configure new data sources without needing support
    from back-end Sahana programmers
    """
    tablename = "gis_layer_js"
    dictname = "layers_js"

    # -------------------------------------------------------------------------
    def as_dict(self, options=None):
        """ Output the raw JS snippets as a list (or write into options) """
        sublayers = self.sublayers
        if not sublayers:
            return
        codes = [sublayer.code for sublayer in sublayers]
        if options:
            # Used by Map._setup()
            options[self.dictname] = codes
        else:
            # Used by as_json() and hence as_javascript()
            return codes
# -----------------------------------------------------------------------------
class LayerKML(Layer):
"""
KML Layers from Catalogue
"""
tablename = "gis_layer_kml"
dictname = "layers_kml"
# -------------------------------------------------------------------------
def __init__(self, init=True):
"Set up the KML cache, should be done once per request"
super(LayerKML, self).__init__()
# Can we cache downloaded KML feeds?
# Needed for unzipping & filtering as well
# @ToDo: Should we move this folder to static to speed up access to cached content?
# Do we need to secure it?
request = current.request
cachepath = os.path.join(request.folder,
"uploads",
"gis_cache")
if os.path.exists(cachepath):
cacheable = os.access(cachepath, os.W_OK)
else:
try:
os.mkdir(cachepath)
except OSError, os_error:
current.log.error("GIS: KML layers cannot be cached: %s %s" % \
(cachepath, os_error))
cacheable = False
else:
cacheable = True
# @ToDo: Migrate to gis_cache
LayerKML.cachetable = current.s3db.gis_cache2
LayerKML.cacheable = cacheable
LayerKML.cachepath = cachepath
# -------------------------------------------------------------------------
class SubLayer(Layer.SubLayer):
def as_dict(self):
db = current.db
request = current.request
cachetable = LayerKML.cachetable
cacheable = LayerKML.cacheable
#cachepath = LayerKML.cachepath
name = self.name
if cacheable:
_name = urllib2.quote(name)
_name = _name.replace("%", "_")
filename = "%s.file.%s.kml" % (cachetable._tablename,
_name)
# Should we download a fresh copy of the source file?
download = True
query = (cachetable.name == name)
cached = db(query).select(cachetable.modified_on,
limitby=(0, 1)).first()
refresh = self.refresh or 900 # 15 minutes set if we have no data (legacy DB)
if cached:
modified_on = cached.modified_on
cutoff = modified_on + timedelta(seconds=refresh)
if request.utcnow < cutoff:
download = False
if download:
# Download file (async, if workers alive)
response = current.response
session_id_name = response.session_id_name
session_id = response.session_id
current.s3task.async("gis_download_kml",
args=[self.id, filename, session_id_name, session_id])
if cached:
db(query).update(modified_on=request.utcnow)
else:
cachetable.insert(name=name, file=filename)
url = URL(c="default", f="download",
args=[filename])
else:
# No caching possible (e.g. GAE), display file direct from remote (using Proxy)
# (Requires OpenLayers.Layer.KML to be available)
url = self.url
# Mandatory attributes
output = dict(id = self.layer_id,
name = self.safe_name,
url = url,
)
# Attributes which are defaulted client-side if not set
self.add_attributes_if_not_default(
output,
title = (self.title, ("name", None, "")),
body = (self.body, ("description", None)),
refresh = (self.refresh, (900,)),
)
self.setup_folder_visibility_and_opacity(output)
self.setup_clustering(output)
style = self.style
if style:
style = json.loads(style)
output["style"] = style
else:
self.marker.add_attributes_to_output(output)
return output
# -----------------------------------------------------------------------------
class LayerOSM(Layer):
    """
    OpenStreetMap Layers from Catalogue

    @ToDo: Provide a catalogue of standard layers which are fully-defined
           in static & can just have name over-ridden, as well as
           fully-custom layers.
    """
    tablename = "gis_layer_openstreetmap"
    dictname = "layers_osm"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Return the client-side options for this layer as a dict """
            if Projection().epsg != 900913:
                # Cannot display OpenStreetMap layers unless we're using
                # the Spherical Mercator Projection
                return {}

            # Mandatory attributes
            ldict = {"id": self.layer_id,
                     "name": self.safe_name,
                     "url1": self.url1,
                     }
            # Attributes which are defaulted client-side if not set
            self.add_attributes_if_not_default(
                ldict,
                base = (self.base, (True,)),
                _base = (self._base, (False,)),
                url2 = (self.url2, ("",)),
                url3 = (self.url3, ("",)),
                zoomLevels = (self.zoom_levels, (9,)),
                attribution = (self.attribution, (None,)),
                )
            self.setup_folder_and_visibility(ldict)
            return ldict
# -----------------------------------------------------------------------------
class LayerOpenWeatherMap(Layer):
    """
    OpenWeatherMap Layers from Catalogue
    """
    tablename = "gis_layer_openweathermap"
    dictname = "OWM"

    # -------------------------------------------------------------------------
    def as_dict(self, options=None):
        """ Output the layer options as a dict (or write into options) """
        sublayers = self.sublayers
        if not sublayers:
            return
        # Add the client-side script (minified unless debugging)
        if current.response.s3.debug:
            self.scripts.append("gis/OWM.OpenLayers.js")
        else:
            self.scripts.append("gis/OWM.OpenLayers.min.js")
        # Fallback display names per supported sublayer type
        defaults = {"station": "Weather Stations",
                    "city": "Current Weather",
                    }
        ldict = {}
        for sublayer in sublayers:
            fallback = defaults.get(sublayer.type)
            if fallback:
                ldict[sublayer.type] = {"name": sublayer.name or fallback,
                                        "id": sublayer.layer_id,
                                        "dir": sublayer.dir,
                                        "visibility": sublayer.visible
                                        }
        if options:
            # Used by Map._setup()
            options[self.dictname] = ldict
        else:
            # Used by as_json() and hence as_javascript()
            return ldict
# -----------------------------------------------------------------------------
class LayerShapefile(Layer):
    """
    Shapefile Layers from Catalogue
    """
    tablename = "gis_layer_shapefile"
    dictname = "layers_shapefile"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Return the client-side options for this layer as a dict """
            url = "%s/%s/data.geojson" % \
                  (URL(c="gis", f="layer_shapefile"), self.id)
            if self.filter:
                url = "%s?layer_shapefile_%s.%s" % (url, self.id, self.filter)
            # Mandatory attributes
            ldict = {"id": self.layer_id,
                     "type": "shapefile",
                     "name": self.safe_name,
                     "url": url,
                     # Shapefile layers don't alter their contents, so don't refresh
                     "refresh": 0,
                     }
            # Attributes which are defaulted client-side if not set
            self.add_attributes_if_not_default(
                ldict,
                desc = (self.description, (None, "")),
                src = (self.source_name, (None, "")),
                src_url = (self.source_url, (None, "")),
                )
            # We convert on-upload to have BBOX handling work properly
            #projection = self.projection
            #if projection.epsg != 4326:
            #    ldict["projection"] = projection.epsg
            self.setup_folder_visibility_and_opacity(ldict)
            self.setup_clustering(ldict)
            style = self.style
            if style:
                # Styling is defined in the layer record
                ldict["style"] = json.loads(style)
            else:
                # Use the Marker
                self.marker.add_attributes_to_output(ldict)
            return ldict
# -----------------------------------------------------------------------------
class LayerTheme(Layer):
    """
    Theme Layers from Catalogue
    """
    tablename = "gis_layer_theme"
    dictname = "layers_theme"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Return the client-side options for this layer as a dict """
            # Data is served by the theme_data controller, filtered to this layer
            url = "%s.geojson?theme_data.layer_theme_id=%i&polygons=1&maxdepth=0" % \
                  (URL(c="gis", f="theme_data"), self.id)
            # Mandatory attributes
            ldict = {"id": self.layer_id,
                     "type": "theme",
                     "name": self.safe_name,
                     "url": url,
                     }
            # Attributes which are defaulted client-side if not set
            self.setup_folder_visibility_and_opacity(ldict)
            self.setup_clustering(ldict)
            style = self.style
            if style:
                ldict["style"] = json.loads(style)
            return ldict
# -----------------------------------------------------------------------------
class LayerTMS(Layer):
    """
    TMS Layers from Catalogue
    """
    tablename = "gis_layer_tms"
    dictname = "layers_tms"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Return the client-side options for this layer as a dict """
            # Mandatory attributes
            ldict = {"id": self.layer_id,
                     "type": "tms",
                     "name": self.safe_name,
                     "url": self.url,
                     "layername": self.layername
                     }
            # Attributes which are defaulted client-side if not set
            self.add_attributes_if_not_default(
                ldict,
                _base = (self._base, (False,)),
                url2 = (self.url2, (None,)),
                url3 = (self.url3, (None,)),
                format = (self.img_format, ("png", None)),
                zoomLevels = (self.zoom_levels, (19,)),
                attribution = (self.attribution, (None,)),
                )
            self.setup_folder(ldict)
            return ldict
# -----------------------------------------------------------------------------
class LayerWFS(Layer):
    """
    WFS Layers from Catalogue
    """
    tablename = "gis_layer_wfs"
    dictname = "layers_wfs"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Return the client-side options for this layer as a dict """
            # Mandatory attributes
            ldict = {"id": self.layer_id,
                     "name": self.safe_name,
                     "url": self.url,
                     "title": self.title,
                     "featureType": self.featureType,
                     }
            # Attributes which are defaulted client-side if not set
            self.add_attributes_if_not_default(
                ldict,
                version = (self.version, ("1.1.0",)),
                featureNS = (self.featureNS, (None, "")),
                geometryName = (self.geometryName, ("the_geom",)),
                schema = (self.wfs_schema, (None, "")),
                username = (self.username, (None, "")),
                password = (self.password, (None, "")),
                projection = (self.projection.epsg, (4326,)),
                desc = (self.description, (None, "")),
                src = (self.source_name, (None, "")),
                src_url = (self.source_url, (None, "")),
                refresh = (self.refresh, (0,)),
                #editable
                )
            self.setup_folder_visibility_and_opacity(ldict)
            self.setup_clustering(ldict)
            style = self.style
            if style:
                # Styling is defined in the layer record
                ldict["style"] = json.loads(style)
            else:
                # Use the Marker
                self.marker.add_attributes_to_output(ldict)
            return ldict
# -----------------------------------------------------------------------------
class LayerWMS(Layer):
    """
    WMS Layers from Catalogue
    """
    tablename = "gis_layer_wms"
    dictname = "layers_wms"

    # -------------------------------------------------------------------------
    def __init__(self):
        super(LayerWMS, self).__init__()
        # Add the GetFeatureInfo plugin (minified unless debugging)
        if self.sublayers:
            if current.response.s3.debug:
                self.scripts.append("gis/gxp/plugins/WMSGetFeatureInfo.js")
            else:
                self.scripts.append("gis/gxp/plugins/WMSGetFeatureInfo.min.js")

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Return the client-side options for this layer as a dict """
            if self.queryable:
                # Tell the map client to enable GetFeatureInfo support
                current.response.s3.gis.get_feature_info = True
            # Mandatory attributes
            ldict = {"id": self.layer_id,
                     "name": self.safe_name,
                     "url": self.url,
                     "layers": self.layers
                     }
            # Attributes which are defaulted client-side if not set
            legend_url = self.legend_url
            if legend_url and not legend_url.startswith("http"):
                # Expand a relative legend URL to an absolute one
                legend_url = "%s/%s%s" % \
                    (current.deployment_settings.get_base_public_url(),
                     current.request.application,
                     legend_url)
            attr = {"transparent": (self.transparent, (True,)),
                    "version": (self.version, ("1.1.1",)),
                    "format": (self.img_format, ("image/png",)),
                    "map": (self.map, (None, "")),
                    "username": (self.username, (None, "")),
                    "password": (self.password, (None, "")),
                    "buffer": (self.buffer, (0,)),
                    "base": (self.base, (False,)),
                    "_base": (self._base, (False,)),
                    "style": (self.style, (None, "")),
                    "bgcolor": (self.bgcolor, (None, "")),
                    "tiled": (self.tiled, (False,)),
                    "legendURL": (legend_url, (None, "")),
                    "queryable": (self.queryable, (False,)),
                    "desc": (self.description, (None, "")),
                    }
            if current.deployment_settings.get_gis_layer_metadata():
                # Use CMS to add info about sources
                attr["post_id"] = (self.post_id, (None, ""))
            else:
                # Link direct to sources
                attr["src"] = (self.source_name, (None, ""))
                attr["src_url"] = (self.source_url, (None, ""))
            self.add_attributes_if_not_default(ldict, **attr)
            self.setup_folder_visibility_and_opacity(ldict)
            return ldict
# -----------------------------------------------------------------------------
class LayerXYZ(Layer):
    """
    XYZ Layers from Catalogue
    """
    tablename = "gis_layer_xyz"
    dictname = "layers_xyz"

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            """ Return the client-side options for this layer as a dict """
            # Mandatory attributes
            ldict = {"id": self.layer_id,
                     "name": self.safe_name,
                     "url": self.url
                     }
            # Attributes which are defaulted client-side if not set
            self.add_attributes_if_not_default(
                ldict,
                _base = (self._base, (False,)),
                url2 = (self.url2, (None,)),
                url3 = (self.url3, (None,)),
                format = (self.img_format, ("png", None)),
                zoomLevels = (self.zoom_levels, (19,)),
                attribution = (self.attribution, (None,)),
                )
            self.setup_folder(ldict)
            return ldict
# =============================================================================
class S3Map(S3Method):
    """
    Class to generate a Map linked to Search filters
    """

    # -------------------------------------------------------------------------
    def apply_method(self, r, **attr):
        """
        Entry point to apply map method to S3Requests
        - produces a full page with S3FilterWidgets above a Map

        @param r: the S3Request instance
        @param attr: controller attributes for the request

        @return: output object to send to the view
        """

        if r.http == "GET":
            representation = r.representation
            if representation == "html":
                return self.page(r, **attr)

        else:
            r.error(405, current.ERROR.BAD_METHOD)

    # -------------------------------------------------------------------------
    def page(self, r, **attr):
        """
        Map page

        @param r: the S3Request instance
        @param attr: controller attributes for the request
        """

        if r.representation in ("html", "iframe"):
            response = current.response
            resource = self.resource
            get_config = resource.get_config
            tablename = resource.tablename

            widget_id = "default_map"

            output = {}

            title = response.s3.crud_strings[tablename].get("title_map",
                                                            current.T("Map"))
            output["title"] = title

            # Filter widgets
            filter_widgets = get_config("filter_widgets", None)
            if filter_widgets and not self.hide_filter:
                # Advanced-search toggle only if any widget is hidden by default
                advanced = False
                for widget in filter_widgets:
                    if "hidden" in widget.opts and widget.opts.hidden:
                        advanced = resource.get_config("map_advanced", True)
                        break

                request = self.request
                from s3filter import S3FilterForm
                # Apply filter defaults (before rendering the data!)
                S3FilterForm.apply_filter_defaults(r, resource)
                filter_formstyle = get_config("filter_formstyle", None)
                submit = resource.get_config("map_submit", True)
                filter_form = S3FilterForm(filter_widgets,
                                           formstyle=filter_formstyle,
                                           advanced=advanced,
                                           submit=submit,
                                           ajax=True,
                                           # URL to update the Filter Widget Status
                                           ajaxurl=r.url(method="filter",
                                                         vars={},
                                                         representation="options"),
                                           _class="filter-form",
                                           _id="%s-filter-form" % widget_id,
                                           )
                get_vars = request.get_vars
                filter_form = filter_form.html(resource, get_vars=get_vars, target=widget_id)
            else:
                # Render as empty string to avoid the exception in the view
                filter_form = ""

            output["form"] = filter_form

            # Map
            output["map"] = self.widget(r, widget_id=widget_id,
                                        callback='''S3.search.s3map()''', **attr)

            # View
            response.view = self._view(r, "map.html")

            return output

        else:
            r.error(501, current.ERROR.BAD_FORMAT)

    # -------------------------------------------------------------------------
    def widget(self,
               r,
               method="map",
               widget_id=None,
               visible=True,
               callback=None,
               **attr):
        """
        Render a Map widget suitable for use in an S3Filter-based page
        such as S3Summary

        @param r: the S3Request
        @param method: the widget method
        @param widget_id: the widget ID
        @param callback: None by default in case DIV is hidden
        @param visible: whether the widget is initially visible
        @param attr: controller attributes
        """

        if not widget_id:
            widget_id = "default_map"

        gis = current.gis
        s3db = current.s3db
        tablename = self.tablename

        ftable = s3db.gis_layer_feature

        def lookup_layer(prefix, name):
            # Find the feature layer_id for this controller/function,
            # preferring the style_default layer when there are several
            query = (ftable.controller == prefix) & \
                    (ftable.function == name)
            layers = current.db(query).select(ftable.layer_id,
                                              ftable.style_default,
                                              )
            if len(layers) > 1:
                layers.exclude(lambda row: row.style_default == False)
            if len(layers) == 1:
                layer_id = layers.first().layer_id
            else:
                # Can't distinguish
                layer_id = None
            return layer_id

        prefix = r.controller
        name = r.function
        layer_id = lookup_layer(prefix, name)
        if not layer_id:
            # Try the tablename
            prefix, name = tablename.split("_", 1)
            layer_id = lookup_layer(prefix, name)

        marker_fn = s3db.get_config(tablename, "marker_fn")
        if marker_fn:
            # Per-feature markers added in get_location_data()
            marker = None
        else:
            # Single Marker for the layer
            marker = gis.get_marker(prefix, name)

        url = URL(extension="geojson", args=None)

        # @ToDo: Support maps with multiple layers (Dashboards)
        #id = "search_results_%s" % widget_id
        id = "search_results"
        feature_resources = [{"name" : current.T("Search Results"),
                              "id" : id,
                              "layer_id" : layer_id,
                              "tablename" : tablename,
                              "url" : url,
                              # We activate in callback after ensuring URL is updated for current filter status
                              "active" : False,
                              "marker" : marker
                              }]
        settings = current.deployment_settings
        legend = settings.get_gis_legend()
        toolbar = settings.get_gis_toolbar()

        map = gis.show_map(id = widget_id,
                           feature_resources = feature_resources,
                           #catalogue_layers = True,
                           collapsed = True,
                           legend = legend,
                           toolbar = toolbar,
                           #search = True,
                           save = False,
                           callback = callback,
                           )
        return map
# =============================================================================
class S3ExportPOI(S3Method):
    """ Export point-of-interest resources for a location """

    # -------------------------------------------------------------------------
    def apply_method(self, r, **attr):
        """
            Apply method: only GET requests are supported.

            @param r: the S3Request
            @param attr: controller options for this request
        """

        output = dict()

        if r.http == "GET":
            output = self.export(r, **attr)
        else:
            r.error(405, current.ERROR.BAD_METHOD)

        return output

    # -------------------------------------------------------------------------
    def export(self, r, **attr):
        """
            Export POI resources.

            URL options:

                - "resources"   list of tablenames to export records from

                - "msince"      datetime in ISO format, "auto" to use the
                                feed's last update

                - "update_feed" 0 to skip the update of the feed's last
                                update datetime, useful for trial exports

            Supported formats:

                .xml            S3XML
                .osm            OSM XML Format
                .kml            Google KML

            (other formats can be requested, but may give unexpected results)

            @param r: the S3Request
            @param attr: controller options for this request
        """

        import datetime, time
        tfmt = current.xml.ISOFORMAT

        # Determine request Lx: the record ID is stored on self for use by
        # export_combined_tree()/_add_lx_filter()
        current_lx = r.record
        if not current_lx: # or not current_lx.level:
            # Must have a location
            r.error(400, current.ERROR.BAD_REQUEST)
        else:
            self.lx = current_lx.id

        tables = []
        # Parse the ?resources= parameter
        if "resources" in r.get_vars:
            resources = r.get_vars["resources"]
        else:
            # Fallback to deployment_setting
            resources = current.deployment_settings.get_gis_poi_resources()
        if not isinstance(resources, list):
            resources = [resources]
        # Each entry may itself be a comma-separated list of tablenames
        [tables.extend(t.split(",")) for t in resources]

        # Parse the ?update_feed= parameter
        update_feed = True
        if "update_feed" in r.get_vars:
            _update_feed = r.get_vars["update_feed"]
            if _update_feed == "0":
                update_feed = False

        # Parse the ?msince= parameter
        msince = None
        if "msince" in r.get_vars:
            msince = r.get_vars["msince"]
            if msince.lower() == "auto":
                msince = "auto"
            else:
                try:
                    (y, m, d, hh, mm, ss, t0, t1, t2) = \
                        time.strptime(msince, tfmt)
                    msince = datetime.datetime(y, m, d, hh, mm, ss)
                except ValueError:
                    # Silently ignore an unparseable msince
                    msince = None

        # Export a combined tree
        tree = self.export_combined_tree(tables,
                                         msince=msince,
                                         update_feed=update_feed)

        xml = current.xml

        # Set response headers
        response = current.response
        s3 = response.s3
        headers = response.headers
        representation = r.representation
        if r.representation in s3.json_formats:
            as_json = True
            default = "application/json"
        else:
            as_json = False
            default = "text/xml"

        headers["Content-Type"] = s3.content_type.get(representation,
                                                      default)

        # Find XSLT stylesheet and transform
        stylesheet = r.stylesheet()
        if tree and stylesheet is not None:
            args = Storage(domain=xml.domain,
                           base_url=s3.base_url,
                           utcnow=datetime.datetime.utcnow().strftime(tfmt))
            tree = xml.transform(tree, stylesheet, **args)
        if tree:
            if as_json:
                output = xml.tree2json(tree, pretty_print=True)
            else:
                output = xml.tostring(tree, pretty_print=True)

        return output

    # -------------------------------------------------------------------------
    def export_combined_tree(self, tables, msince=None, update_feed=True):
        """
            Export a combined tree of all records in tables, which
            are in Lx, and have been updated since msince.

            @param tables: list of table names
            @param msince: minimum modified_on datetime, "auto" for
                           automatic from feed data, None to turn it off
            @param update_feed: update the last_update datetime in the feed
        """

        db = current.db
        s3db = current.s3db
        ftable = s3db.gis_poi_feed

        # Location record ID set by export()
        lx = self.lx

        elements = []
        for tablename in tables:

            # Define the resource
            try:
                resource = s3db.resource(tablename, components=[])
            except AttributeError:
                # Table not defined (module deactivated?)
                continue

            # Check
            if "location_id" not in resource.fields:
                # Hardly a POI resource without location_id
                continue

            # Add Lx filter
            self._add_lx_filter(resource, lx)

            # Get the feed data (per-table, per-location last-update record)
            query = (ftable.tablename == tablename) & \
                    (ftable.location_id == lx)
            feed = db(query).select(limitby=(0, 1)).first()
            if msince == "auto":
                if feed is None:
                    _msince = None
                else:
                    _msince = feed.last_update
            else:
                _msince = msince

            # Export the tree and append its element to the element list
            tree = resource.export_tree(msince=_msince,
                                        references=["location_id"])

            # Update the feed data
            if update_feed:
                muntil = resource.muntil
                if feed is None:
                    ftable.insert(location_id = lx,
                                  tablename = tablename,
                                  last_update = muntil)
                else:
                    feed.update_record(last_update = muntil)

            elements.extend([c for c in tree.getroot()])

        # Combine all elements in one tree and return it
        tree = current.xml.tree(elements, results=len(elements))
        return tree

    # -------------------------------------------------------------------------
    @staticmethod
    def _add_lx_filter(resource, lx):
        """
            Add a Lx filter for the current location to this
            resource.

            @param resource: the resource
            @param lx: the location record ID whose Lx path to match
        """

        from s3resource import S3FieldSelector as FS
        # Match records whose location path passes through (or starts at) lx
        query = (FS("location_id$path").contains("/%s/" % lx)) | \
                (FS("location_id$path").like("%s/%%" % lx))
        resource.add_filter(query)
# -----------------------------------------------------------------------------
class S3ImportPOI(S3Method):
    """
        Import point-of-interest resources for a location
    """

    # -------------------------------------------------------------------------
    @staticmethod
    def apply_method(r, **attr):
        """
            Apply method: renders and processes the OSM import form
            (HTML representation only).

            @param r: the S3Request
            @param attr: controller options for this request
        """

        if r.representation == "html":

            T = current.T
            s3db = current.s3db
            request = current.request
            response = current.response

            title = T("Import from OpenStreetMap")

            # One checkbox row per configured POI resource
            res_select = [TR(TD(B("%s: " % T("Select resources to import")),
                                _colspan=3))]
            for resource in current.deployment_settings.get_gis_poi_resources():
                id = "res_" + resource
                res_select.append(TR(TD(LABEL(resource, _for=id)),
                                     TD(INPUT(_type="checkbox",
                                              _name=id,
                                              _id=id,
                                              _checked=True)),
                                     TD()))

            # Import form: either an uploaded .osm file or an Osmosis
            # extract from a PostgreSQL OSM mirror
            form = FORM(
                    TABLE(
                        TR(TD(T("Can read PoIs either from an OpenStreetMap file (.osm) or mirror."),
                              _colspan=3),
                           ),
                        TR(TD(B("%s: " % T("File"))),
                           TD(INPUT(_type="file", _name="file", _size="50")),
                           TD(SPAN("*", _class="req",
                                   _style="padding-right: 5px;"))
                           ),
                        TR(TD(),
                           TD(T("or")),
                           TD(),
                           ),
                        TR(TD(B("%s: " % T("Host"))),
                           TD(INPUT(_type="text", _name="host",
                                    _id="host", _value="localhost")),
                           TD(),
                           ),
                        TR(TD(B("%s: " % T("Database"))),
                           TD(INPUT(_type="text", _name="database",
                                    _id="database", _value="osm")),
                           TD(),
                           ),
                        TR(TD(B("%s: " % T("User"))),
                           TD(INPUT(_type="text", _name="user",
                                    _id="user", _value="osm")),
                           TD(),
                           ),
                        TR(TD(B("%s: " % T("Password"))),
                           TD(INPUT(_type="text", _name="password",
                                    _id="password", _value="<PASSWORD>")),
                           TD(),
                           ),
                        TR(TD(B("%s: " % T("Ignore Errors?"))),
                           TD(INPUT(_type="checkbox", _name="ignore_errors",
                                    _id="ignore_errors")),
                           TD(),
                           ),
                        res_select,
                        TR(TD(),
                           TD(INPUT(_type="submit", _value=T("Import"))),
                           TD(),
                           )
                        )
                    )

            if not r.id:
                # No master record: let the user pick the location
                from s3validators import IS_LOCATION
                from s3widgets import S3LocationAutocompleteWidget
                # dummy field
                field = s3db.org_office.location_id
                field.requires = IS_NULL_OR(IS_LOCATION())
                widget = S3LocationAutocompleteWidget()(field, None)
                row = TR(TD(B("%s: " % T("Location"))),
                         TD(widget),
                         TD(SPAN("*", _class="req",
                                 _style="padding-right: 5px;"))
                         )
                # Insert the Location selector as 4th row of the table
                form[0].insert(3, row)

            response.view = "create.html"
            output = dict(title=title,
                          form=form)

            if form.accepts(request.vars, current.session):

                form_vars = form.vars
                if form_vars.file != "":
                    # Use the uploaded file directly
                    File = form_vars.file.file
                else:
                    # Create .poly file for the selected location
                    if r.record:
                        record = r.record
                    elif not form_vars.location_id:
                        form.errors["location_id"] = T("Location is Required!")
                        return output
                    else:
                        gtable = s3db.gis_location
                        record = current.db(gtable.id == form_vars.location_id).select(gtable.name,
                                                                                       gtable.wkt,
                                                                                       limitby=(0, 1)
                                                                                       ).first()
                        if record.wkt is None:
                            form.errors["location_id"] = T("Location needs to have WKT!")
                            return output
                    error = GIS.create_poly(record)
                    if error:
                        current.session.error = error
                        redirect(URL(args=r.id))
                    # Use Osmosis to extract an .osm file using this .poly
                    name = record.name
                    if os.path.exists(os.path.join(os.getcwd(), "temp")): # use web2py/temp
                        TEMP = os.path.join(os.getcwd(), "temp")
                    else:
                        import tempfile
                        TEMP = tempfile.gettempdir()
                    filename = os.path.join(TEMP, "%s.osm" % name)
                    cmd = ["/home/osm/osmosis/bin/osmosis", # @ToDo: deployment_setting
                           "--read-pgsql",
                           "host=%s" % form_vars.host,
                           "database=%s" % form_vars.database,
                           "user=%s" % form_vars.user,
                           "password=%s" % form_vars.password,
                           "--dataset-dump",
                           "--bounding-polygon",
                           "file=%s" % os.path.join(TEMP, "%s.poly" % name),
                           "--write-xml",
                           "file=%s" % filename,
                           ]
                    import subprocess
                    # FIXME(review): passing a list together with shell=True
                    # means only the first element is run as the shell command
                    # on POSIX (the remaining items become arguments to the
                    # shell, not to osmosis) - this likely wants shell=False
                    try:
                        result = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
                    except subprocess.CalledProcessError, e:
                        current.session.error = T("OSM file generation failed: %s") % e.output
                        redirect(URL(args=r.id))
                    except AttributeError:
                        # Python < 2.7
                        error = subprocess.call(cmd, shell=True)
                        if error:
                            current.log.debug(cmd)
                            current.session.error = T("OSM file generation failed!")
                            redirect(URL(args=r.id))
                    try:
                        File = open(filename, "r")
                    except:
                        current.session.error = T("Cannot open created OSM file!")
                        redirect(URL(args=r.id))

                stylesheet = os.path.join(request.folder, "static", "formats",
                                          "osm", "import.xsl")
                ignore_errors = form_vars.get("ignore_errors", None)
                xml = current.xml
                tree = xml.parse(File)
                define_resource = s3db.resource
                response.error = ""
                import_count = 0

                # Collect the resources whose checkbox was ticked
                import_res = []
                for resource in current.deployment_settings.get_gis_poi_resources():
                    if getattr(form_vars, "res_" + resource):
                        import_res.append(resource)

                for tablename in import_res:
                    try:
                        table = s3db[tablename]
                    except:
                        # Module disabled
                        continue
                    resource = define_resource(tablename)
                    s3xml = xml.transform(tree, stylesheet_path=stylesheet,
                                          name=resource.name)
                    try:
                        success = resource.import_xml(s3xml,
                                                      ignore_errors=ignore_errors)
                        import_count += resource.import_count
                    except:
                        # Collect (rather than raise) per-resource failures
                        response.error += str(sys.exc_info()[1])
                if import_count:
                    response.confirmation = "%s %s" % \
                        (import_count,
                         T("PoIs successfully imported."))
                else:
                    response.information = T("No PoIs available.")

            return output

        else:
            raise HTTP(501, current.ERROR.BAD_METHOD)
# END =========================================================================
|
<reponame>dslemay/dslemay.github.io
import React from 'react'
import { render } from '@testing-library/react'
import RelatedContent from '..'
describe('<RelatedContent />', () => {
const props = {
cards: [
{
excerptText: 'This is a sample excerpt from a previous post',
headlineText: 'Find Great Ways to Test your Components',
link: {
text: 'Read More',
url: 'blog',
},
},
{
excerptText: 'This is a sample excerpt from a previoust post',
headlineText: 'Amazing Headline. Come and Read this Article',
link: {
text: 'Read More',
url: 'blog',
},
},
],
}
it('renders correctly', () => {
const { container } = render(<RelatedContent {...props} />)
expect(container.firstChild).toMatchSnapshot()
})
})
|
package com.atlassian.bitbucket.jenkins.internal.client;
import com.atlassian.bitbucket.jenkins.internal.fixture.FakeRemoteHttpServer;
import com.atlassian.bitbucket.jenkins.internal.http.HttpRequestExecutorImpl;
import com.atlassian.bitbucket.jenkins.internal.model.*;
import org.junit.Test;
import java.util.List;
import static com.atlassian.bitbucket.jenkins.internal.credentials.BitbucketCredentials.ANONYMOUS_CREDENTIALS;
import static com.atlassian.bitbucket.jenkins.internal.util.TestUtils.*;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
import static okhttp3.HttpUrl.parse;
import static java.lang.String.format;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsIterableContaining.hasItems;
import static org.junit.Assert.*;
public class BitbucketRepositoryClientImplTest {

    // URL template for the repository's pull-request listing endpoint.
    // (Renamed from the misleading WEBHOOK_URL - it is not a webhook URL.)
    private static final String PULL_REQUESTS_URL = "%s/rest/api/1.0/projects/%s/repos/%s/pull-requests?withAttributes=false&withProperties=false&state=OPEN";
    private static final String projectKey = "PROJECT_1";
    private static final String repoSlug = "rep_1";

    private final FakeRemoteHttpServer fakeRemoteHttpServer = new FakeRemoteHttpServer();
    private final HttpRequestExecutor requestExecutor = new HttpRequestExecutorImpl(fakeRemoteHttpServer);
    private final BitbucketRequestExecutor bitbucketRequestExecutor = new BitbucketRequestExecutor(BITBUCKET_BASE_URL,
            requestExecutor, OBJECT_MAPPER, ANONYMOUS_CREDENTIALS);

    private BitbucketRepositoryClientImpl client =
            new BitbucketRepositoryClientImpl(bitbucketRequestExecutor, projectKey, repoSlug);

    @Test
    public void testFetchingOfExistingOpenPullRequests() {
        String response = readFileToString("/open-pull-requests.json");
        String url = format(PULL_REQUESTS_URL, BITBUCKET_BASE_URL, projectKey, repoSlug);
        fakeRemoteHttpServer.mapUrlToResult(url, response);

        List<BitbucketPullRequest> pullRequests = client.getPullRequests(BitbucketPullRequestState.OPEN).collect(toList());

        assertThat(pullRequests.size(), is(equalTo(2)));
        // long literals instead of the deprecated `new Long(...)` boxing constructor
        assertThat(pullRequests.stream().map(BitbucketPullRequest::getId).collect(toSet()), hasItems(96L, 97L));
        assertThat(pullRequests.stream().map(BitbucketPullRequest::getState).collect(toSet()), hasItems(BitbucketPullRequestState.OPEN));
    }

    @Test
    public void testFetchingOfExistingPullRequests() {
        String response = readFileToString("/open-pull-requests.json");
        // Same endpoint as above, but requesting pull requests in ALL states
        String allStatesUrl = "%s/rest/api/1.0/projects/%s/repos/%s/pull-requests?withAttributes=false&withProperties=false&state=ALL";
        String url = format(allStatesUrl, BITBUCKET_BASE_URL, projectKey, repoSlug);
        fakeRemoteHttpServer.mapUrlToResult(url, response);

        List<BitbucketPullRequest> pullRequests = client.getPullRequests().collect(toList());

        assertThat(pullRequests.size(), is(equalTo(2)));
        assertThat(pullRequests.stream().map(BitbucketPullRequest::getId).collect(toSet()), hasItems(96L, 97L));
    }

    @Test
    public void testNextPageFetching() {
        BitbucketRepositoryClientImpl.NextPageFetcherImpl fetcher = new BitbucketRepositoryClientImpl.NextPageFetcherImpl(parse(BITBUCKET_BASE_URL), bitbucketRequestExecutor);
        int nextPageStart = 2;
        fakeRemoteHttpServer.mapUrlToResult(
                BITBUCKET_BASE_URL + "?start=" + nextPageStart,
                readFileToString("/open-pull-requests-last-page.json"));
        BitbucketPage<BitbucketPullRequest> firstPage = new BitbucketPage<>();
        firstPage.setNextPageStart(nextPageStart);

        BitbucketPage<BitbucketPullRequest> next = fetcher.next(firstPage);
        List<BitbucketPullRequest> values = next.getValues();
        assertEquals(next.getSize(), values.size());
        assertTrue(next.getSize() > 0);

        assertThat(values.stream().map(BitbucketPullRequest::getId).collect(toSet()), hasItems(96L, 97L));
        assertThat(next.isLastPage(), is(true));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testLastPageDoesNotHaveNext() {
        BitbucketRepositoryClientImpl.NextPageFetcherImpl fetcher = new BitbucketRepositoryClientImpl.NextPageFetcherImpl(parse(BITBUCKET_BASE_URL), bitbucketRequestExecutor);
        BitbucketPage<BitbucketPullRequest> page = new BitbucketPage<>();
        page.setLastPage(true);

        fetcher.next(page);
    }
}
|
// fgprof is a sampling Go profiler that allows you to analyze On-CPU as well
// as [Off-CPU](http://www.brendangregg.com/offcpuanalysis.html) (e.g. I/O)
// time together.
package fgprof
import (
"io"
"runtime"
"strings"
"time"
)
// Start begins profiling the goroutines of the program and returns a function
// that needs to be invoked by the caller to stop the profiling and write the
// results to w using the given format.
func Start(w io.Writer, format Format) func() error {
	// Go's CPU profiler uses 100hz, but 99hz might be less likely to result in
	// accidental synchronization with the program we're profiling.
	const hz = 99
	ticker := time.NewTicker(time.Second / hz)
	stopCh := make(chan struct{})

	prof := &profiler{}
	stackCounts := stackCounter{}

	// Sample all goroutine stacks on every tick until told to stop.
	go func() {
		defer ticker.Stop()

		for {
			select {
			case <-ticker.C:
				stacks := prof.GoroutineProfile()
				stackCounts.Update(stacks)
			case <-stopCh:
				return
			}
		}
	}()

	return func() error {
		// The unbuffered send synchronizes with the sampling goroutine: once it
		// completes, no further updates to stackCounts can occur, so reading
		// the counts below is safe.
		stopCh <- struct{}{}
		return writeFormat(w, stackCounts.HumanMap(prof.SelfFrame()), format, hz)
	}
}
// profiler provides a convenient and performant way to access
// runtime.GoroutineProfile().
type profiler struct {
	// stacks is a reusable buffer for runtime.GoroutineProfile results.
	stacks []runtime.StackRecord
	// selfFrame is the lazily-resolved frame of GoroutineProfile itself,
	// used to hide the profiler from its own output.
	selfFrame *runtime.Frame
}
// GoroutineProfile returns the stacks of all goroutines currently managed by
// the scheduler. This includes both goroutines that are currently running
// (On-CPU), as well as waiting (Off-CPU).
func (p *profiler) GoroutineProfile() []runtime.StackRecord {
	if p.selfFrame == nil {
		// Determine the runtime.Frame of this func so we can hide it from our
		// profiling output.
		rpc := make([]uintptr, 1)
		n := runtime.Callers(1, rpc)
		if n < 1 {
			panic("could not determine selfFrame")
		}
		selfFrame, _ := runtime.CallersFrames(rpc).Next()
		p.selfFrame = &selfFrame
	}

	// We don't know how many goroutines exist, so we have to grow p.stacks
	// dynamically. We overshoot by 10% since it's possible that more goroutines
	// are launched in between two calls to GoroutineProfile. Once p.stacks
	// reaches the maximum number of goroutines used by the program, it will get
	// reused indefinitely, eliminating GoroutineProfile calls and allocations.
	//
	// TODO(fg) There might be workloads where it would be nice to shrink
	// p.stacks dynamically as well, but let's not over-engineer this until we
	// understand those cases better.
	for {
		n, ok := runtime.GoroutineProfile(p.stacks)
		if !ok {
			// Buffer too small: resize with 10% headroom and retry.
			p.stacks = make([]runtime.StackRecord, int(float64(n)*1.1))
		} else {
			return p.stacks[0:n]
		}
	}
}
// SelfFrame returns the profiler's own runtime.Frame. It is nil until the
// first call to GoroutineProfile resolves it.
func (p *profiler) SelfFrame() *runtime.Frame {
	return p.selfFrame
}
// stringStackCounter counts samples per goroutine stack, keyed by the stack
// rendered as a ";"-separated list of function names (outermost frame first).
type stringStackCounter map[string]int

// Update adds one sample for each goroutine stack record in p.
func (s stringStackCounter) Update(p []runtime.StackRecord) {
	for _, pp := range p {
		frames := runtime.CallersFrames(pp.Stack())

		var stack []string
		for {
			frame, more := frames.Next()
			// Prepend so the outermost (root) frame ends up first in the key.
			stack = append([]string{frame.Function}, stack...)
			if !more {
				break
			}
		}
		key := strings.Join(stack, ";")
		s[key]++
	}
}
type stackCounter map[[32]uintptr]int
func (s stackCounter) Update(p []runtime.StackRecord) {
for _, pp := range p {
s[pp.Stack0]++
}
}
// @TODO(fg) create a better interface that avoids the pprof output having to
// split the stacks using the `;` separator.

// HumanMap converts the counter into a map keyed by ";"-joined function names
// (outermost frame first). Any stack containing the exclude frame is dropped
// entirely, which hides the profiler from its own output.
func (s stackCounter) HumanMap(exclude *runtime.Frame) map[string]int {
	m := map[string]int{}

outer:
	for stack0, count := range s {
		frames := runtime.CallersFrames((&runtime.StackRecord{Stack0: stack0}).Stack())

		var stack []string
		for {
			frame, more := frames.Next()
			if frame.Entry == exclude.Entry {
				// Skip this whole stack, not just this frame.
				continue outer
			}
			stack = append([]string{frame.Function}, stack...)
			if !more {
				break
			}
		}
		key := strings.Join(stack, ";")
		m[key] = count
	}
	return m
}
|
const elementId = "wowhead-item-list";

// Build the floating overlay <div> that will display the item-id list.
function createElement() {
  const container = document.createElement("div");
  container.id = elementId;
  container.textContent = "";
  container.style =
    "z-index: 5000; position: absolute; top: 0; left: 0; background: black; border: 1px solid white; color: white; font-size: 10px; padding: 4px;";
  return container;
}
// Produce the text for the overlay: the comma-separated item string for the
// current listview, or a placeholder when the page has no item listing.
function stringToDisplay() {
  const listview = document.getElementsByClassName("listview-mode-default")[0];
  if (typeof listview === "undefined") {
    return "No items displayed on this page.";
  }
  // The original wrapped this in a pointless template literal
  // (`${itemString}`); return the string directly.
  return getItemString(listview);
}
// Extract the unique Wowhead item IDs from all links inside `listview` and
// join them as an "i:123,i:456" import string.
function getItemString(listview) {
  const itemUrlPattern = /.+wowhead\.com\/item=(\d+)/;
  const anchors = [...listview.getElementsByTagName("a")];
  const itemIds = anchors
    .map((anchor) => {
      const match = itemUrlPattern.exec(anchor.href);
      return match ? match[1] : null;
    })
    .filter((id) => id !== null);
  // Set preserves insertion order and dedupes in O(n); the original
  // indexOf-based filter was O(n^2) on pages with many links.
  const uniqueIds = [...new Set(itemIds)];
  return uniqueIds.map((id) => `i:${id}`).join(",");
}
// Ensure the overlay exists (creating and attaching it on first run), then
// refresh its text for the current page.
function main() {
  let element = document.getElementById(elementId);
  if (element === null) {
    element = createElement();
    document.body.appendChild(element);
  }
  element.textContent = stringToDisplay();
}

// Userscript entry point: run immediately on page load.
main();
|
#!/bin/sh
###################################################
### Base table:  major examination pass rate
### Maintainer:  ZhangWeiCe
### Sources:     app.student_summary_achievements_count, model.basic_major_info
### Import mode: full reload
### Run with:    sh major_examination_rate.sh &
### Target:      app.major_examination_rate
###################################################
cd `dirname $0`
source ../../../config.sh
exec_dir major_examination_rate

HIVE_DB=app
HIVE_TABLE=major_examination_rate
TARGET_TABLE=major_examination_rate

# Drop and recreate the target Hive table (full-reload semantics).
function create_table(){
    hadoop fs -rm -r ${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE} || :
    hive -e "DROP TABLE IF EXISTS ${HIVE_DB}.${HIVE_TABLE};"
    hive -e "CREATE TABLE IF NOT EXISTS ${HIVE_DB}.${HIVE_TABLE}(
            major_code STRING COMMENT '专业编号',
            major_name STRING COMMENT '专业名称',
            academy_code STRING COMMENT '院系编号',
            academy_name STRING COMMENT '院系名称',
            course_code STRING COMMENT '课程编号',
            course_name STRING COMMENT '课程名称',
            good_ratio STRING COMMENT '考试优秀率',
            pass_rate STRING COMMENT '考试通过率',
            fail_rate STRING COMMENT '考试不及格率',
            semester_year STRING COMMENT '学年',
            semester STRING COMMENT '学期')COMMENT '专业考试通过率'
            LOCATION '${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE}'"
    fn_log "创建表--专业考试通过率: ${HIVE_DB}.${HIVE_TABLE}"
}

# Recompute the per-major pass/fail/excellence percentages from the
# achievement counts, joining in major/academy names.
function import_table(){
    hive -e "insert overwrite table ${HIVE_DB}.${HIVE_TABLE}
            SELECT DISTINCT
                t1.major_code AS major_code,
                t2.NAME AS major_name,
                t1.academy_code AS academy_code,
                t2.academy_name AS academy_name,
                t1.course_code AS course_code,
                t1.course_name AS course_name,
                ROUND((t1.excellent_num/t1.num)*100,2) AS good_ratio,
                ROUND((t1.qualified_num/t1.num)*100,2) AS pass_rate,
                ROUND((t1.no_qualified_num/t1.num)*100,2) AS fail_rate,
                t1.semester_year AS semester_year,
                t1.semester AS semester
            FROM
                app.student_summary_achievements_count t1
                LEFT JOIN model.basic_major_info t2 ON t1.major_code = t2.CODE
            "
    fn_log " 导入数据--专业考试通过率: ${HIVE_DB}.${HIVE_TABLE}"
}

# Truncate the MySQL target table, then bulk-export the Hive data via Sqoop.
function export_table(){
    clear_mysql_data "TRUNCATE TABLE ${TARGET_TABLE};"
    sqoop export --connect ${MYSQL_URL} --username ${MYSQL_USERNAME} --password ${MYSQL_PASSWORD} \
    --table ${TARGET_TABLE} --export-dir ${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE} \
    --input-fields-terminated-by '\0001' --input-null-string '\\N' --input-null-non-string '\\N' \
    --null-string '\\N' --null-non-string '\\N' \
    --columns "major_code,major_name,academy_code,academy_name,course_code,course_name,good_ratio,pass_rate,fail_rate,semester_year,semester"
    fn_log "导出数据--专业考试通过率: ${HIVE_DB}.${TARGET_TABLE}"
}

# Entry point: guard, rebuild, reload, export, finish.
init_exit
create_table
import_table
export_table
finish
#!/bin/bash
# Install a combined SSH key file ($1): line 1 is the public key (appended to
# authorized_keys), the remaining lines are the private key (written to
# ~/.ssh/id_rsa).

# Public part: the first line only.
head -n 1 "$1" >> ~/.ssh/authorized_keys

# Private part: everything after the first line. `tail -n +2` replaces the
# original's fragile line-count arithmetic (deprecated $[...] syntax plus a
# wc|awk pipeline) with a single standard invocation.
tail -n +2 "$1" > ~/.ssh/id_rsa
chmod 600 ~/.ssh/id_rsa

# Disable host-key checking for all hosts (throwaway-cluster convenience).
cat >> ~/.ssh/config << EOF
Host *
  StrictHostKeyChecking no
  UserKnownHostsFile=/dev/null
EOF
|
module Spree
  class OrdergrooveConfiguration
    # Loads the Ordergroove account settings from config/ordergroove.yml.
    # Returns the parsed YAML, or nil when the file does not exist.
    #
    # NOTE(review): the original named the variable `bronto_yml` (a leftover
    # from a Bronto integration) and redundantly reassigned it inside the
    # conditional; both cleaned up here with identical behavior.
    def self.account
      config_path = File.join(Rails.root, 'config/ordergroove.yml')
      YAML.load(File.read(config_path)) if File.exist?(config_path)
    end
  end
end
#!/usr/bin/zsh
# -----------------------------------------------------------------------------
# Filename: run-multicalc.sh
# Revision: None
# Date: 2018/09/21 - 10:41
# Author: Haixiang HOU
# Email: hexid26@outlook.com
# Website: [NULL]
# Notes: [NULL]
# -----------------------------------------------------------------------------
# Copyright: 2018 (c) Haixiang
# License: GPL
# -----------------------------------------------------------------------------
# Version [1.0]
# run multicalc_cpp

# Require exactly 3 arguments: [N] [filepath] [skip_num]
if [ $# != 3 ]; then
    echo "接受 3 个参数 [N] [filepath] [skip_num]"
    echo "N 为 2 的次方,表示同时处理的发射源数量"
    exit
fi

# Run the skip-variant calculator, then render its output to an image.
./multicalc$3_cpp $1 b_64.filter $2 mul$1-skip$3.txt
./2image_cpp mul$1-skip$3.txt

# Publish the result text and image to the shared samba folder, keeping a
# renamed local copy of the generated image.
cp mul$1-skip$3.txt /home/samba/anonymous_shares/Skip_Compare/mul$1-skip$3.txt
cp 160.png /home/samba/anonymous_shares/Skip_Compare/mul$1-skip$3.png
mv 160.png mul$1-skip$3.png
|
<filename>src/models/Util.js
'use strict';
const Error = require('./Error');
class Util {
  /**
   * Static helper container; instantiation is not allowed.
   */
  constructor() {
    throw new Error(`${this.constructor.name} may not be initiated`);
  }

  static test() {
    return 'hello world';
  }

  /**
   * Merges an object's properties to other object: any key of `givenObj`
   * that is missing or `undefined` is filled in from `defObj`, recursing
   * into nested objects.
   * @param {Object} defObj Object with default properties
   * @param {Object} givenObj Object to assign default properties to
   * @returns {Object} `givenObj` with defaults applied (mutated in place)
   */
  static mergeObject(defObj, givenObj) {
    if (!givenObj) return defObj;
    for (const key in defObj) {
      // BUG FIX: the original passed `defObj` (the whole object) instead of
      // `key` to hasOwnProperty, so the check was effectively always false
      // and defaults clobbered every value the caller had supplied.
      if (!Object.prototype.hasOwnProperty.call(givenObj, key) || givenObj[key] === undefined) {
        givenObj[key] = defObj[key];
      } else if (givenObj[key] === Object(givenObj[key])) {
        givenObj[key] = Util.mergeObject(defObj[key], givenObj[key]);
      }
    }
    return givenObj;
  }
}
module.exports = Util; |
#!/bin/sh

set -e

# Run psql as the configured superuser.
export PGUSER="$POSTGRES_USER"

# Strip any packaging suffix (e.g. "3.1.4+dfsg-...") so the extension
# version strings match what PostgreSQL expects.
POSTGIS_VERSION="${POSTGIS_VERSION%%+*}"

# Load PostGIS into both template_database and $POSTGRES_DB
# (plus any extra database names passed as arguments).
for DB in template_postgis "$POSTGRES_DB" "${@}"; do
    echo "Updating PostGIS extensions '$DB' to $POSTGIS_VERSION"
    psql --dbname="$DB" -c "
        -- Upgrade PostGIS (includes raster)
        CREATE EXTENSION IF NOT EXISTS postgis VERSION '$POSTGIS_VERSION';
        ALTER EXTENSION postgis  UPDATE TO '$POSTGIS_VERSION';

        -- Upgrade Topology
        CREATE EXTENSION IF NOT EXISTS postgis_topology VERSION '$POSTGIS_VERSION';
        ALTER EXTENSION postgis_topology UPDATE TO '$POSTGIS_VERSION';

        -- Install Tiger dependencies in case not already installed
        CREATE EXTENSION IF NOT EXISTS fuzzystrmatch;
        -- Upgrade US Tiger Geocoder
        CREATE EXTENSION IF NOT EXISTS postgis_tiger_geocoder VERSION '$POSTGIS_VERSION';
        ALTER EXTENSION postgis_tiger_geocoder UPDATE TO '$POSTGIS_VERSION';
    "
done
#!/usr/bin/env bash
# Fetch the XML syntax definitions bundled with a given skylighting-core
# release from Hackage and copy them into ../syntax.

set -e

function usage {
    echo "Usage: $0 <skylighting version>"
}

HERE=$(cd `dirname $0`; pwd)
SKYLIGHTING_VER=$1
SYNTAX=$HERE/../syntax
PKG=skylighting-core-$SKYLIGHTING_VER

if [ -z "$SKYLIGHTING_VER" ]
then
    usage
    exit 1
fi

mkdir -p $SYNTAX

# Scratch directory for the unpacked package.
BASE=$(mktemp -d)
function rm_base {
    rm -r $BASE
}
# Clean up the scratch directory on exit. The original defined rm_base but
# never used it, duplicating the rm command inline in the trap; reuse the
# helper instead (behavior is identical).
trap rm_base exit

cd $BASE
cabal unpack $PKG
cp -f $PKG/xml/*.xml $SYNTAX
cp -f $PKG/xml/language.dtd $SYNTAX
echo $SKYLIGHTING_VER > $SYNTAX/skylighting-version
# Install the project in editable mode so local changes take effect.
pip install -e .
# Authenticate with Weights & Biases for experiment logging.
wandb login
# Launch distributed training across 2 processes with the lidar-only config.
torchpack dist-run -np 2 python FusionTransformer/train.py --cfg configs/semantic_kitti/lidar.yaml --use_torchpack 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.